+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/07/31 07:28:15 Waiting for host: 192.168.66.102:22
2018/07/31 07:28:18 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/31 07:28:26 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/31 07:28:34 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/31 07:28:42 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/31 07:28:47 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: connection refused. Sleeping 5s
2018/07/31 07:28:52 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987
e183c40c07dc
861f604efed4
12902ad26342
028539b1f68b
bd6f07c1906c
d1f95a33a226
c43f96b6da26
e007e5cfd226
b42e2bceca6e
00531aec6f9a
e4ad39ba6cef
504c3df6bbf4
eb1ec0b445ce
b8955b91e8e5
f739ed8f3e59
07668d85ab3a
a6045d125d7b
2ce17110e009
b45f64ab28ef
3a15945be9e1
2a0af99ae1d1
0ece927846d7
0202d5f5dfae
8ce743769d8f
2efb36567bd8
96b65c0493c5
e9ce89fa30e3
2018/07/31 07:28:54 Waiting for host: 192.168.66.101:22
2018/07/31 07:28:57 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/31 07:29:05 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/31 07:29:13 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/31 07:29:18 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused. Sleeping 5s
2018/07/31 07:29:23 Connected to tcp://192.168.66.101:22
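The "Waiting for host" lines above come from the provider bring-up tooling polling each VM's SSH port before continuing. A minimal bash sketch of the same retry pattern; the real tool dials from Go, so using nc as the probe here is an assumption for illustration only:

    # Poll host:port until a TCP connection succeeds, sleeping 5s between attempts.
    wait_for_host() {
        local host=$1 port=$2
        echo "Waiting for host: ${host}:${port}"
        until nc -z -w 3 "$host" "$port" 2>/dev/null; do   # assumed probe; the real tool uses a Go dialer
            echo "Problem with dial: ${host}:${port} not reachable. Sleeping 5s"
            sleep 5
        done
        echo "Connected to tcp://${host}:${port}"
    }
    wait_for_host 192.168.66.102 22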
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=2.65 ms

--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 2.659/2.659/2.659/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable

--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms

+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
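Reassembled from the xtrace above, the node-discovery loop looks roughly like the following. Variable names and commands match the trace; xtrace does not show redirections, so the append targets of the echo calls are assumptions:

    nodes_found=false
    for i in $(seq 2 100); do
        node=$(printf node%02d "$i")
        node_ip=$(printf 192.168.66.1%02d "$i")
        set +e
        ping "$node_ip" -c 1
        # Stop probing at the first address that does not answer.
        if [ $? -ne 0 ]; then
            break
        fi
        nodes_found=true
        set -e
        echo "$node_ip $node" >> /etc/hosts        # assumed destination
        echo "Found $node. Adding it to the inventory."
        echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> "$inventory_file"   # assumed destination
    done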
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml

PLAY [Populate config host groups] *********************************************
TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

PLAY [Ensure there are new_nodes] **********************************************
TASK [fail] ********************************************************************
skipping: [localhost]
TASK [fail] ********************************************************************
skipping: [localhost]

PLAY [Initialization Checkpoint Start] *****************************************
TASK [Set install initialization 'In Progress'] ********************************
ok: [node01]

PLAY [Populate config host groups] *********************************************
TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)
 [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config

PLAY [Ensure that all non-node hosts are accessible] ***************************
TASK [Gathering Facts] *********************************************************
ok: [node01]

PLAY [Initialize basic host facts] *********************************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02
TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] ***
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : debug] ************************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_stats] ********************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] ***
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Standardize on latest variable names] *****
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Normalize openshift_release] **************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02
TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
skipping: [node01] => (item=openshift_hosted_etcd_storage_kind)
skipping: [node02] => (item=openshift_hosted_etcd_storage_kind)
TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : At least one master is schedulable] *******
skipping: [node01]
skipping: [node02]
TASK [Detecting Operating System from ostree_booted] ***************************
ok: [node02]
ok: [node01]
TASK [set openshift_deployment_type if unset] **********************************
skipping: [node01]
skipping: [node02]
TASK [check for node already bootstrapped] *************************************
ok: [node01]
ok: [node02]
TASK [initialize_facts set fact openshift_is_bootstrapped] *********************
ok: [node01]
ok: [node02]
TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] ***
ok: [node01]
ok: [node02]
TASK [Determine Atomic Host Docker Version] ************************************
skipping: [node01]
skipping: [node02]
TASK [assert atomic host docker version is 1.12 or later] **********************
skipping: [node01]
skipping: [node02]

PLAY [Retrieve existing master configs and validate] ***************************
TASK [openshift_control_plane : stat] ******************************************
ok: [node01]
TASK [openshift_control_plane : slurp] *****************************************
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] ***
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Initialize special first-master variables] *******************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Disable web console if required] *****************************************
TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Setup yum repositories for all hosts] ************************************
TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Install Red Hat Subscription manager] *******************
skipping: [node02]
TASK [rhel_subscribe : Is host already registered?] ****************************
skipping: [node02]
TASK [rhel_subscribe : Register host] ******************************************
skipping: [node02]
TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] ***********
skipping: [node02]
TASK [rhel_subscribe : Attach to OpenShift Pool] *******************************
skipping: [node02]
TASK [rhel_subscribe : Satellite preparation] **********************************
skipping: [node02]
TASK [openshift_repos : openshift_repos detect ostree] *************************
ok: [node02]
TASK [openshift_repos : Ensure libselinux-python is installed] *****************
ok: [node02]
TASK [openshift_repos : Remove openshift_additional.repo file] *****************
ok: [node02]
TASK [openshift_repos : Create any additional repos that are defined] **********
TASK [openshift_repos : include_tasks] *****************************************
skipping: [node02]
TASK [openshift_repos : include_tasks] *****************************************
included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02
TASK [openshift_repos : Configure origin gpg keys] *****************************
ok: [node02]
TASK [openshift_repos : Configure correct origin release repository] ***********
ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2)
TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] ***
changed: [node02] => { "msg": "First run of openshift_repos" }
TASK [openshift_repos : Record that openshift_repos already ran] ***************
ok: [node02]
RUNNING HANDLER [openshift_repos : refresh cache] ******************************
changed: [node02]

PLAY [Install packages necessary for installer] ********************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Determine if chrony is installed] ****************************************
 [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message.
changed: [node02]
TASK [Install ntp package] *****************************************************
skipping: [node02]
TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]
TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=python-docker)

PLAY [Initialize cluster facts] ************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]
TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node01]
ok: [node02]
TASK [Gather Cluster facts] ****************************************************
changed: [node02]
changed: [node01]
TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]
TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]

PLAY [Initialize etcd host variables] ******************************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Determine openshift_version to configure on first master] ****************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0" }
TASK [openshift_version : set_fact] ********************************************
ok: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => { "changed": false, "msg": "All assertions passed" }
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => { "changed": false, "msg": "All assertions passed" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_release": "3.10" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_pkg_version": "-3.10.0*" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_version": "3.10.0" }
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Verify Requirements] *****************************************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [Run variable sanity checks] **********************************************
ok: [node01]
TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]

PLAY [Initialization Checkpoint End] *******************************************
TASK [Set install initialization 'Complete'] ***********************************
ok: [node01]

PLAY [Validate node hostnames] *************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Query DNS for IP address of node02] **************************************
ok: [node02]
TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]
TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]

PLAY [Configure os_firewall] ***************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]
TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]
TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]
TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]
TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)
TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]
TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]
TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] ***
skipping: [node02]
TASK [os_firewall : Restart polkitd] *******************************************
skipping: [node02]
TASK [os_firewall : Wait for polkit action to have been created] ***************
skipping: [node02]
TASK [os_firewall : Ensure firewalld service is not enabled] *******************
ok: [node02]
TASK [os_firewall : Wait 10 seconds after disabling firewalld] *****************
skipping: [node02]
TASK [os_firewall : Install iptables packages] *********************************
ok: [node02] => (item=iptables)
ok: [node02] => (item=iptables-services)
TASK [os_firewall : Start and enable iptables service] *************************
ok: [node02 -> node02] => (item=node02)
TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] ***
skipping: [node02]

PLAY [oo_nodes_to_config] ******************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [container_runtime : Setup the docker-storage for overlay] ****************
skipping: [node02]
TASK [container_runtime : Create file system on extra volume device] ***********
TASK [container_runtime : Create mount entry for extra volume] *****************

PLAY [oo_nodes_to_config] ******************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
ok: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ******************
skipping: [node02]
TASK [container_runtime : Add https_proxy to /etc/atomic.conf] *****************
skipping: [node02]
TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ********************
skipping: [node02]
TASK [container_runtime : Get current installed Docker version] ****************
ok: [node02]
TASK [container_runtime : Error out if Docker pre-installed but too old] *******
skipping: [node02]
TASK [container_runtime : Error out if requested Docker is too old] ************
skipping: [node02]
TASK [container_runtime : Install Docker] **************************************
skipping: [node02]
TASK [container_runtime : Ensure docker.service.d directory exists] ************
ok: [node02]
TASK [container_runtime : Configure Docker service unit file] ******************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Set registry params] *********************************
skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []})
TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] ***
skipping: [node02]
TASK [container_runtime : Set Proxy Settings] **********************************
skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''})
TASK [container_runtime : Set various Docker options] **************************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Configure Docker Network OPTIONS] ********************
ok: [node02]
TASK [container_runtime : Detect if docker is already started] *****************
ok: [node02]
TASK [container_runtime : Start the Docker service] ****************************
ok: [node02]
TASK [container_runtime : set_fact] ********************************************
ok: [node02]
TASK [container_runtime : Check for docker_storage_path/overlay2] **************
ok: [node02]
TASK [container_runtime : Fixup SELinux permissions for docker] ****************
changed: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
ok: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
ok: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
ok: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]
TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Check that overlay is in the kernel] *****************
skipping: [node02]
TASK [container_runtime : Add overlay to modprobe.d] ***************************
skipping: [node02]
TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
skipping: [node02]
TASK [container_runtime : Enable and start systemd-modules-load] ***************
skipping: [node02]
TASK [container_runtime : Install cri-o] ***************************************
skipping: [node02]
TASK [container_runtime : Remove CRI-O default configuration files] ************
skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf)
skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf)
TASK [container_runtime : Create the CRI-O configuration] **********************
skipping: [node02]
TASK [container_runtime : Ensure CNI configuration directory exists] ***********
skipping: [node02]
TASK [container_runtime : Add iptables allow rules] ****************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove iptables rules] *******************************
TASK [container_runtime : Add firewalld allow rules] ***************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove firewalld allow rules] ************************
TASK [container_runtime : Configure the CNI network] ***************************
skipping: [node02]
TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
skipping: [node02]
TASK [container_runtime : Start the CRI-O service] *****************************
skipping: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
skipping: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
skipping: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
skipping: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]

PLAY [Determine openshift_version to configure on first master] ****************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
skipping: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => { "changed": false, "msg": "All assertions passed" }
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => { "changed": false, "msg": "All assertions passed" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_release": "3.10" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_pkg_version": "-3.10.0*" }
TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_version": "3.10.0" }
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Node Preparation Checkpoint Start] ***************************************
TASK [Set Node preparation 'In Progress'] **************************************
ok: [node01]

PLAY [Only target nodes that have not yet been bootstrapped] *******************
TASK [Gathering Facts] *********************************************************
ok: [localhost]
TASK [add_host] ****************************************************************
skipping: [localhost] => (item=node02)
ok: [localhost] => (item=node01)

PLAY [Disable excluders] *******************************************************
TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]
TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true }
TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true }
TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]
TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]
TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-docker-excluder version detected] ************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-excluder version detected] *******************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
skipping: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]

PLAY [Configure nodes] *********************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_cloud_provider : Set cloud provider facts] *********************
skipping: [node02]
TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
skipping: [node02]
TASK [openshift_cloud_provider : include the defined cloud provider files] *****
skipping: [node02]
TASK [openshift_node : fail] ***************************************************
skipping: [node02]
TASK [openshift_node : Check for NetworkManager service] ***********************
ok: [node02]
TASK [openshift_node : Set fact using_network_manager] *************************
ok: [node02]
TASK [openshift_node : Install dnsmasq] ****************************************
ok: [node02]
TASK [openshift_node : ensure origin/node directory exists] ********************
changed: [node02] => (item=/etc/origin)
changed: [node02] => (item=/etc/origin/node)
TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
skipping: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
skipping: [node02]
TASK [openshift_node : Install dnsmasq configuration] **************************
ok: [node02]
TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
skipping: [node02]
TASK [openshift_node : Enable dnsmasq] *****************************************
ok: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
ok: [node02]
TASK [openshift_node : Add iptables allow rules] *******************************
ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove iptables rules] **********************************
TASK [openshift_node : Add firewalld allow rules] ******************************
skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove firewalld allow rules] ***************************
TASK [openshift_node : Checking for journald.conf] *****************************
ok: [node02]
TASK [openshift_node : Create journald persistence directories] ****************
ok: [node02]
TASK [openshift_node : Update journald setup] **********************************
ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'})
ok: [node02] => (item={u'var': u'Compress', u'val': True})
ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000})
ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
ok: [node02] => (item={u'var': u'ForwardToSyslog', u'val': False})
ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False})
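The "Update journald setup" items above map one-for-one onto keys in /etc/systemd/journald.conf. Assuming a straightforward rendering of those var/val pairs (the booleans become yes/no), the resulting [Journal] section would look like:

    [Journal]
    Storage=persistent
    Compress=yes
    SyncIntervalSec=1s
    RateLimitInterval=1s
    RateLimitBurst=10000
    SystemMaxUse=8G
    SystemKeepFree=20%
    SystemMaxFileSize=10M
    MaxRetentionSec=1month
    MaxFileSec=1day
    ForwardToSyslog=no
    ForwardToWall=no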
TASK [openshift_node : Restart journald] ***************************************
skipping: [node02]
TASK [openshift_node : Disable swap] *******************************************
ok: [node02]
TASK [openshift_node : Install node, clients, and conntrack packages] **********
ok: [node02] => (item={u'name': u'origin-node-3.10.0*'})
ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'})
ok: [node02] => (item={u'name': u'conntrack-tools'})
TASK [openshift_node : Restart cri-o] ******************************************
skipping: [node02]
TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
changed: [node02]
TASK [openshift_node : sysctl] *************************************************
ok: [node02]
TASK [openshift_node : Check for credentials file for registry auth] ***********
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth] *******************
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth (alternative)] *****
skipping: [node02]
TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
skipping: [node02]
TASK [openshift_node : Check that node image is present] ***********************
changed: [node02]
TASK [openshift_node : Pre-pull node image] ************************************
skipping: [node02]
TASK [openshift_node : Copy node script to the node] ***************************
ok: [node02]
TASK [openshift_node : Install Node service file] ******************************
ok: [node02]
TASK [openshift_node : Ensure old system path is set] **************************
skipping: [node02] => (item=/etc/origin/openvswitch)
skipping: [node02] => (item=/var/lib/kubelet)
skipping: [node02] => (item=/opt/cni/bin)
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Copy node container image to ostree storage] ************
skipping: [node02]
TASK [openshift_node : Install or Update node system container] ****************
skipping: [node02]
TASK [openshift_node : Restart network manager to ensure networking configuration is in place] ***
skipping: [node02]
TASK [openshift_node : Configure Node settings] ********************************
ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='})
ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'})
ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'})
TASK [openshift_node : Configure Proxy Settings] *******************************
skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='})
skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='})
skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'})
TASK [openshift_node : file] ***************************************************
skipping: [node02]
TASK [openshift_node : Create the Node config] *********************************
changed: [node02]
TASK [openshift_node : Configure Node Environment Variables] *******************
TASK [openshift_node : Ensure the node static pod directory exists] ************
changed: [node02]
TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Install NFS storage plugin dependencies] ****************
ok: [node02]
TASK [openshift_node : Check for existence of nfs sebooleans] ******************
ok: [node02] => (item=virt_use_nfs)
ok: [node02] => (item=virt_sandbox_use_nfs)
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:36.438844', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.015964', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:36.422880', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:37.719886', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.010774', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:37.709112', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:36.438844', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.015964', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:36.422880', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:37.719886', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.010774', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:37.709112', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Install GlusterFS storage plugin dependencies] **********
ok: [node02]
TASK [openshift_node : Check for existence of fusefs sebooleans] ***************
ok: [node02] => (item=virt_use_fusefs)
ok: [node02] => (item=virt_sandbox_use_fusefs)
TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:43.920218', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.011797', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:43.908421', '_ansible_ignore_errors': None, 'failed': False})
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:45.101871', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.007415', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:45.094456', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:43.920218', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.011797', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:43.908421', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-31 07:37:45.101871', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.007415', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:45.094456', '_ansible_ignore_errors': None, 'failed': False})
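The seboolean tasks above flip SELinux booleans so containerized workloads can use NFS and FuseFS (GlusterFS) mounts. The playbook checks state with getsebool and persists changes via Ansible's seboolean module; done by hand the equivalent would be roughly (an illustration, not the playbook's literal commands):

    for b in virt_use_nfs virt_sandbox_use_nfs virt_use_fusefs virt_sandbox_use_fusefs; do
        getsebool "$b"          # check current state, as the playbook does
        setsebool -P "$b" on    # enable and persist across reboots
    done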
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-31 07:37:45.094456', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': u's0', 'seuser': u'unconfined_u', 'serole': u'object_r', 'ctime': 1531032437.8490183, 'state': u'file', 'gid': 0, 'mode': u'0644', 'mtime': 1531032437.8490183, 'owner': u'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': u'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use 
the recommended tuned profile on restart] *********
changed: [node02] => (item=/etc/tuned/active_profile)
changed: [node02] => (item=/etc/tuned/profile_mode)

TASK [tuned : Restart tuned service] *******************************************
changed: [node02]

TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] *******
ok: [node02]

TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] ***

PLAY [node bootstrap config] ***************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [openshift_node : install needed rpm(s)] **********************************
ok: [node02] => (item=origin-node)
ok: [node02] => (item=origin-docker-excluder)
ok: [node02] => (item=ansible)
ok: [node02] => (item=bash-completion)
ok: [node02] => (item=docker)
ok: [node02] => (item=haproxy)
ok: [node02] => (item=dnsmasq)
ok: [node02] => (item=ntp)
ok: [node02] => (item=logrotate)
ok: [node02] => (item=httpd-tools)
ok: [node02] => (item=bind-utils)
ok: [node02] => (item=firewalld)
ok: [node02] => (item=libselinux-python)
ok: [node02] => (item=conntrack-tools)
ok: [node02] => (item=openssl)
ok: [node02] => (item=iproute)
ok: [node02] => (item=python-dbus)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=glusterfs-fuse)
ok: [node02] => (item=device-mapper-multipath)
ok: [node02] => (item=nfs-utils)
ok: [node02] => (item=cockpit-ws)
ok: [node02] => (item=cockpit-system)
ok: [node02] => (item=cockpit-bridge)
ok: [node02] => (item=cockpit-docker)
ok: [node02] => (item=iscsi-initiator-utils)
ok: [node02] => (item=ceph-common)

TASK [openshift_node : create the directory for node] **************************
skipping: [node02]

TASK [openshift_node : laydown systemd override] *******************************
skipping: [node02]

TASK [openshift_node : update the sysconfig to have necessary variables] *******
ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'})

TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]

TASK [openshift_node : disable origin-node service] ****************************
changed: [node02] => (item=origin-node.service)

TASK [openshift_node : Check for RPM generated config marker file .config_managed] ***
ok: [node02]

TASK [openshift_node : create directories for bootstrapping] *******************
ok: [node02] => (item=/root/openshift_bootstrap)
changed: [node02] => (item=/var/lib/origin/openshift.local.config)
changed: [node02] => (item=/var/lib/origin/openshift.local.config/node)
ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000)

TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] ***
ok: [node02]

TASK [openshift_node : Create a symlink to the node client CA for the docker registry] ***
ok: [node02]

TASK [openshift_node : Remove RPM generated config files if present] ***********
skipping: [node02] => (item=master)
skipping: [node02] => (item=.config_managed)

TASK [openshift_node : find all files in /etc/origin/node so we can remove them] ***
skipping: [node02]

TASK [openshift_node : Remove everything except the resolv.conf required for node] ***
skipping: [node02]

TASK [openshift_node_group : create node config template] **********************
changed: [node02]

TASK [openshift_node_group : remove existing node config] **********************
changed: [node02]

TASK [openshift_node_group : Ensure required directories are present] **********
ok: [node02] => (item=/etc/origin/node/pods)
changed: [node02] => (item=/etc/origin/node/certificates)

TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] ***
changed: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

PLAY [Re-enable excluder if it was previously enabled] *************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]

TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_docker_excluder": true
}

TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}

TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]

TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]

TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02

TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]

TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]

PLAY [Node Preparation Checkpoint End] *****************************************

TASK [Set Node preparation 'Complete'] *****************************************
ok: [node01]

PLAY [Distribute bootstrap and start nodes] ************************************

TASK [openshift_node : Gather node information] ********************************
changed: [node02]
ok: [node01]

TASK [openshift_node : Copy master bootstrap config locally] *******************
ok: [node02]

TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] ***
ok: [node01]
changed: [node02]

TASK [openshift_node : Start and enable node for bootstrapping] ****************
changed: [node02]
changed: [node01]

TASK [openshift_node : Get node logs] ******************************************
skipping: [node02]
skipping: [node01]

TASK [openshift_node : debug] **************************************************
skipping: [node02]
skipping: [node01]

TASK [openshift_node : fail] ***************************************************
skipping: [node02]
skipping: [node01]

PLAY [Approve any pending CSR requests from inventory nodes] *******************

TASK [Dump all candidate bootstrap hostnames] **********************************
ok: [node01] => {
    "msg": [
        "node02",
        "node01"
    ]
}

TASK [Find all hostnames for bootstrapping] ************************************
ok: [node01]

TASK [Dump the bootstrap hostnames] ********************************************
ok: [node01] => {
    "msg": [
        "node02",
        "node01"
    ]
}

TASK [Approve bootstrap nodes] *************************************************
changed: [node01]

TASK [Get CSRs] ****************************************************************
skipping: [node01]

TASK [Report approval errors] **************************************************
skipping: [node01]

PLAY [Ensure any inventory labels are applied to the nodes] ********************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]

TASK [openshift_manage_node : Wait for master API to become available before proceeding] ***
skipping: [node02]

TASK [openshift_manage_node : Wait for Node Registration] **********************
ok: [node01 -> node01]
ok: [node02 -> node01]

TASK [openshift_manage_node : include_tasks] ***********************************
included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01

TASK [openshift_manage_node : Set node schedulability] *************************
ok: [node02 -> node01]
ok: [node01 -> node01]

TASK [openshift_manage_node : include_tasks] ***********************************
included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01

TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] ***
ok: [node02 -> node01]

TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] ***

TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] ***
ok: [node02 -> node01]

TASK [openshift_manage_node : label non-master non-infra nodes compute] ********

TASK [openshift_manage_node : Label all-in-one master as a compute node] *******
skipping: [node02]

PLAY RECAP *********************************************************************
localhost                  : ok=30   changed=0    unreachable=0   failed=0
node01                     : ok=71   changed=3    unreachable=0   failed=0
node02                     : ok=155  changed=33   unreachable=0   failed=0

INSTALLER STATUS ***************************************************************
Initialization   : Complete (0:03:57)
Node Preparation : Complete (0:05:09)

Sending file modes: C0755 110489328 oc
Sending file modes: C0600 5649 admin.kubeconfig
Cluster "node01:8443" set.
Cluster "node01:8443" set.
+ set +e
+ kubectl get nodes --no-headers
+ cluster/kubectl.sh get nodes --no-headers
node01    Ready     compute,infra,master   23d       v1.10.0+b81c8f8
node02    Ready     compute                1m        v1.10.0+b81c8f8
+ kubectl_rc=0
+ '[' 0 -ne 0 ']'
++ kubectl get nodes --no-headers
++ cluster/kubectl.sh get nodes --no-headers
++ grep NotReady
+ '[' -n '' ']'
+ set -e
+ echo 'Nodes are ready:'
Nodes are ready:
+ kubectl get nodes
+ cluster/kubectl.sh get nodes
NAME      STATUS    ROLES                  AGE       VERSION
node01    Ready     compute,infra,master   23d       v1.10.0+b81c8f8
node02    Ready     compute                1m        v1.10.0+b81c8f8
+ make cluster-sync
./cluster/build.sh
Building ...
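The trace above is the whole readiness gate: the run aborts if any node still reports NotReady, otherwise it prints the node table and moves on to building images. A minimal standalone sketch of that gate in bash, assuming only a kubectl already pointed at the cluster (the error message and exit code are illustrative, not taken from the CI scripts):

    #!/bin/bash
    set -e
    # One-shot readiness gate, as in the trace above: any NotReady node
    # aborts the run; otherwise show the final node table and continue.
    if kubectl get nodes --no-headers | grep -q NotReady; then
        echo 'Some nodes are still NotReady' >&2
        exit 1
    fi
    echo 'Nodes are ready:'
    kubectl get nodes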
sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.39 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b00c84523b53 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> b76b8bd8cd39 Step 5/8 : USER 1001 ---> Using cache ---> b6d9ad9ed232 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> Using cache ---> 26e47e2eed9a Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Using cache ---> 00128c27df18 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-controller" '' ---> Running in 3684e091bffb ---> 4a201c003111 Removing intermediate container 3684e091bffb Successfully built 4a201c003111 Sending build context to Docker daemon 43.31 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 945996802736 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 672f9ab56316 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> Using cache ---> e4fbfd7dd6f4 Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> Using cache ---> 277fd58fb838 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Using cache ---> 41fb49daafbb Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Using cache ---> 0f116f97a756 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> Using cache ---> 6d3d9fef7422 Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Using cache ---> e447a0c26991 Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-launcher" '' ---> Running in 6626fde2efb1 ---> 60275eafa66c Removing intermediate container 6626fde2efb1 Successfully built 60275eafa66c Sending build context to Docker daemon 41.69 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> Using cache ---> ebf34d3b2e34 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Using cache ---> 2ab400acac6f Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-handler" '' ---> Running in 9a723cc5ddf8 ---> 779690690652 Removing intermediate container 9a723cc5ddf8 Successfully built 779690690652 Sending build context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> ed1ebf600ee1 Step 4/8 : WORKDIR 
/home/virt-api ---> Using cache ---> 0769dad023e5 Step 5/8 : USER 1001 ---> Using cache ---> 0cb65afb0c2b Step 6/8 : COPY virt-api /usr/bin/virt-api ---> Using cache ---> fdbdfdd8a925 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Using cache ---> cbd2bf56e797 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-api" '' ---> Running in 5a3279c1f53f ---> e0de336e3ebe Removing intermediate container 5a3279c1f53f Successfully built e0de336e3ebe Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/7 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 02134835a6aa Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> ec0843818da7 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 754029bb4bd2 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 6327b8256318 Successfully built 6327b8256318 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 207487abe7b2 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "vm-killer" '' ---> Using cache ---> 27cf5472530f Successfully built 27cf5472530f Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 5734d749eb5c Step 3/7 : ENV container docker ---> Using cache ---> f8775a77966f Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 1a40cf222a61 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 77b545d92fe7 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> dfe20d463305 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "registry-disk-v1alpha" '' ---> Using cache ---> 5efdf368e732 Successfully built 5efdf368e732 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33198/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 386f7e924456 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> f473a86e4d6a Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a4ca4c67d45c Successfully built a4ca4c67d45c Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33198/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> de1e81f43a59 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a5867eac6e05 Successfully 
built a5867eac6e05 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33198/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 1083d820f9c8 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 11512d828b9c Successfully built 11512d828b9c Sending build context to Docker daemon 35.59 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 985fe391c056 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 3b2cae8ac543 Step 5/8 : USER 1001 ---> Using cache ---> 0c06e5b4a900 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> Using cache ---> 5834911395de Step 7/8 : ENTRYPOINT /subresource-access-test ---> Using cache ---> c748c4ca2d4b Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "subresource-access-test" '' ---> Running in 6b6597c97a8a ---> 0016c01777f2 Removing intermediate container 6b6597c97a8a Successfully built 0016c01777f2 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/9 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> d3456b1644b1 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0ba81fddbba1 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 5d33abe3f819 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 783826523be1 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 711bc8d15952 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "winrmcli" '' ---> Using cache ---> fe40426b785b Successfully built fe40426b785b Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> e3238544ad97 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> Using cache ---> d649ae9c07b2 Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Using cache ---> ed2ddb41eafe Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Running in 40d515a1915a ---> 28b17f4ae304 Removing intermediate container 40d515a1915a Successfully built 28b17f4ae304 hack/build-docker.sh push The push refers to a repository [localhost:33198/kubevirt/virt-controller] fb53462f160c: Preparing aa89340cf7a8: Preparing 891e1e4ef82a: Preparing aa89340cf7a8: Pushed fb53462f160c: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:9bf90106d81994bcf6803d68ed9c1a9ac5e9b656e3710d07edc66098b33984f0 size: 949 The push refers to a repository [localhost:33198/kubevirt/virt-launcher] ce259f5a8292: Preparing 26240f121795: Preparing 25e7c2948d10: Preparing 0b9aa09f91bf: Preparing 320e5049bb39: Preparing 633427c64a24: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 
5eefb9960a36: Preparing 891e1e4ef82a: Preparing da38cf808aa5: Waiting 186d8b3e4fd8: Waiting 633427c64a24: Waiting 891e1e4ef82a: Waiting fa6154170bf5: Waiting 26240f121795: Pushed 25e7c2948d10: Pushed 0b9aa09f91bf: Pushed ce259f5a8292: Pushed 320e5049bb39: Pushed da38cf808aa5: Pushed fa6154170bf5: Pushed b83399358a92: Pushed 186d8b3e4fd8: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 633427c64a24: Pushed 5eefb9960a36: Pushed devel: digest: sha256:a27e41edb5916c8e64c2f33c620440f8a2d4462144c7ecf5666d76d2b8f340f3 size: 2828 The push refers to a repository [localhost:33198/kubevirt/virt-handler] e67a348bbd5e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher e67a348bbd5e: Pushed devel: digest: sha256:991f1a2b62b64a9f62fc5f501b5086cf8e4b1c810450fefcaf5c6de0b1c5131a size: 741 The push refers to a repository [localhost:33198/kubevirt/virt-api] e0c88486f23a: Preparing 82fc744c99b4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 82fc744c99b4: Pushed e0c88486f23a: Pushed devel: digest: sha256:2a290ce264af408b6cc2942c58333359941ae1e846149472230163bbfdf4b578 size: 948 The push refers to a repository [localhost:33198/kubevirt/disks-images-provider] 71ad31feb2c5: Preparing 21d4b721776e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 71ad31feb2c5: Pushed 21d4b721776e: Pushed devel: digest: sha256:5dc088106df85eb01f2ad0566624239b95b34986820107944e36d309183fd4cd size: 948 The push refers to a repository [localhost:33198/kubevirt/vm-killer] c4cfadeeaf5f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider c4cfadeeaf5f: Pushed devel: digest: sha256:39b817b79b1fbce75dbb476bc261b2752fd6466bf98d373208d5144579da22b0 size: 740 The push refers to a repository [localhost:33198/kubevirt/registry-disk-v1alpha] 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 661cce8d8e52: Pushed 41e0baba3077: Pushed 25edbec0eaea: Pushed devel: digest: sha256:0df707a55243af8792380fba68a76307017494c503e0e9071ed55d7d3c3611d4 size: 948 The push refers to a repository [localhost:33198/kubevirt/cirros-registry-disk-demo] f9f97de3966a: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 661cce8d8e52: Mounted from kubevirt/registry-disk-v1alpha 41e0baba3077: Mounted from kubevirt/registry-disk-v1alpha f9f97de3966a: Pushed devel: digest: sha256:3f818f67105a36bdc42bdbfad87fc29d0028e39a0dceef92d12efbcf8e16e5ed size: 1160 The push refers to a repository [localhost:33198/kubevirt/fedora-cloud-registry-disk-demo] 24cdf3b545f2: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 41e0baba3077: Mounted from kubevirt/cirros-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/cirros-registry-disk-demo 24cdf3b545f2: Pushed devel: digest: sha256:a6a571626690141c7da4cf0e1eb4fd75e5dd9ae427d5070c2729214cfbd6a192 size: 1161 The push refers to a repository [localhost:33198/kubevirt/alpine-registry-disk-demo] d8e356e905f4: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/fedora-cloud-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/fedora-cloud-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo d8e356e905f4: Pushed devel: digest: 
sha256:c27568048aa8e031860d98cdced0370763745ad80581e62432568dac45abf1fb size: 1160 The push refers to a repository [localhost:33198/kubevirt/subresource-access-test] 67e4275094d1: Preparing 25cb73590a9d: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 25cb73590a9d: Pushed 67e4275094d1: Pushed devel: digest: sha256:66442c9dc9c238fc5c1a167727dceb7238b378f1fb9bef83b6006b6dda08fd95 size: 948 The push refers to a repository [localhost:33198/kubevirt/winrmcli] f8083e002d0b: Preparing 53c709abc882: Preparing 9ca98a0f492b: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test f8083e002d0b: Pushed 9ca98a0f492b: Pushed 53c709abc882: Pushed devel: digest: sha256:4fe6c9666a841b61b962d7fb73ccb7cb0dabc3b56e1657cfdfd9005e1a36d38c size: 1165 The push refers to a repository [localhost:33198/kubevirt/example-hook-sidecar] 9167c7cd1cd7: Preparing 39bae602f753: Preparing 9167c7cd1cd7: Pushed 39bae602f753: Pushed devel: digest: sha256:241863e982884cd48b3c556ac6eba67a3dd41c13f8a119f07a2d606a9d979886 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-167-ga277982 ++ KUBEVIRT_VERSION=v0.7.0-167-ga277982 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig 
manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33198/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... + cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration 
-l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l 
kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l 
kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-167-ga277982 ++ KUBEVIRT_VERSION=v0.7.0-167-ga277982 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test 
images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33198/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... + [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" 
created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n kube-system --no-headers + '[' -n 'disks-images-provider-mwfrr 0/1 ContainerCreating 0 4s disks-images-provider-zbzrs 0/1 ContainerCreating 0 3s virt-api-7d79764579-5k8nw 0/1 ContainerCreating 0 7s virt-api-7d79764579-jzf2l 0/1 ContainerCreating 0 7s virt-controller-7d57d96b65-c55ct 0/1 ContainerCreating 0 7s virt-handler-gfmtm 0/1 ContainerCreating 0 7s virt-handler-l7664 0/1 ContainerCreating 0 7s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
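The deploy script then waits for the pods it just created, polling each namespace in two phases: first until no pod is outside the Running state, then until no container reports ready=false, sleeping 30 seconds between samples and giving up after 300 seconds. A condensed sketch of that loop (the timeout, sample, and namespace values mirror the trace; the failure handling is illustrative):

    #!/bin/bash
    timeout=300
    sample=30
    for ns in kube-system default; do
        elapsed=0
        # Phase 1: wait until no pod in the namespace is outside Running.
        while [ -n "$(kubectl get pods -n "$ns" --no-headers | grep -v Running)" ]; do
            echo 'Waiting for kubevirt pods to enter the Running state ...'
            sleep "$sample"; elapsed=$((elapsed + sample))
            [ "$elapsed" -gt "$timeout" ] && { echo "timed out waiting on $ns" >&2; exit 1; }
        done
        elapsed=0
        # Phase 2: wait until no container in the namespace reports ready=false.
        while kubectl get pods -n "$ns" \
                -ocustom-columns='status:status.containerStatuses[*].ready' \
                --no-headers | grep -q false; do
            echo 'Waiting for KubeVirt containers to become ready ...'
            sleep "$sample"; elapsed=$((elapsed + sample))
            [ "$elapsed" -gt "$timeout" ] && { echo "timed out waiting on $ns" >&2; exit 1; }
        done
    done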
+ kubectl get pods -n kube-system --no-headers
+ cluster/kubectl.sh get pods -n kube-system --no-headers
+ grep -v Running
disks-images-provider-mwfrr        0/1       ContainerCreating   0         5s
disks-images-provider-zbzrs        0/1       ContainerCreating   0         4s
virt-api-7d79764579-5k8nw          0/1       ContainerCreating   0         8s
virt-api-7d79764579-jzf2l          0/1       ContainerCreating   0         8s
virt-handler-gfmtm                 0/1       ContainerCreating   0         8s
virt-handler-l7664                 0/1       ContainerCreating   0         8s
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system --no-headers
++ cluster/kubectl.sh get pods -n kube-system --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
+ '[' -n false ']'
+ echo 'Waiting for KubeVirt containers to become ready ...'
Waiting for KubeVirt containers to become ready ...
+ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ grep false
+ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
false
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n kube-system
+ cluster/kubectl.sh get pods -n kube-system
NAME                               READY     STATUS    RESTARTS   AGE
disks-images-provider-mwfrr        1/1       Running   0          1m
disks-images-provider-zbzrs        1/1       Running   0          1m
master-api-node01                  1/1       Running   1          23d
master-controllers-node01          1/1       Running   1          23d
master-etcd-node01                 1/1       Running   1          23d
virt-api-7d79764579-5k8nw          1/1       Running   1          1m
virt-api-7d79764579-jzf2l          1/1       Running   0          1m
virt-controller-7d57d96b65-c55ct   1/1       Running   0          1m
virt-controller-7d57d96b65-fn7p9   1/1       Running   0          1m
virt-handler-gfmtm                 1/1       Running   0          1m
virt-handler-l7664                 1/1       Running   0          1m
+ for i in '${namespaces[@]}'
+ current_time=0
++ kubectl get pods -n default --no-headers
++ grep -v Running
++ cluster/kubectl.sh get pods -n default --no-headers
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n default
+ cluster/kubectl.sh get pods -n default
NAME                       READY     STATUS    RESTARTS   AGE
docker-registry-1-rl562    1/1       Running   1          23d
registry-console-1-rw9zf   1/1       Running   1          23d
router-1-6cch9             1/1       Running   1          23d
+ kubectl version
+ cluster/kubectl.sh version
oc v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
features: Basic-Auth GSSAPI Kerberos SPNEGO
Server https://127.0.0.1:33195
openshift v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
+ ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ [[ openshift-3.10-release =~ windows.* ]]
+ FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ make functest
hack/dockerized "hack/build-func-tests.sh"
sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200
go version go1.10 linux/amd64
go version go1.10 linux/amd64
Compiling tests...
tests... compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1533024043 Will run 151 of 151 specs •• ------------------------------ • [SLOW TEST:5.694 seconds] Templates /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42 Launching VMI from VM Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60 with given Fedora Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193 with given VM JSON from the Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152 with given VM from the VM JSON /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158 should succeed to launch a VMI using oc-patch command /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:161 ------------------------------ • [SLOW TEST:6.890 seconds] Templates /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42 Launching VMI from VM Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60 with given Fedora Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193 with given VM JSON from the Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152 with given VM from the VM JSON /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158 with given VMI from the VM /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163 should succeed to terminate the VMI using oc-patch command /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166 ------------------------------ • [SLOW TEST:46.867 seconds] Health Monitoring /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37 A VirtualMachineInstance with a watchdog device /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56 should be shut down when the watchdog expires /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57 ------------------------------ 2018/07/31 04:01:52 read closing down: EOF • [SLOW TEST:20.873 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:21.073 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given an vm /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.928 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi preset /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:21.038 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts 
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi replica set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:18.597 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should update VirtualMachine once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195 ------------------------------ •• ------------------------------ • [SLOW TEST:55.596 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if it gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245 ------------------------------ • [SLOW TEST:48.428 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265 ------------------------------ • [SLOW TEST:27.642 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should stop VirtualMachineInstance if running set to false /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325 ------------------------------ Pod name: disks-images-provider-mwfrr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-zbzrs Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5k8nw Pod phase: Running level=info timestamp=2018-07-31T08:11:23.076911Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:11:23.123054Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:11:23.182781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:11:24 http: TLS handshake error from 10.128.0.1:44942: EOF level=info timestamp=2018-07-31T08:11:25.866848Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:11:28.433019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:11:28.435243Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info 
timestamp=2018-07-31T08:11:31.912647Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-31T08:11:31.916602Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/31 08:11:34 http: TLS handshake error from 10.128.0.1:44988: EOF level=info timestamp=2018-07-31T08:11:36.092329Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:11:37.774254Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:11:40.665526Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:11:44 http: TLS handshake error from 10.128.0.1:45038: EOF level=info timestamp=2018-07-31T08:11:46.296047Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:09:39 http: TLS handshake error from 10.129.0.1:55698: EOF level=info timestamp=2018-07-31T08:09:40.118186Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:09:49 http: TLS handshake error from 10.129.0.1:55708: EOF 2018/07/31 08:09:59 http: TLS handshake error from 10.129.0.1:55718: EOF 2018/07/31 08:10:09 http: TLS handshake error from 10.129.0.1:55728: EOF level=info timestamp=2018-07-31T08:10:10.185588Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:10:19 http: TLS handshake error from 10.129.0.1:55738: EOF 2018/07/31 08:10:29 http: TLS handshake error from 10.129.0.1:55748: EOF 2018/07/31 08:10:39 http: TLS handshake error from 10.129.0.1:55758: EOF 2018/07/31 08:10:49 http: TLS handshake error from 10.129.0.1:55768: EOF 2018/07/31 08:10:59 http: TLS handshake error from 10.129.0.1:55778: EOF 2018/07/31 08:11:09 http: TLS handshake error from 10.129.0.1:55788: EOF 2018/07/31 08:11:19 http: TLS handshake error from 10.129.0.1:55798: EOF 2018/07/31 08:11:29 http: TLS handshake error from 10.129.0.1:55810: EOF 2018/07/31 08:11:39 http: TLS handshake error from 10.129.0.1:55820: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T07:59:15.290014Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-fn7p9 Pod phase: Running level=info timestamp=2018-07-31T08:06:46.436800Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-31T08:06:46.438464Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=ab7d0d68-9498-11e8-8b5c-525500d15501 msg="Initializing 
VirtualMachineInstance" level=info timestamp=2018-07-31T08:06:46.438861Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=ab7d0d68-9498-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:06:46.479912Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:06:46.480234Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-31T08:06:46.505462Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:06:46.505718Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-31T08:06:46.618474Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:06:46.618754Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-31T08:06:46.724884Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pmqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pmqn" level=info timestamp=2018-07-31T08:06:46.748479Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:06:46.748806Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-31T08:06:46.751252Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2pmqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2pmqn" level=info timestamp=2018-07-31T08:07:03.762990Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:07:03.764143Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c6f1b9b-9498-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T07:59:26.236898Z pos=virt-handler.go:87 component=virt-handler 
hostname=node01 level=info timestamp=2018-07-31T07:59:26.242726Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-31T07:59:26.243368Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-31T07:59:26.354493Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-31T07:59:26.490285Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-31T07:59:26.618366Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:06:14.358351Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-31T08:06:14.359002Z pos=vm.go:373 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c7cedb5-9498-11e8-8b5c-525500d15501 msg="Deleting domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-31T08:06:14.359845Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c7cedb5-9498-11e8-8b5c-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-07-31T08:06:14.360419Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c7cedb5-9498-11e8-8b5c-525500d15501 msg="Processing deletion." level=info timestamp=2018-07-31T08:06:14.363844Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c7cedb5-9498-11e8-8b5c-525500d15501 msg="Signaled deletion for testvmi2pmqn" level=info timestamp=2018-07-31T08:06:14.375219Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=8c7cedb5-9498-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:06:46.147728Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi2pmqn, existing: false\n" level=info timestamp=2018-07-31T08:06:46.148371Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:06:46.148536Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-31T08:06:46.149009Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-31T08:06:46.149126Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-31T08:06:46.149312Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:07:03.756231Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi2pmqn, existing: true\n" level=info timestamp=2018-07-31T08:07:03.756483Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-31T08:07:03.756800Z pos=vm.go:325 component=virt-handler namespace=kubevirt-test-default name=testvmi2pmqn kind= uid=ab7d0d68-9498-11e8-8b5c-525500d15501 msg="Ignoring domain from an older VMI, will be handled by its own VMI." 
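Each virt-handler block above is one pass of its per-VMI reconcile loop: the handler logs whether the VMI object still exists, whether a matching libvirt domain exists and in which state, and then which branch it took (update, shutdown, or removal of the domain and ephemeral data). When reading a failure, it helps to extract that trace for a single VMI; a simple way to do so against a run like this one, using the handler pod names and the VMI name that appear above:

# Pull the reconcile trace for one VMI from both virt-handler pods.
# Pod names are the ones from this run; substitute your own.
for p in virt-handler-gfmtm virt-handler-l7664; do
    echo "== $p"
    kubectl logs -n kube-system "$p" | grep testvmi2pmqn
done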
Pod name: virt-launcher-testvmi2pmqn-68m9c Pod phase: Running level=info timestamp=2018-07-31T08:06:51.419383Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-31T08:06:51.419842Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-31T08:06:51.421992Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-31T08:07:01.550345Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-31T08:07:01.620623Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2pmqn" level=info timestamp=2018-07-31T08:07:01.623190Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-31T08:07:01.623947Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [354.062 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should start and stop VirtualMachineInstance multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333 Timed out after 300.000s. Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157 ------------------------------ STEP: Doing run: 0 STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition STEP: Stopping the VirtualMachineInstance STEP: VMI has not the running condition STEP: Doing run: 1 STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition • [SLOW TEST:43.417 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should not update the VirtualMachineInstance spec if Running /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346 ------------------------------ • [SLOW TEST:176.697 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should survive guest shutdown, multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387 ------------------------------ 2018/07/31 04:15:28 read closing down: EOF 2018/07/31 04:15:28 read closing down: EOF 2018/07/31 04:15:28 read closing down: EOF VM testvmitjvkr was scheduled to start • [SLOW TEST:18.087 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should start a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 ------------------------------ VM testvmicq64j was scheduled to stop • [SLOW TEST:24.024 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should stop a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467 ------------------------------ •••••••••••• ------------------------------ • [SLOW TEST:7.794 seconds] 
VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:22.180 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should update readyReplicas once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157 ------------------------------ • [SLOW TEST:8.270 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove VMIs once it is marked for deletion /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:169 ------------------------------ • ------------------------------ • [SLOW TEST:5.750 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should not scale when paused and scale when resume /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223 ------------------------------ • [SLOW TEST:6.708 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove the finished VM /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.014 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to start a vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133 Skip Windows tests that require PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.013 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to stop a running vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139 Skip Windows tests that require PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.010 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have correct UUID /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192 Skip Windows tests that require PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.009 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have pod IP /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208 Skip Windows tests that require PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.013 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to start a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242 Skip Windows tests that require PVC
disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.011 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to stop a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250 Skip Windows tests that require PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ 2018/07/31 04:18:15 read closing down: EOF 2018/07/31 04:18:26 read closing down: EOF 2018/07/31 04:18:36 read closing down: EOF 2018/07/31 04:18:47 read closing down: EOF 2018/07/31 04:18:48 read closing down: EOF 2018/07/31 04:18:50 read closing down: EOF • [SLOW TEST:95.489 seconds] 2018/07/31 04:18:51 read closing down: EOF 2018/07/31 04:18:51 read closing down: EOF Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/07/31 04:18:52 read closing down: EOF •2018/07/31 04:18:53 read closing down: EOF 2018/07/31 04:18:53 read closing down: EOF ------------------------------ S [SKIPPING] [0.011 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with custom MAC address [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Openshift detected: Custom MAC addresses on pod networks are not supported /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1570 ------------------------------ 2018/07/31 04:18:54 read closing down: EOF 2018/07/31 04:18:55 read closing down: EOF •2018/07/31 04:18:55 read closing down: EOF • ------------------------------ • [SLOW TEST:5.279 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be reachable via the propagated IP from a Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 on a different node from Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ ••• ------------------------------ • [SLOW TEST:5.381 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a service matching the vmi exposed /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:272 should fail to reach the vmi if an invalid servicename is used /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:303 ------------------------------ •2018/07/31 04:20:06 read closing down: EOF ------------------------------ • [SLOW TEST:40.949 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom interface model /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:368 should expose the right device type to the guest /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:369 ------------------------------ 2018/07/31 04:20:07 read closing down: EOF 2018/07/31 04:20:08 read closing down: EOF •2018/07/31 04:20:09 read closing down:
EOF 2018/07/31 04:23:28 read closing down: EOF Pod name: disks-images-provider-mwfrr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-zbzrs Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5k8nw Pod phase: Running level=info timestamp=2018-07-31T08:22:37.513991Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:22:41.716841Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:22:44 http: TLS handshake error from 10.128.0.1:48384: EOF level=info timestamp=2018-07-31T08:22:47.599192Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:22:54 http: TLS handshake error from 10.128.0.1:48430: EOF level=info timestamp=2018-07-31T08:22:57.763371Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:23:03.754548Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:23:04 http: TLS handshake error from 10.128.0.1:48476: EOF level=info timestamp=2018-07-31T08:23:05.533393Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:23:05.540614Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:23:07.986983Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:23:14 http: TLS handshake error from 10.128.0.1:48526: EOF level=info timestamp=2018-07-31T08:23:18.241725Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:23:24 http: TLS handshake error from 10.128.0.1:48572: EOF level=info timestamp=2018-07-31T08:23:28.461020Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:21:21 http: TLS handshake error from 10.129.0.1:45378: EOF 2018/07/31 08:21:31 http: TLS handshake error from 10.129.0.1:45386: EOF level=info timestamp=2018-07-31T08:21:41.206790Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:21:41 http: TLS handshake error from 10.129.0.1:45396: EOF 2018/07/31 08:21:51 http: TLS handshake error from 10.129.0.1:45406: EOF 2018/07/31 08:22:01 http: TLS 
handshake error from 10.129.0.1:45416: EOF 2018/07/31 08:22:11 http: TLS handshake error from 10.129.0.1:45426: EOF 2018/07/31 08:22:21 http: TLS handshake error from 10.129.0.1:45436: EOF 2018/07/31 08:22:31 http: TLS handshake error from 10.129.0.1:45446: EOF 2018/07/31 08:22:41 http: TLS handshake error from 10.129.0.1:45456: EOF 2018/07/31 08:22:51 http: TLS handshake error from 10.129.0.1:45466: EOF 2018/07/31 08:23:01 http: TLS handshake error from 10.129.0.1:45476: EOF level=info timestamp=2018-07-31T08:23:11.070256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:23:11 http: TLS handshake error from 10.129.0.1:45486: EOF 2018/07/31 08:23:21 http: TLS handshake error from 10.129.0.1:45496: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:21:14.648156Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-fn7p9 Pod phase: Running level=info timestamp=2018-07-31T08:21:15.776124Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 level=info timestamp=2018-07-31T08:21:15.892763Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-31T08:21:15.892826Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-31T08:21:15.892850Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-31T08:21:15.892870Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-31T08:21:15.892887Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-31T08:21:15.892905Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-31T08:21:15.892959Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer" level=info timestamp=2018-07-31T08:21:15.892977Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-31T08:21:15.893037Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-31T08:21:15.905331Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-31T08:21:15.905412Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-31T08:21:15.905447Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-31T08:21:15.905508Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
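Only one of the two virt-controller replicas (fn7p9) starts its informers and controllers here; the other (c55ct) just reports that it is listening. That split is what you would expect from an active/standby pair, where a standby replica takes over only if the active one goes away. A quick, hedged way to spot the active replica in a run like this (pod names are the ones from this run; the grep pattern matches the startup lines above):

# Count informer-startup lines per replica; the active one has a nonzero count.
for p in virt-controller-7d57d96b65-c55ct virt-controller-7d57d96b65-fn7p9; do
    n=$(kubectl logs -n kube-system "$p" | grep -c 'STARTING informer')
    echo "$p: $n"
done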
Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:17:35.700115Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n" level=info timestamp=2018-07-31T08:17:35.700145Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-31T08:17:35.700206Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:17:35.700229Z pos=vm.go:331 component=virt-handler msg="Domain status: Paused, reason: StartingUp\n" level=info timestamp=2018-07-31T08:17:35.700344Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:17:35.697864Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:17:35.702107Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-31T08:17:35.733087Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:17:35.782032Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:17:35.782170Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n" level=info timestamp=2018-07-31T08:17:35.782210Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:17:35.782245Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:17:35.782269Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:17:35.784472Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:17:35.791155Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:21:28.416452Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:21:28.416518Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind= uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:21:28.416581Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind= uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:21:28.435189Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqvrpl kind=VirtualMachineInstance uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-31T08:21:28.437562Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiqvrpl, existing: true\n" level=info timestamp=2018-07-31T08:21:28.437615Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-31T08:21:28.437676Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:21:28.437744Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:21:28.437790Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:21:28.517710Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:21:28.518433Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitvm6q, existing: true\n" level=info timestamp=2018-07-31T08:21:28.518482Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-31T08:21:28.518510Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:21:28.518630Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:21:28.518681Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." Pod name: netcat7hl6l Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.45 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcat8hw2f Pod phase: Failed ++ head -n 1 +++ nc wrongservice.kubevirt-test-default 1500 -i 1 -w 1 Ncat: Could not resolve hostname "wrongservice.kubevirt-test-default": Name or service not known. QUITTING. + x= + echo '' + '[' '' = 'Hello World!' ']' + echo failed + exit 1 failed Pod name: netcatbnvsp Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.45 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcatgsvrx Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.45 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcathfb78 Pod phase: Succeeded ++ head -n 1 +++ nc my-subdomain.myvmi.kubevirt-test-default 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcatnsdjh Pod phase: Succeeded ++ head -n 1 +++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatvnhj7 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.45 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' 
']' + echo succeeded + exit 0 Pod name: virt-launcher-testvmi5qjfq-zkvnd Pod phase: Running level=info timestamp=2018-07-31T08:17:34.067085Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:17:34.274743Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:34.277449Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 20ddae30-e03e-4bb2-9da2-8cffd0836e9a" level=info timestamp=2018-07-31T08:17:34.278469Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:17:34.703301Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:17:34.942040Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:17:34.944607Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:34.946820Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:34.955794Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:34.956052Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:17:34.966122Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:34.970354Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.302311Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 20ddae30-e03e-4bb2-9da2-8cffd0836e9a: 188" level=info timestamp=2018-07-31T08:17:35.333076Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:35.363869Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmij2v8q-252p2 Pod phase: Running level=info timestamp=2018-07-31T08:17:33.253562Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain defined." 
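The netcat pods dumped a little earlier in this failure report all run the same one-shot probe: read a single line from the target host and port, then compare it against the expected payload. Reconstructed as a standalone script, with the service name, port, and expected string taken from those traces:

#!/bin/bash
# Probe the exposed service once; succeed only on the expected banner.
x=$(nc myservice.kubevirt-test-default 1500 -i 1 -w 1 | head -n 1)
echo "$x"
if [ "$x" = 'Hello World!' ]; then
    echo succeeded
    exit 0
fi
echo failed
exit 1

The failing variant (netcat8hw2f) is the negative test: it points nc at wrongservice.kubevirt-test-default, expects name resolution to fail, and exits 1, which is the desired outcome for "should fail to reach the vmi if an invalid servicename is used".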
level=info timestamp=2018-07-31T08:17:34.582041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:17:34.587136Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3" level=info timestamp=2018-07-31T08:17:34.587323Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:17:34.633396Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.593304Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3: 189" level=info timestamp=2018-07-31T08:17:35.610946Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:17:35.650576Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:17:35.652668Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:35.656550Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:35.703578Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.705212Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:17:35.723370Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:35.733865Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.789786Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmip7728-4mftv Pod phase: Failed level=info timestamp=2018-07-31T08:19:43.633481Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:19:44.423135Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:19:44.450632Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:19:44.564146Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 139f49f9-c671-4bc7-80e6-b303daaab306" level=info timestamp=2018-07-31T08:19:44.566752Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:19:44.969675Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:19:45.001262Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:19:45.014438Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:19:45.019241Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:19:45.038731Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:19:45.044794Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:19:45.055611Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:19:45.061486Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:19:45.136124Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:19:45.571927Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 139f49f9-c671-4bc7-80e6-b303daaab306: 184" Pod name: virt-launcher-testvmipht6z-snllx Pod phase: Running level=info timestamp=2018-07-31T08:22:01.098433Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-31T08:22:01.098764Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-31T08:22:01.101643Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-31T08:22:11.380863Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-31T08:22:11.448717Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipht6z" level=info timestamp=2018-07-31T08:22:11.451622Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-31T08:22:11.452281Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiqvrpl-dpdqf Pod phase: Failed level=info timestamp=2018-07-31T08:17:35.402892Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:17:35.415771Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 94358403-9291-4524-a269-1405f1d0252b" level=info timestamp=2018-07-31T08:17:35.416664Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:17:35.606388Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.987156Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:17:36.037007Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:17:36.051593Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:36.054011Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:36.435923Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 94358403-9291-4524-a269-1405f1d0252b: 189" level=info timestamp=2018-07-31T08:17:36.686087Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:36.687194Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:17:36.700814Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:37.618002Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:49.309218Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:49.554269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmitvm6q-8656c Pod phase: Failed level=info timestamp=2018-07-31T08:17:35.263855Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:17:36.621421Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:17:36.654800Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a7d70eee-4108-44d0-8b04-23888fb77672" level=info timestamp=2018-07-31T08:17:36.659190Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:17:36.829064Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:17:36.831106Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:37.013574Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:37.014243Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:17:37.031686Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:37.635607Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:37.635950Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:17:37.673030Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:37.686858Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a7d70eee-4108-44d0-8b04-23888fb77672: 195" level=info timestamp=2018-07-31T08:17:37.781666Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:39.529238Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi" ------------------------------ • Failure [202.372 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:402 should configure custom MAC address [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:403 Expected error: : 180000000000 expect: timer expired after 180 seconds not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1102 ------------------------------ STEP: checking eth0 MAC address level=info timestamp=2018-07-31T08:20:09.498684Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="Created virtual machine pod virt-launcher-testvmipht6z-snllx" level=info timestamp=2018-07-31T08:20:25.798682Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="Pod ownership transferred to the node virt-launcher-testvmipht6z-snllx" level=info timestamp=2018-07-31T08:20:28.263010Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-31T08:20:28.585262Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="VirtualMachineInstance started."
level=info timestamp=2018-07-31T08:23:28.923529Z pos=utils.go:1291 component=tests namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="Login: [{2 \r\n\r\n\u001b[?7h\r\n []}]"

Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:23:07.986983Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:23:14 http: TLS handshake error from 10.128.0.1:48526: EOF
level=info timestamp=2018-07-31T08:23:18.241725Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:23:24 http: TLS handshake error from 10.128.0.1:48572: EOF
level=info timestamp=2018-07-31T08:23:28.461020Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:23:34.271600Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:23:34 http: TLS handshake error from 10.128.0.1:48620: EOF
level=info timestamp=2018-07-31T08:23:35.945166Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:23:35.986710Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:23:38.692088Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:23:41.718662Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:23:44 http: TLS handshake error from 10.128.0.1:48672: EOF
level=info timestamp=2018-07-31T08:23:48.944249Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:23:54 http: TLS handshake error from 10.128.0.1:48718: EOF
level=info timestamp=2018-07-31T08:23:59.185236Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:21:51 http: TLS handshake error from 10.129.0.1:45406: EOF
2018/07/31 08:22:01 http: TLS handshake error from 10.129.0.1:45416: EOF
2018/07/31 08:22:11 http: TLS handshake error from 10.129.0.1:45426: EOF
2018/07/31 08:22:21 http: TLS handshake error from 10.129.0.1:45436: EOF
2018/07/31 08:22:31 http: TLS handshake error from 10.129.0.1:45446: EOF
2018/07/31 08:22:41 http: TLS handshake error from 10.129.0.1:45456: EOF
2018/07/31 08:22:51 http: TLS handshake error from 10.129.0.1:45466: EOF
2018/07/31 08:23:01 http: TLS handshake error from 10.129.0.1:45476: EOF
level=info timestamp=2018-07-31T08:23:11.070256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:23:11 http: TLS handshake error from 10.129.0.1:45486: EOF
2018/07/31 08:23:21 http: TLS handshake error from 10.129.0.1:45496: EOF
2018/07/31 08:23:31 http: TLS handshake error from 10.129.0.1:45506: EOF
2018/07/31 08:23:41 http: TLS handshake error from 10.129.0.1:45516: EOF
2018/07/31 08:23:51 http: TLS handshake error from 10.129.0.1:45526: EOF
2018/07/31 08:24:01 http: TLS handshake error from 10.129.0.1:45536: EOF

Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:21:14.648156Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-fn7p9
Pod phase: Running
level=info timestamp=2018-07-31T08:21:15.892763Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer"
level=info timestamp=2018-07-31T08:21:15.892826Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer"
level=info timestamp=2018-07-31T08:21:15.892850Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-07-31T08:21:15.892870Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-07-31T08:21:15.892887Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-07-31T08:21:15.892905Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-31T08:21:15.892959Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer"
level=info timestamp=2018-07-31T08:21:15.892977Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-07-31T08:21:15.893037Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-31T08:21:15.905331Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-31T08:21:15.905412Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-31T08:21:15.905447Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-31T08:21:15.905508Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-31T08:24:01.372308Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:24:01.373193Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-handler-gfmtm
Pod phase: Running
level=info timestamp=2018-07-31T08:17:35.700115Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n"
level=info timestamp=2018-07-31T08:17:35.700145Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-31T08:17:35.700206Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:17:35.700229Z pos=vm.go:331 component=virt-handler msg="Domain status: Paused, reason: StartingUp\n"
level=info timestamp=2018-07-31T08:17:35.700344Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:17:35.697864Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-31T08:17:35.702107Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-31T08:17:35.733087Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-31T08:17:35.782032Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:17:35.782170Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n"
level=info timestamp=2018-07-31T08:17:35.782210Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:17:35.782245Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:17:35.782269Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:17:35.784472Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:17:35.791155Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-l7664
Pod phase: Running
level=info timestamp=2018-07-31T08:21:28.416452Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:21:28.416518Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind= uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:21:28.416581Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind= uid=89f84dbd-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:21:28.435189Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqvrpl kind=VirtualMachineInstance uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:21:28.437562Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiqvrpl, existing: true\n"
level=info timestamp=2018-07-31T08:21:28.437615Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-07-31T08:21:28.437676Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:21:28.437744Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:21:28.437790Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:21:28.517710Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:21:28.518433Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitvm6q, existing: true\n"
level=info timestamp=2018-07-31T08:21:28.518482Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-07-31T08:21:28.518510Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:21:28.518630Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:21:28.518681Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."

Pod name: netcat7hl6l
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcat8hw2f
Pod phase: Failed
++ head -n 1
+++ nc wrongservice.kubevirt-test-default 1500 -i 1 -w 1
Ncat: Could not resolve hostname "wrongservice.kubevirt-test-default": Name or service not known. QUITTING.
+ x=
+ echo ''
+ '[' '' = 'Hello World!' ']'
+ echo failed
+ exit 1
failed

Pod name: netcatbnvsp
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatgsvrx
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcathfb78
Pod phase: Succeeded
++ head -n 1
+++ nc my-subdomain.myvmi.kubevirt-test-default 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatnsdjh
Pod phase: Succeeded
++ head -n 1
+++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcatvnhj7
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: virt-launcher-testvmi5qjfq-zkvnd
Pod phase: Running
level=info timestamp=2018-07-31T08:17:34.067085Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:34.274743Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.277449Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 20ddae30-e03e-4bb2-9da2-8cffd0836e9a"
level=info timestamp=2018-07-31T08:17:34.278469Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.703301Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.942040Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:34.944607Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:34.946820Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.955794Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.956052Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.966122Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.970354Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.302311Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 20ddae30-e03e-4bb2-9da2-8cffd0836e9a: 188"
level=info timestamp=2018-07-31T08:17:35.333076Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.363869Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmifrb2f-9nzzm
Pod phase: Pending

Pod name: virt-launcher-testvmij2v8q-252p2
Pod phase: Running
level=info timestamp=2018-07-31T08:17:33.253562Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-31T08:17:34.582041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:34.587136Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3"
level=info timestamp=2018-07-31T08:17:34.587323Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.633396Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.593304Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3: 189"
level=info timestamp=2018-07-31T08:17:35.610946Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.650576Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:35.652668Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.656550Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.703578Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.705212Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.723370Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.733865Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.789786Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmip7728-4mftv
Pod phase: Failed
level=info timestamp=2018-07-31T08:19:43.633481Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:19:44.423135Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:19:44.450632Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:44.564146Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 139f49f9-c671-4bc7-80e6-b303daaab306"
level=info timestamp=2018-07-31T08:19:44.566752Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:19:44.969675Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.001262Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.014438Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.019241Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.038731Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:19:45.044794Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.055611Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.061486Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.136124Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.571927Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 139f49f9-c671-4bc7-80e6-b303daaab306: 184"

Pod name: virt-launcher-testvmipht6z-snllx
Pod phase: Running
level=info timestamp=2018-07-31T08:22:01.098433Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-31T08:22:01.098764Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-31T08:22:01.101643Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-31T08:22:11.380863Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-31T08:22:11.448717Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipht6z"
level=info timestamp=2018-07-31T08:22:11.451622Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-31T08:22:11.452281Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmiqvrpl-dpdqf
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.402892Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:35.415771Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 94358403-9291-4524-a269-1405f1d0252b"
level=info timestamp=2018-07-31T08:17:35.416664Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:35.606388Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.987156Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.037007Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.051593Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:36.054011Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:36.435923Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 94358403-9291-4524-a269-1405f1d0252b: 189"
level=info timestamp=2018-07-31T08:17:36.686087Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:36.687194Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.700814Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.618002Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:49.309218Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:49.554269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmitvm6q-8656c
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.263855Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.621421Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:36.654800Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a7d70eee-4108-44d0-8b04-23888fb77672"
level=info timestamp=2018-07-31T08:17:36.659190Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:36.829064Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.831106Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:37.013574Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.014243Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.031686Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.635607Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.635950Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.673030Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.686858Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a7d70eee-4108-44d0-8b04-23888fb77672: 195"
level=info timestamp=2018-07-31T08:17:37.781666Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:39.529238Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

• Failure [32.961 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address in non-conventional format
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:414
    should configure custom MAC address [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:415

    Expected error:
        <*errors.StatusError | 0xc42016f440>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:420
------------------------------
STEP: checking eth0 MAC address

Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:24:06.280919Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:06.304902Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:09.410514Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:09.969294Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:24:09.973282Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:24:10.885798Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:10.916537Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:10.966603Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:11.837494Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:24:14 http: TLS handshake error from 10.128.0.1:48814: EOF
level=info timestamp=2018-07-31T08:24:19.731667Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:24:24 http: TLS handshake error from 10.128.0.1:48860: EOF
level=info timestamp=2018-07-31T08:24:29.975234Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:34.471431Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:24:34 http: TLS handshake error from 10.128.0.1:48906: EOF

Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:22:21 http: TLS handshake error from 10.129.0.1:45436: EOF
2018/07/31 08:22:31 http: TLS handshake error from 10.129.0.1:45446: EOF
2018/07/31 08:22:41 http: TLS handshake error from 10.129.0.1:45456: EOF
2018/07/31 08:22:51 http: TLS handshake error from 10.129.0.1:45466: EOF
2018/07/31 08:23:01 http: TLS handshake error from 10.129.0.1:45476: EOF
level=info timestamp=2018-07-31T08:23:11.070256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:23:11 http: TLS handshake error from 10.129.0.1:45486: EOF
2018/07/31 08:23:21 http: TLS handshake error from 10.129.0.1:45496: EOF
2018/07/31 08:23:31 http: TLS handshake error from 10.129.0.1:45506: EOF
2018/07/31 08:23:41 http: TLS handshake error from 10.129.0.1:45516: EOF
2018/07/31 08:23:51 http: TLS handshake error from 10.129.0.1:45526: EOF
2018/07/31 08:24:01 http: TLS handshake error from 10.129.0.1:45536: EOF
2018/07/31 08:24:11 http: TLS handshake error from 10.129.0.1:45548: EOF
2018/07/31 08:24:21 http: TLS handshake error from 10.129.0.1:45558: EOF
2018/07/31 08:24:31 http: TLS handshake error from 10.129.0.1:45568: EOF

Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:21:14.648156Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-fn7p9
Pod phase: Running
level=info timestamp=2018-07-31T08:21:15.892850Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-07-31T08:21:15.892870Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-07-31T08:21:15.892887Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-07-31T08:21:15.892905Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-31T08:21:15.892959Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer"
level=info timestamp=2018-07-31T08:21:15.892977Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-07-31T08:21:15.893037Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-31T08:21:15.905331Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-31T08:21:15.905412Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-31T08:21:15.905447Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-31T08:21:15.905508Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-31T08:24:01.372308Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:24:01.373193Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:24:34.206054Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:24:34.206791Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-handler-gfmtm
Pod phase: Running
level=info timestamp=2018-07-31T08:17:35.700115Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n"
level=info timestamp=2018-07-31T08:17:35.700145Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-31T08:17:35.700206Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:17:35.700229Z pos=vm.go:331 component=virt-handler msg="Domain status: Paused, reason: StartingUp\n"
level=info timestamp=2018-07-31T08:17:35.700344Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:17:35.697864Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-31T08:17:35.702107Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-31T08:17:35.733087Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-31T08:17:35.782032Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:17:35.782170Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n"
level=info timestamp=2018-07-31T08:17:35.782210Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:17:35.782245Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:17:35.782269Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:17:35.784472Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:17:35.791155Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-l7664
Pod phase: Running
level=info timestamp=2018-07-31T08:24:19.617360Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmifrb2f kind=Domain uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-31T08:24:19.727699Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-31T08:24:19.743576Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:24:19.743895Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmifrb2f, existing: true\n"
level=info timestamp=2018-07-31T08:24:19.744095Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-31T08:24:19.744233Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:24:19.744310Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:24:19.744486Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:24:19.841702Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:24:19.856748Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmifrb2f, existing: true\n"
level=info timestamp=2018-07-31T08:24:19.857152Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:24:19.857373Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:24:19.857533Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:24:19.857779Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:24:19.862371Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."

Pod name: netcat7hl6l
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcat8hw2f
Pod phase: Failed
++ head -n 1
+++ nc wrongservice.kubevirt-test-default 1500 -i 1 -w 1
Ncat: Could not resolve hostname "wrongservice.kubevirt-test-default": Name or service not known. QUITTING.
+ x=
+ echo ''
+ '[' '' = 'Hello World!' ']'
+ echo failed
+ exit 1
failed

Pod name: netcatbnvsp
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatgsvrx
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcathfb78
Pod phase: Succeeded
++ head -n 1
+++ nc my-subdomain.myvmi.kubevirt-test-default 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatnsdjh
Pod phase: Succeeded
++ head -n 1
+++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcatvnhj7
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: virt-launcher-testvmi5qjfq-zkvnd
Pod phase: Running
level=info timestamp=2018-07-31T08:17:34.067085Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:34.274743Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.277449Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 20ddae30-e03e-4bb2-9da2-8cffd0836e9a"
level=info timestamp=2018-07-31T08:17:34.278469Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.703301Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.942040Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:34.944607Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:34.946820Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.955794Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.956052Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.966122Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.970354Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.302311Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 20ddae30-e03e-4bb2-9da2-8cffd0836e9a: 188"
level=info timestamp=2018-07-31T08:17:35.333076Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.363869Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmi6dncv-fc75j
Pod phase: Pending

Pod name: virt-launcher-testvmifrb2f-9nzzm
Pod phase: Running
level=info timestamp=2018-07-31T08:24:18.302228Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.095686Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a092e255-c17e-437f-b563-96ab34f32dce"
level=info timestamp=2018-07-31T08:24:19.096613Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:24:19.103033Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:24:19.134235Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.581606Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.605897Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:19.611218Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.635322Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.658852Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:24:19.673369Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:19.685913Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:19.733525Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.861684Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:20.100875Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a092e255-c17e-437f-b563-96ab34f32dce: 184"

Pod name: virt-launcher-testvmij2v8q-252p2
Pod phase: Running
level=info timestamp=2018-07-31T08:17:33.253562Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-31T08:17:34.582041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:34.587136Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3"
level=info timestamp=2018-07-31T08:17:34.587323Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.633396Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.593304Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3: 189"
level=info timestamp=2018-07-31T08:17:35.610946Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.650576Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:35.652668Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.656550Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.703578Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.705212Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.723370Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.733865Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.789786Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmip7728-4mftv
Pod phase: Failed
level=info timestamp=2018-07-31T08:19:43.633481Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:19:44.423135Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:19:44.450632Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:44.564146Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 139f49f9-c671-4bc7-80e6-b303daaab306"
level=info timestamp=2018-07-31T08:19:44.566752Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:19:44.969675Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.001262Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.014438Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.019241Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.038731Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:19:45.044794Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.055611Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.061486Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.136124Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.571927Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 139f49f9-c671-4bc7-80e6-b303daaab306: 184"

Pod name: virt-launcher-testvmipht6z-snllx
Pod phase: Running
level=info timestamp=2018-07-31T08:22:01.098433Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-31T08:22:01.098764Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-31T08:22:01.101643Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-31T08:22:11.380863Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-31T08:22:11.448717Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipht6z"
level=info timestamp=2018-07-31T08:22:11.451622Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-31T08:22:11.452281Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmiqvrpl-dpdqf
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.402892Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:35.415771Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 94358403-9291-4524-a269-1405f1d0252b"
level=info timestamp=2018-07-31T08:17:35.416664Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:35.606388Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.987156Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.037007Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.051593Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:36.054011Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:36.435923Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 94358403-9291-4524-a269-1405f1d0252b: 189"
level=info timestamp=2018-07-31T08:17:36.686087Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:36.687194Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.700814Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.618002Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:49.309218Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:49.554269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmitvm6q-8656c
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.263855Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.621421Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:36.654800Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a7d70eee-4108-44d0-8b04-23888fb77672"
level=info timestamp=2018-07-31T08:17:36.659190Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:36.829064Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.831106Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:37.013574Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.014243Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.031686Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.635607Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.635950Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.673030Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.686858Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a7d70eee-4108-44d0-8b04-23888fb77672: 195"
level=info timestamp=2018-07-31T08:17:37.781666Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:39.529238Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

• Failure [33.057 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address and slirp interface
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:427
    should configure custom MAC address [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:428

    Expected error:
        <*errors.StatusError | 0xc420b32240>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:433
------------------------------
STEP: checking eth0 MAC address

Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
2018/07/31 08:24:34 http: TLS handshake error from 10.128.0.1:48906: EOF
level=info timestamp=2018-07-31T08:24:36.466266Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:36.486930Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:40.331693Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:24:44 http: TLS handshake error from 10.128.0.1:48956: EOF
level=info timestamp=2018-07-31T08:24:50.547534Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:24:54 http: TLS handshake error from 10.128.0.1:49002: EOF
level=info timestamp=2018-07-31T08:24:59.526403Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:59.551554Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:24:59.578323Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:00.762268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:04.642557Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:25:04 http: TLS handshake error from 10.128.0.1:49050: EOF
level=info timestamp=2018-07-31T08:25:06.742883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:06.781225Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:23:01 http: TLS handshake error from 10.129.0.1:45476: EOF
level=info timestamp=2018-07-31T08:23:11.070256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:23:11 http: TLS handshake error from 10.129.0.1:45486: EOF
2018/07/31 08:23:21 http: TLS handshake error from 10.129.0.1:45496: EOF
2018/07/31 08:23:31 http: TLS handshake error from 10.129.0.1:45506: EOF
2018/07/31 08:23:41 http: TLS handshake error from 10.129.0.1:45516: EOF
2018/07/31 08:23:51 http: TLS handshake error from 10.129.0.1:45526: EOF
2018/07/31 08:24:01 http: TLS handshake error from 10.129.0.1:45536: EOF
2018/07/31 08:24:11 http: TLS handshake error from 10.129.0.1:45548: EOF
2018/07/31 08:24:21 http: TLS handshake error from 10.129.0.1:45558: EOF
2018/07/31 08:24:31 http: TLS handshake error from 10.129.0.1:45568: EOF
level=info timestamp=2018-07-31T08:24:41.213899Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:24:41 http: TLS handshake error from 10.129.0.1:45578: EOF
2018/07/31 08:24:51 http: TLS handshake error from 10.129.0.1:45588: EOF
2018/07/31 08:25:01 http: TLS handshake error from 10.129.0.1:45598: EOF

Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:21:14.648156Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-fn7p9
Pod phase: Running
level=info timestamp=2018-07-31T08:21:15.892905Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-31T08:21:15.892959Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer"
level=info timestamp=2018-07-31T08:21:15.892977Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-07-31T08:21:15.893037Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-31T08:21:15.905331Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-31T08:21:15.905412Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-31T08:21:15.905447Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-31T08:21:15.905508Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-31T08:24:01.372308Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:24:01.373193Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:24:34.206054Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:24:34.206791Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:25:07.347808Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:25:07.352284Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:25:07.644839Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidtv7p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidtv7p"

Pod name: virt-handler-gfmtm
Pod phase: Running
level=info timestamp=2018-07-31T08:17:35.700115Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n"
level=info timestamp=2018-07-31T08:17:35.700145Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-31T08:17:35.700206Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:17:35.700229Z pos=vm.go:331 component=virt-handler msg="Domain status: Paused, reason: StartingUp\n" level=info timestamp=2018-07-31T08:17:35.700344Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:17:35.697864Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:17:35.702107Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-31T08:17:35.733087Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:17:35.782032Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:17:35.782170Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n" level=info timestamp=2018-07-31T08:17:35.782210Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:17:35.782245Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:17:35.782269Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:17:35.784472Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:17:35.791155Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:24:52.835377Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:24:52.848529Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
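Note that both virt-api replicas above answer the aggregated-API discovery requests (GET /apis/subresources.kubevirt.io/v1alpha2) with 200 within milliseconds, yet the test itself failed with a 504 "Timeout" from the cluster API server. A manual probe of the same endpoint can show whether the aggregation layer or virt-api is the slow hop; this is a hypothetical diagnostic command, not part of the test suite:

#!/bin/bash
# Hit the aggregated endpoint that the virt-api access log shows being polled.
# A healthy path returns the APIResourceList quickly; hanging here until the
# client deadline reproduces the "Timeout: request did not complete within
# allowed duration" (HTTP 504) reported in the failure above.
set -euo pipefail
time oc get --raw "/apis/subresources.kubevirt.io/v1alpha2?timeout=32s"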
Pod name: netcat7hl6l
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcat8hw2f
Pod phase: Failed
++ head -n 1
+++ nc wrongservice.kubevirt-test-default 1500 -i 1 -w 1
Ncat: Could not resolve hostname "wrongservice.kubevirt-test-default": Name or service not known. QUITTING.
+ x=
+ echo ''
+ '[' '' = 'Hello World!' ']'
+ echo failed
+ exit 1
failed

Pod name: netcatbnvsp
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatgsvrx
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcathfb78
Pod phase: Succeeded
++ head -n 1
+++ nc my-subdomain.myvmi.kubevirt-test-default 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatnsdjh
Pod phase: Succeeded
++ head -n 1
+++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcatvnhj7
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: virt-launcher-testvmi5qjfq-zkvnd
Pod phase: Running
level=info timestamp=2018-07-31T08:17:34.067085Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:34.274743Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.277449Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 20ddae30-e03e-4bb2-9da2-8cffd0836e9a"
level=info timestamp=2018-07-31T08:17:34.278469Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.703301Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.942040Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:34.944607Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:34.946820Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.955794Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.956052Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.966122Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.970354Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.302311Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 20ddae30-e03e-4bb2-9da2-8cffd0836e9a: 188"
level=info timestamp=2018-07-31T08:17:35.333076Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.363869Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmi6dncv-fc75j
Pod phase: Running
level=info timestamp=2018-07-31T08:24:52.492884Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 028c9143-1288-4631-9b78-a761d1068645"
level=info timestamp=2018-07-31T08:24:52.493303Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:24:52.735428Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:24:52.781729Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:52.799872Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:52.806148Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:24:52.814196Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:24:52.829020Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:52.833057Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:52.838220Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:52.970236Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network"
level=info timestamp=2018-07-31T08:24:52.971499Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf"
level=info timestamp=2018-07-31T08:24:52.971807Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local"
level=info timestamp=2018-07-31T08:24:52.975990Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:53.511194Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 028c9143-1288-4631-9b78-a761d1068645: 185"

Pod name: virt-launcher-testvmidtv7p-rrr5l
Pod phase: Pending

Pod name: virt-launcher-testvmifrb2f-9nzzm
Pod phase: Running
level=info timestamp=2018-07-31T08:24:18.302228Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.095686Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a092e255-c17e-437f-b563-96ab34f32dce"
level=info timestamp=2018-07-31T08:24:19.096613Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:24:19.103033Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:24:19.134235Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.581606Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.605897Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:19.611218Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.635322Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.658852Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:24:19.673369Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:19.685913Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:19.733525Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.861684Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:20.100875Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a092e255-c17e-437f-b563-96ab34f32dce: 184"
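The netcat pods above all run variants of a single probe; reconstructed from their xtrace output (the target host varies per pod, the port 1500 is fixed), the script is roughly the sketch below. The argument handling is an assumption added for self-containment:

#!/bin/bash
# Reconstruction of the probe run by the netcat pods, inferred from the
# xtrace output above. It reads one line from the target, compares it with
# the expected greeting, and exits 0/1 so the pod phase becomes
# Succeeded/Failed.
target="${1:?usage: probe.sh <host-or-service>}"
x=$(nc "$target" 1500 -i 1 -w 1 | head -n 1)
echo "$x"
if [ "$x" = 'Hello World!' ]; then
    echo succeeded
    exit 0
else
    echo failed
    exit 1
fi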
Pod name: virt-launcher-testvmij2v8q-252p2
Pod phase: Running
level=info timestamp=2018-07-31T08:17:33.253562Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-31T08:17:34.582041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:34.587136Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3"
level=info timestamp=2018-07-31T08:17:34.587323Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.633396Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.593304Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3: 189"
level=info timestamp=2018-07-31T08:17:35.610946Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.650576Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:35.652668Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.656550Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.703578Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.705212Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.723370Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.733865Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.789786Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmip7728-4mftv
Pod phase: Failed
level=info timestamp=2018-07-31T08:19:43.633481Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:19:44.423135Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:19:44.450632Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:44.564146Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 139f49f9-c671-4bc7-80e6-b303daaab306"
level=info timestamp=2018-07-31T08:19:44.566752Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:19:44.969675Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.001262Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.014438Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.019241Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.038731Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:19:45.044794Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.055611Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.061486Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.136124Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.571927Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 139f49f9-c671-4bc7-80e6-b303daaab306: 184"

Pod name: virt-launcher-testvmipht6z-snllx
Pod phase: Running
level=info timestamp=2018-07-31T08:22:01.098433Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-31T08:22:01.098764Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-31T08:22:01.101643Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-31T08:22:11.380863Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-31T08:22:11.448717Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipht6z"
level=info timestamp=2018-07-31T08:22:11.451622Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-31T08:22:11.452281Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmiqvrpl-dpdqf
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.402892Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:35.415771Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 94358403-9291-4524-a269-1405f1d0252b"
level=info timestamp=2018-07-31T08:17:35.416664Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:35.606388Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.987156Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.037007Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.051593Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:36.054011Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:36.435923Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 94358403-9291-4524-a269-1405f1d0252b: 189"
level=info timestamp=2018-07-31T08:17:36.686087Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:36.687194Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.700814Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.618002Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:49.309218Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:49.554269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmitvm6q-8656c
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.263855Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.621421Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:36.654800Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a7d70eee-4108-44d0-8b04-23888fb77672"
level=info timestamp=2018-07-31T08:17:36.659190Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:36.829064Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.831106Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:37.013574Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.014243Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.031686Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.635607Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.635950Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.673030Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.686858Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a7d70eee-4108-44d0-8b04-23888fb77672: 195"
level=info timestamp=2018-07-31T08:17:37.781666Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:39.529238Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

• Failure [33.254 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with disabled automatic attachment of interfaces
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:440
    should not configure any external interfaces [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:441

    Expected error:
        <*errors.StatusError | 0xc4209ea510>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:448
------------------------------
STEP: checking loopback is the only guest interface

Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
2018/07/31 08:25:04 http: TLS handshake error from 10.128.0.1:49050: EOF
level=info timestamp=2018-07-31T08:25:06.742883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:06.781225Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:10.849464Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:25:14 http: TLS handshake error from 10.128.0.1:49100: EOF
level=info timestamp=2018-07-31T08:25:20.989619Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:25:24 http: TLS handshake error from 10.128.0.1:49146: EOF
level=info timestamp=2018-07-31T08:25:31.111307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:32.871579Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:25:32.872840Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/31 08:25:34 http: TLS handshake error from 10.128.0.1:49192: EOF
level=info timestamp=2018-07-31T08:25:34.738580Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:36.964251Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:36.983607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:41.302136Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:23:31 http: TLS handshake error from 10.129.0.1:45506: EOF
2018/07/31 08:23:41 http: TLS handshake error from 10.129.0.1:45516: EOF
2018/07/31 08:23:51 http: TLS handshake error from 10.129.0.1:45526: EOF
2018/07/31 08:24:01 http: TLS handshake error from 10.129.0.1:45536: EOF
2018/07/31 08:24:11 http: TLS handshake error from 10.129.0.1:45548: EOF
2018/07/31 08:24:21 http: TLS handshake error from 10.129.0.1:45558: EOF
2018/07/31 08:24:31 http: TLS handshake error from 10.129.0.1:45568: EOF
level=info timestamp=2018-07-31T08:24:41.213899Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:24:41 http: TLS handshake error from 10.129.0.1:45578: EOF
2018/07/31 08:24:51 http: TLS handshake error from 10.129.0.1:45588: EOF
2018/07/31 08:25:01 http: TLS handshake error from 10.129.0.1:45598: EOF
level=info timestamp=2018-07-31T08:25:11.141095Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:25:11 http: TLS handshake error from 10.129.0.1:45608: EOF
2018/07/31 08:25:21 http: TLS handshake error from 10.129.0.1:45618: EOF
2018/07/31 08:25:31 http: TLS handshake error from 10.129.0.1:45628: EOF

Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:21:14.648156Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-controller-7d57d96b65-fn7p9
Pod phase: Running
level=info timestamp=2018-07-31T08:21:15.905331Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-31T08:21:15.905412Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-31T08:21:15.905447Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-31T08:21:15.905508Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-31T08:24:01.372308Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:24:01.373193Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:24:34.206054Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:24:34.206791Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:25:07.347808Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:25:07.352284Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:25:07.644839Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidtv7p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidtv7p"
level=info timestamp=2018-07-31T08:25:40.681155Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:25:40.682752Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:25:40.784640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7lmk4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7lmk4"
level=info timestamp=2018-07-31T08:25:40.811811Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7lmk4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7lmk4"

Pod name: virt-handler-gfmtm
Pod phase: Running
level=info timestamp=2018-07-31T08:17:35.782269Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:17:35.784472Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:17:35.791155Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:25:14.936425Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi5qjfq, existing: true\n"
level=info timestamp=2018-07-31T08:25:14.938379Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:25:14.938501Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:25:14.938568Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:25:14.938901Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:25:14.937234Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n"
level=info timestamp=2018-07-31T08:25:14.939645Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:25:14.940451Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:25:14.940673Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:25:14.941615Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:25:14.958727Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:25:14.960951Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-l7664
Pod phase: Running
level=info timestamp=2018-07-31T08:25:26.301566Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="No update processing required"
level=error timestamp=2018-07-31T08:25:26.331347Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidtv7p\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-07-31T08:25:26.331866Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidtv7p\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidtv7p"
level=info timestamp=2018-07-31T08:25:26.332186Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidtv7p, existing: true\n"
level=info timestamp=2018-07-31T08:25:26.332360Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:25:26.332682Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:25:26.332851Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:25:26.333177Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:25:26.343506Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:25:26.344098Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidtv7p, existing: true\n"
level=info timestamp=2018-07-31T08:25:26.344328Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:25:26.344585Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:25:26.344783Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:25:26.345138Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:25:26.351507Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
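The repeated "Operation cannot be fulfilled ... the object has been modified" messages above are ordinary Kubernetes optimistic-concurrency conflicts: virt-controller and virt-handler both write VMI status, and a write carrying a stale resourceVersion is rejected, after which the component re-enqueues the object. A hypothetical manual equivalent of that retry (jq and the label key are illustrative, not from this run):

#!/bin/bash
# Re-read the object on every attempt so the write carries the latest
# resourceVersion, mirroring what the re-enqueue in the logs achieves.
vmi=testvmidtv7p
ns=kubevirt-test-default
until oc get virtualmachineinstance "$vmi" -n "$ns" -o json \
      | jq '.metadata.labels["debug-touched"]="true"' \
      | oc replace -n "$ns" -f -; do
    echo "conflict, retrying against the latest version" >&2
    sleep 1
done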
Pod name: netcat7hl6l
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcat8hw2f
Pod phase: Failed
++ head -n 1
+++ nc wrongservice.kubevirt-test-default 1500 -i 1 -w 1
Ncat: Could not resolve hostname "wrongservice.kubevirt-test-default": Name or service not known. QUITTING.
+ x=
+ echo ''
+ '[' '' = 'Hello World!' ']'
+ echo failed
+ exit 1
failed

Pod name: netcatbnvsp
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatgsvrx
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcathfb78
Pod phase: Succeeded
++ head -n 1
+++ nc my-subdomain.myvmi.kubevirt-test-default 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatnsdjh
Pod phase: Succeeded
++ head -n 1
+++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcatvnhj7
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: virt-launcher-testvmi5qjfq-zkvnd
Pod phase: Running
level=info timestamp=2018-07-31T08:17:34.274743Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.277449Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 20ddae30-e03e-4bb2-9da2-8cffd0836e9a"
level=info timestamp=2018-07-31T08:17:34.278469Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.703301Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.942040Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:34.944607Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:34.946820Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.955794Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.956052Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.966122Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:34.970354Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.302311Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 20ddae30-e03e-4bb2-9da2-8cffd0836e9a: 188"
level=info timestamp=2018-07-31T08:17:35.333076Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.363869Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:25:14.956951Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmi6dncv-fc75j
Pod phase: Running
level=info timestamp=2018-07-31T08:24:52.492884Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 028c9143-1288-4631-9b78-a761d1068645"
level=info timestamp=2018-07-31T08:24:52.493303Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:24:52.735428Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:24:52.781729Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:52.799872Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:52.806148Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:24:52.814196Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:24:52.829020Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:52.833057Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:52.838220Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:52.970236Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network"
level=info timestamp=2018-07-31T08:24:52.971499Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf"
level=info timestamp=2018-07-31T08:24:52.971807Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local"
level=info timestamp=2018-07-31T08:24:52.975990Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:53.511194Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 028c9143-1288-4631-9b78-a761d1068645: 185"

Pod name: virt-launcher-testvmi7lmk4-cptfg
Pod phase: Pending

Pod name: virt-launcher-testvmidtv7p-rrr5l
Pod phase: Running
level=info timestamp=2018-07-31T08:25:25.900095Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:25:25.903895Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 7be33a30-1221-48f3-8b64-3c33f21a8cff"
level=info timestamp=2018-07-31T08:25:25.904598Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:25:25.916261Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:25:26.169005Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:25:26.216710Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:25:26.228330Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:25:26.236358Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:25:26.257643Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:25:26.257852Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:25:26.285656Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:25:26.296898Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:25:26.337431Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:25:26.350069Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:25:26.908658Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 7be33a30-1221-48f3-8b64-3c33f21a8cff: 185"

Pod name: virt-launcher-testvmifrb2f-9nzzm
Pod phase: Running
level=info timestamp=2018-07-31T08:24:18.302228Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.095686Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a092e255-c17e-437f-b563-96ab34f32dce"
level=info timestamp=2018-07-31T08:24:19.096613Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:24:19.103033Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:24:19.134235Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.581606Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.605897Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:19.611218Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.635322Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:24:19.658852Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:24:19.673369Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:19.685913Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:24:19.733525Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:24:19.861684Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:24:20.100875Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a092e255-c17e-437f-b563-96ab34f32dce: 184"

Pod name: virt-launcher-testvmij2v8q-252p2
Pod phase: Running
level=info timestamp=2018-07-31T08:17:34.582041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:34.587136Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3"
level=info timestamp=2018-07-31T08:17:34.587323Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.633396Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.593304Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3: 189"
level=info timestamp=2018-07-31T08:17:35.610946Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.650576Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:35.652668Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:35.656550Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.703578Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.705212Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:35.723370Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:35.733865Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.789786Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:25:14.951753Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmip7728-4mftv
Pod phase: Failed
level=info timestamp=2018-07-31T08:19:43.633481Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:19:44.423135Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:19:44.450632Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:44.564146Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 139f49f9-c671-4bc7-80e6-b303daaab306"
level=info timestamp=2018-07-31T08:19:44.566752Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:19:44.969675Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.001262Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.014438Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.019241Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:19:45.038731Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:19:45.044794Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.055611Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:19:45.061486Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:19:45.136124Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:19:45.571927Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 139f49f9-c671-4bc7-80e6-b303daaab306: 184"

Pod name: virt-launcher-testvmipht6z-snllx
Pod phase: Running
level=info timestamp=2018-07-31T08:22:01.098433Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-31T08:22:01.098764Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-31T08:22:01.101643Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-31T08:22:11.380863Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-31T08:22:11.448717Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipht6z"
level=info timestamp=2018-07-31T08:22:11.451622Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-31T08:22:11.452281Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmiqvrpl-dpdqf
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.402892Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:35.415771Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 94358403-9291-4524-a269-1405f1d0252b"
level=info timestamp=2018-07-31T08:17:35.416664Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:35.606388Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:35.987156Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.037007Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.051593Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:36.054011Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:36.435923Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 94358403-9291-4524-a269-1405f1d0252b: 189"
level=info timestamp=2018-07-31T08:17:36.686087Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:36.687194Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.700814Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.618002Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:49.309218Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:49.554269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmitvm6q-8656c
Pod phase: Failed
level=info timestamp=2018-07-31T08:17:35.263855Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:17:36.621421Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:17:36.654800Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a7d70eee-4108-44d0-8b04-23888fb77672"
level=info timestamp=2018-07-31T08:17:36.659190Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:36.829064Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:17:36.831106Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:37.013574Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.014243Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.031686Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.635607Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.635950Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.673030Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.686858Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a7d70eee-4108-44d0-8b04-23888fb77672: 195"
level=info timestamp=2018-07-31T08:17:37.781666Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:39.529238Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

• Failure in Spec Setup (BeforeEach) [33.549 seconds]
Slirp
/root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39
  should be able to [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    VirtualMachineInstance with slirp interface
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc420b333b0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:58
------------------------------

Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:25:34.738580Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:36.964251Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:36.983607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:25:41.302136Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200
contentLength=136 level=info timestamp=2018-07-31T08:25:42.049377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:25:44 http: TLS handshake error from 10.128.0.1:49242: EOF level=info timestamp=2018-07-31T08:25:51.532063Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:25:54 http: TLS handshake error from 10.128.0.1:49294: EOF level=info timestamp=2018-07-31T08:26:01.846534Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:26:04 http: TLS handshake error from 10.128.0.1:49340: EOF level=info timestamp=2018-07-31T08:26:04.899570Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:26:07.144673Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:26:07.160034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:26:12.040584Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:26:14 http: TLS handshake error from 10.128.0.1:49390: EOF Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:24:21 http: TLS handshake error from 10.129.0.1:45558: EOF 2018/07/31 08:24:31 http: TLS handshake error from 10.129.0.1:45568: EOF level=info timestamp=2018-07-31T08:24:41.213899Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:24:41 http: TLS handshake error from 10.129.0.1:45578: EOF 2018/07/31 08:24:51 http: TLS handshake error from 10.129.0.1:45588: EOF 2018/07/31 08:25:01 http: TLS handshake error from 10.129.0.1:45598: EOF level=info timestamp=2018-07-31T08:25:11.141095Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:25:11 http: TLS handshake error from 10.129.0.1:45608: EOF 2018/07/31 08:25:21 http: TLS handshake error from 10.129.0.1:45618: EOF 2018/07/31 08:25:31 http: TLS handshake error from 10.129.0.1:45628: EOF 2018/07/31 08:25:41 http: TLS handshake error from 10.129.0.1:45638: EOF 2018/07/31 08:25:51 http: TLS handshake error from 10.129.0.1:45648: EOF 2018/07/31 08:26:01 http: TLS handshake error from 10.129.0.1:45658: EOF level=info timestamp=2018-07-31T08:26:11.133096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:26:11 http: TLS handshake error from 10.129.0.1:45668: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:21:14.648156Z pos=application.go:177 
component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-fn7p9 Pod phase: Running level=info timestamp=2018-07-31T08:21:15.905447Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-31T08:21:15.905508Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-31T08:24:01.372308Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:24:01.373193Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:24:34.206054Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:24:34.206791Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:25:07.347808Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:25:07.352284Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:25:07.644839Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidtv7p\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidtv7p" level=info timestamp=2018-07-31T08:25:40.681155Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:25:40.682752Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:25:40.784640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7lmk4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7lmk4" level=info timestamp=2018-07-31T08:25:40.811811Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7lmk4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmi7lmk4" level=info timestamp=2018-07-31T08:26:14.073186Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidhtc6 kind= uid=51c47c7a-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:26:14.073709Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidhtc6 kind= uid=51c47c7a-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:17:35.782269Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:17:35.784472Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:17:35.791155Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:25:14.936425Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi5qjfq, existing: true\n" level=info timestamp=2018-07-31T08:25:14.938379Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:25:14.938501Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:25:14.938568Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:25:14.938901Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:25:14.937234Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: true\n" level=info timestamp=2018-07-31T08:25:14.939645Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:25:14.940451Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:25:14.940673Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:25:14.941615Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:25:14.958727Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:25:14.960951Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
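The "reenqueuing VirtualMachineInstance" and "Updating the VirtualMachineInstance status failed." records above are optimistic-concurrency conflicts: a status write raced with another writer, the API server rejected it with a 409 Conflict ("the object has been modified"), and the controller put the key back on its work queue to retry against the latest resourceVersion. A minimal sketch of the same conflict-retry idea using client-go's retry helper follows; the simulated update function is hypothetical and stands in for whatever call writes the VMI status (KubeVirt itself re-enqueues through a workqueue rather than retrying in place, as the records above show):

    package main

    import (
        "fmt"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        "k8s.io/apimachinery/pkg/runtime/schema"
        "k8s.io/client-go/util/retry"
    )

    func main() {
        attempts := 0
        // RetryOnConflict re-runs the function with backoff for as long as
        // it returns a Conflict error, which is exactly the error class
        // logged above for testvmi7lmk4.
        err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
            attempts++
            if attempts < 3 {
                // In a real controller you would re-GET the object here and
                // reapply the status change before calling Update again.
                return apierrors.NewConflict(
                    schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"},
                    "testvmi7lmk4",
                    fmt.Errorf("the object has been modified; please apply your changes to the latest version and try again"))
            }
            return nil // simulated update against a fresh resourceVersion
        })
        fmt.Printf("succeeded after %d attempts, err=%v\n", attempts, err)
    }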
Pod name: virt-handler-l7664
Pod phase: Running
level=info timestamp=2018-07-31T08:25:58.721773Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="No update processing required"
level=error timestamp=2018-07-31T08:25:58.731532Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7lmk4\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-07-31T08:25:58.731699Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7lmk4\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7lmk4"
level=info timestamp=2018-07-31T08:25:58.731774Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi7lmk4, existing: true\n"
level=info timestamp=2018-07-31T08:25:58.731796Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:25:58.731824Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:25:58.731844Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:25:58.731993Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:25:58.737273Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:25:58.751647Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi7lmk4, existing: true\n"
level=info timestamp=2018-07-31T08:25:58.751823Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:25:58.751885Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:25:58.751906Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:25:58.752049Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:25:58.757571Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."

Pod name: netcat7hl6l
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcat8hw2f
Pod phase: Failed
++ head -n 1
+++ nc wrongservice.kubevirt-test-default 1500 -i 1 -w 1
Ncat: Could not resolve hostname "wrongservice.kubevirt-test-default": Name or service not known. QUITTING.
+ x=
+ echo ''
+ '[' '' = 'Hello World!' ']'
+ echo failed
+ exit 1
failed

Pod name: netcatbnvsp
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatgsvrx
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcathfb78
Pod phase: Succeeded
++ head -n 1
+++ nc my-subdomain.myvmi.kubevirt-test-default 1500 -i 1 -w 1
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0
Hello World!
succeeded

Pod name: netcatnsdjh
Pod phase: Succeeded
++ head -n 1
+++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: netcatvnhj7
Pod phase: Succeeded
++ head -n 1
+++ nc 10.129.0.45 1500 -i 1 -w 1
Hello World!
succeeded
+ x='Hello World!'
+ echo 'Hello World!'
+ '[' 'Hello World!' = 'Hello World!' ']'
+ echo succeeded
+ exit 0

Pod name: virt-launcher-testvmi5qjfq-zkvnd
Pod phase: Running
level=info timestamp=2018-07-31T08:17:34.274743Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:34.277449Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 20ddae30-e03e-4bb2-9da2-8cffd0836e9a"
level=info timestamp=2018-07-31T08:17:34.278469Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:17:34.703301Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:34.942040Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Domain started."
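Every netcat pod above runs the same probe: connect to the target service or IP on port 1500, read one line, and pass only if it equals "Hello World!"; netcat8hw2f fails because wrongservice.kubevirt-test-default deliberately does not resolve. A rough Go equivalent of that probe (the address is taken from the traces above; this is an illustration, not the test suite's actual helper):

    package main

    import (
        "bufio"
        "fmt"
        "net"
        "os"
        "strings"
        "time"
    )

    func main() {
        // Same target the succeeding pods used; swap in
        // wrongservice.kubevirt-test-default:1500 to reproduce the failure.
        addr := "10.129.0.45:1500"

        conn, err := net.DialTimeout("tcp", addr, 5*time.Second)
        if err != nil {
            fmt.Println("failed:", err)
            os.Exit(1)
        }
        defer conn.Close()

        // Read a single line, like `head -n 1` over nc's output.
        conn.SetReadDeadline(time.Now().Add(5 * time.Second))
        line, err := bufio.NewReader(conn).ReadString('\n')
        if err != nil && line == "" {
            fmt.Println("failed:", err)
            os.Exit(1)
        }
        if strings.TrimSpace(line) == "Hello World!" {
            fmt.Println("succeeded")
            return
        }
        fmt.Println("failed: got", strings.TrimSpace(line))
        os.Exit(1)
    }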
level=info timestamp=2018-07-31T08:17:34.944607Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:34.946820Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:34.955794Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:34.956052Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:17:34.966122Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:34.970354Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.302311Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 20ddae30-e03e-4bb2-9da2-8cffd0836e9a: 188" level=info timestamp=2018-07-31T08:17:35.333076Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:35.363869Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind= uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:14.956951Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid=22d23ca2-949a-11e8-8b5c-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmi6dncv-fc75j Pod phase: Running level=info timestamp=2018-07-31T08:24:52.492884Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 028c9143-1288-4631-9b78-a761d1068645" level=info timestamp=2018-07-31T08:24:52.493303Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:24:52.735428Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:24:52.781729Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:24:52.799872Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:24:52.806148Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:24:52.814196Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:24:52.829020Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:24:52.833057Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:24:52.838220Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:24:52.970236Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-31T08:24:52.971499Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-31T08:24:52.971807Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-31T08:24:52.975990Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6dncv kind= uid=163e2818-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:24:53.511194Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 028c9143-1288-4631-9b78-a761d1068645: 185" Pod name: virt-launcher-testvmi7lmk4-cptfg Pod phase: Running level=info timestamp=2018-07-31T08:25:58.664286Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:25:58.665807Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:58.671003Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:25:58.671168Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:25:58.703310Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:25:58.716629Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:25:58.733075Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-31T08:25:58.733544Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-31T08:25:58.733582Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-31T08:25:58.736626Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:58.753145Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-31T08:25:58.753453Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-31T08:25:58.753493Z pos=converter.go:752 
component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-31T08:25:58.757063Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi7lmk4 kind= uid=3dc4bff8-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:59.362170Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 4959f899-9d42-4e03-bcf7-d6108f59bb5a: 180" Pod name: virt-launcher-testvmidhtc6-gdwzw Pod phase: Pending Pod name: virt-launcher-testvmidtv7p-rrr5l Pod phase: Running level=info timestamp=2018-07-31T08:25:25.900095Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:25:25.903895Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 7be33a30-1221-48f3-8b64-3c33f21a8cff" level=info timestamp=2018-07-31T08:25:25.904598Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:25:25.916261Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:25:26.169005Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:25:26.216710Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:25:26.228330Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:26.236358Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:25:26.257643Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:25:26.257852Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:25:26.285656Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:25:26.296898Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:25:26.337431Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:26.350069Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidtv7p kind= uid=29f2ce9b-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:26.908658Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 7be33a30-1221-48f3-8b64-3c33f21a8cff: 185" Pod name: virt-launcher-testvmifrb2f-9nzzm Pod phase: Running level=info timestamp=2018-07-31T08:24:18.302228Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:24:19.095686Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a092e255-c17e-437f-b563-96ab34f32dce" level=info timestamp=2018-07-31T08:24:19.096613Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:24:19.103033Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:24:19.134235Z 
pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:24:19.581606Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:24:19.605897Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:24:19.611218Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:24:19.635322Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:24:19.658852Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:24:19.673369Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:24:19.685913Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:24:19.733525Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:24:19.861684Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifrb2f kind= uid=0298a1ac-949b-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:24:20.100875Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a092e255-c17e-437f-b563-96ab34f32dce: 184" Pod name: virt-launcher-testvmij2v8q-252p2 Pod phase: Running level=info timestamp=2018-07-31T08:17:34.582041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:17:34.587136Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3" level=info timestamp=2018-07-31T08:17:34.587323Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:17:34.633396Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.593304Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6d6c26d8-a965-4ba0-a1b5-79e9993ed3f3: 189" level=info timestamp=2018-07-31T08:17:35.610946Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:17:35.650576Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:17:35.652668Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:35.656550Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:35.703578Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.705212Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:17:35.723370Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:35.733865Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.789786Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind= uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:25:14.951753Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid=230060fc-949a-11e8-8b5c-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmip7728-4mftv Pod phase: Failed level=info timestamp=2018-07-31T08:19:43.633481Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:19:44.423135Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:19:44.450632Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:19:44.564146Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 139f49f9-c671-4bc7-80e6-b303daaab306" level=info timestamp=2018-07-31T08:19:44.566752Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:19:44.969675Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:19:45.001262Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:19:45.014438Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:19:45.019241Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:19:45.038731Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:19:45.044794Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:19:45.055611Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:19:45.061486Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:19:45.136124Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip7728 kind= uid=70945e1c-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:19:45.571927Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 139f49f9-c671-4bc7-80e6-b303daaab306: 184" Pod name: virt-launcher-testvmipht6z-snllx Pod phase: Running level=info timestamp=2018-07-31T08:22:01.098433Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-31T08:22:01.098764Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-31T08:22:01.101643Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-31T08:22:11.380863Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-31T08:22:11.448717Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipht6z" level=info timestamp=2018-07-31T08:22:11.451622Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-31T08:22:11.452281Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiqvrpl-dpdqf Pod phase: Failed level=info timestamp=2018-07-31T08:17:35.402892Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:17:35.415771Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 94358403-9291-4524-a269-1405f1d0252b" level=info timestamp=2018-07-31T08:17:35.416664Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:17:35.606388Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:35.987156Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:17:36.037007Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:17:36.051593Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:36.054011Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:36.435923Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 94358403-9291-4524-a269-1405f1d0252b: 189" level=info timestamp=2018-07-31T08:17:36.686087Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:36.687194Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:17:36.700814Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:17:37.618002Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:17:49.309218Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:17:49.554269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiqvrpl kind= uid=22f91677-949a-11e8-8b5c-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmitvm6q-8656c Pod phase: Failed level=info timestamp=2018-07-31T08:17:35.263855Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:17:36.621421Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:17:36.654800Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a7d70eee-4108-44d0-8b04-23888fb77672" level=info timestamp=2018-07-31T08:17:36.659190Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:17:36.829064Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:17:36.831106Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:17:37.013574Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.014243Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.031686Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.635607Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:37.635950Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:17:37.673030Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:17:37.686858Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a7d70eee-4108-44d0-8b04-23888fb77672: 195"
level=info timestamp=2018-07-31T08:17:37.781666Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:17:39.529238Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitvm6q kind= uid=22cfb5da-949a-11e8-8b5c-525500d15501 msg="Synced vmi"

• Failure in Spec Setup (BeforeEach) [33.674 seconds]
Slirp
/root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39
  should be able to [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    VirtualMachineInstance with slirp interface with custom MAC address
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc420b326c0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:58
------------------------------

Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:26:35.147586Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:26:37.649305Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:26:37.652107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:26:43.667795Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0
statusCode=200 contentLength=136 2018/07/31 08:26:44 http: TLS handshake error from 10.128.0.1:49532: EOF level=info timestamp=2018-07-31T08:26:53.845378Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:26:54 http: TLS handshake error from 10.128.0.1:49578: EOF level=info timestamp=2018-07-31T08:27:04.011549Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:27:04 http: TLS handshake error from 10.128.0.1:49624: EOF level=info timestamp=2018-07-31T08:27:05.346155Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:27:07.854087Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:27:07.859031Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:27:12.074238Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:27:12.118915Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:27:12.158682Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:25:21 http: TLS handshake error from 10.129.0.1:45618: EOF 2018/07/31 08:25:31 http: TLS handshake error from 10.129.0.1:45628: EOF 2018/07/31 08:25:41 http: TLS handshake error from 10.129.0.1:45638: EOF 2018/07/31 08:25:51 http: TLS handshake error from 10.129.0.1:45648: EOF 2018/07/31 08:26:01 http: TLS handshake error from 10.129.0.1:45658: EOF level=info timestamp=2018-07-31T08:26:11.133096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:26:11 http: TLS handshake error from 10.129.0.1:45668: EOF 2018/07/31 08:26:21 http: TLS handshake error from 10.129.0.1:45680: EOF 2018/07/31 08:26:31 http: TLS handshake error from 10.129.0.1:45690: EOF level=info timestamp=2018-07-31T08:26:41.179107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:26:41 http: TLS handshake error from 10.129.0.1:45696: EOF 2018/07/31 08:26:51 http: TLS handshake error from 10.129.0.1:45706: EOF 2018/07/31 08:27:01 http: TLS handshake error from 10.129.0.1:45716: EOF level=info timestamp=2018-07-31T08:27:11.204268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 
contentLength=19 2018/07/31 08:27:11 http: TLS handshake error from 10.129.0.1:45728: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:26:42.090702Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-31T08:26:42.093339Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-31T08:26:42.100542Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-31T08:26:42.100703Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-31T08:26:42.100826Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer" level=info timestamp=2018-07-31T08:26:42.107133Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-31T08:26:42.109453Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-31T08:26:42.115262Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-31T08:26:42.118099Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-31T08:26:42.119058Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-31T08:26:42.119761Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-31T08:26:42.120743Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-31T08:27:12.827706Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2kl5x kind= uid=74c4a6e9-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:27:12.829057Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2kl5x kind= uid=74c4a6e9-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:27:13.090093Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2kl5x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2kl5x" Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:26:19.879661Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.879892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
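The virt-controller-7d57d96b65-c55ct records just above show the leader-election handoff that the LeaderElection test exercises: after the leading controller pod is destroyed, a standby replica acquires the lock and only then starts its informers and controllers ("STARTING informer ...", "Starting VirtualMachine controller."). The usual client-go shape of that pattern looks roughly like the sketch below; the lock name, namespace, and startControllers callback are illustrative, not KubeVirt's actual configuration (which in this era used a different lock type):

    package main

    import (
        "context"
        "os"
        "time"

        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
        "k8s.io/client-go/tools/leaderelection"
        "k8s.io/client-go/tools/leaderelection/resourcelock"
    )

    func main() {
        cfg, err := rest.InClusterConfig()
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        // Hypothetical lease lock; the losing replica blocks here, which is
        // why only one virt-controller pod at a time logs controller startup.
        lock, err := resourcelock.New(
            resourcelock.LeasesResourceLock,
            "kube-system", "virt-controller-demo",
            client.CoreV1(), client.CoordinationV1(),
            resourcelock.ResourceLockConfig{Identity: os.Getenv("POD_NAME")},
        )
        if err != nil {
            panic(err)
        }

        leaderelection.RunOrDie(context.Background(), leaderelection.LeaderElectionConfig{
            Lock:          lock,
            LeaseDuration: 15 * time.Second,
            RenewDeadline: 10 * time.Second,
            RetryPeriod:   2 * time.Second,
            Callbacks: leaderelection.LeaderCallbacks{
                // Informers and controllers start only after winning the
                // election, matching the c55ct log above.
                OnStartedLeading: startControllers,
                OnStoppedLeading: func() { os.Exit(1) },
            },
        })
    }

    func startControllers(ctx context.Context) { <-ctx.Done() }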
level=info timestamp=2018-07-31T08:26:20.321287Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n" level=info timestamp=2018-07-31T08:26:20.321500Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:26:20.321596Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-31T08:26:20.321781Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-31T08:26:20.321907Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-31T08:26:20.325880Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmij2v8q" level=info timestamp=2018-07-31T08:26:20.332521Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:20.333326Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-31T08:26:20.335852Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:26:20.336052Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n" level=info timestamp=2018-07-31T08:26:20.336198Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:20.336386Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:20.336747Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:26:19.604757Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.604849Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.605247Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:19.629982Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidtv7p, existing: false\n" level=info timestamp=2018-07-31T08:26:19.631005Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.631697Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-31T08:26:19.632505Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:19.723326Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipht6z, existing: false\n"
level=info timestamp=2018-07-31T08:26:19.723492Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:26:19.723710Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:19.724008Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:19.804767Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitvm6q, existing: false\n"
level=info timestamp=2018-07-31T08:26:19.804840Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:26:19.804910Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:19.805011Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi2kl5x-jxqvm
Pod phase: Pending

• Failure [55.751 seconds]
LeaderElection
/root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43
  Start a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53
    when the controller pod is not running
    /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54
      should success [It]
      /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55

      Expected
          <*errors.StatusError | 0xc42016ef30>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:91
------------------------------
STEP: Destroying the leading controller pod
STEP: Starting a new VirtualMachineInstance
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:27:05.346155Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:07.854087Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:07.859031Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:12.074238Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:12.118915Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:12.158682Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:14.127481Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:27:14 http: TLS handshake error from 10.128.0.1:49674: EOF
level=info timestamp=2018-07-31T08:27:24.286446Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:27:24 http: TLS handshake error from 10.128.0.1:49720: EOF
level=info timestamp=2018-07-31T08:27:34.436044Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:27:34 http: TLS handshake error from 10.128.0.1:49766: EOF
level=info timestamp=2018-07-31T08:27:35.544175Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:38.012316Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:27:38.020547Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:26:01 http: TLS handshake error from 10.129.0.1:45658: EOF
level=info timestamp=2018-07-31T08:26:11.133096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:26:11 http: TLS handshake error from 10.129.0.1:45668: EOF
2018/07/31 08:26:21 http: TLS handshake error from 10.129.0.1:45680: EOF
2018/07/31 08:26:31 http: TLS handshake error from 10.129.0.1:45690: EOF
level=info timestamp=2018-07-31T08:26:41.179107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:26:41 http: TLS handshake error from 10.129.0.1:45696: EOF
2018/07/31 08:26:51 http: TLS handshake error from 10.129.0.1:45706: EOF
2018/07/31 08:27:01 http: TLS handshake error from 10.129.0.1:45716: EOF
level=info timestamp=2018-07-31T08:27:11.204268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:27:11 http: TLS handshake error from 10.129.0.1:45728: EOF
2018/07/31 08:27:21 http: TLS handshake error from 10.129.0.1:45738: EOF
2018/07/31 08:27:31 http: TLS handshake error from 10.129.0.1:45748: EOF
level=info timestamp=2018-07-31T08:27:41.242671Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:27:41 http: TLS handshake error from 10.129.0.1:45758: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:27:13.541019Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2kl5x\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2kl5x, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 74c4a6e9-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2kl5x"
level=info timestamp=2018-07-31T08:27:43.803298Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fthb kind= uid=873754bf-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:27:43.803660Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9fthb kind= uid=873754bf-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:27:44.092304Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9fthb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fthb"
level=info timestamp=2018-07-31T08:27:44.153142Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9fthb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9fthb"
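Note on the "Operation cannot be fulfilled ... the object has been modified" lines above: that is the apiserver's optimistic-concurrency 409 Conflict, which the virt-controller absorbs by re-enqueuing the VMI. A minimal sketch of the same pattern with client-go follows; updateVMIStatus is a hypothetical stand-in for whatever read-modify-write the controller performs, not KubeVirt's actual code.

package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/client-go/util/retry"
)

// updateVMIStatus is a hypothetical helper: GET the latest object,
// mutate it, PUT it back. A 409 Conflict means another writer won
// the race since our GET.
func updateVMIStatus() error {
	return nil
}

func main() {
	// RetryOnConflict re-runs the closure with backoff whenever the
	// apiserver answers 409 ("the object has been modified"), which is
	// the same condition the controller logs handle by re-enqueuing.
	err := retry.RetryOnConflict(retry.DefaultRetry, updateVMIStatus)
	if errors.IsConflict(err) {
		fmt.Println("still conflicting after retries; requeue the key")
	}
}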
Pod name: virt-launcher-testvmi9fthb-f49pt
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.890 seconds]
VNC
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54
    with VNC connection [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62
      should allow accessing the VNC device
      /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64

      Expected success, but got an error:
          <*errors.StatusError | 0xc420b33440>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration

      /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:58
------------------------------
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
2018/07/31 08:27:44 http: TLS handshake error from 10.128.0.1:49816: EOF
level=info timestamp=2018-07-31T08:27:44.817657Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:27:54 http: TLS handshake error from 10.128.0.1:49862: EOF
level=info timestamp=2018-07-31T08:27:55.036823Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:00.597662Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:00.645244Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:00.682074Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:28:04 http: TLS handshake error from 10.128.0.1:49910: EOF
level=info timestamp=2018-07-31T08:28:05.317218Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:05.674726Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:08.400652Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:08.454853Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:11.797877Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:28:14 http: TLS handshake error from 10.128.0.1:49960: EOF
level=info timestamp=2018-07-31T08:28:15.556170Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:27:51 http: TLS handshake error from 10.129.0.1:45768: EOF
2018/07/31 08:28:01 http: TLS handshake error from 10.129.0.1:45778: EOF
2018/07/31 08:28:11 http: TLS handshake error from 10.129.0.1:45788: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:28:15.003398Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigczkl kind= uid=99d7541b-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:28:15.004177Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigczkl kind= uid=99d7541b-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:28:15.238429Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigczkl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigczkl"
Pod name: virt-launcher-testvmigczkl-fjlq9
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [31.234 seconds]
VNC
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54
    should upgrade subresource connections if an origin header is given [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      for vnc
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

      Expected success, but got an error:
          <*errors.StatusError | 0xc420b323f0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration

      /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:58
------------------------------
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
2018/07/31 08:28:24 http: TLS handshake error from 10.128.0.1:50006: EOF
level=info timestamp=2018-07-31T08:28:25.882462Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:32.911720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:28:32.917278Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/31 08:28:34 http: TLS handshake error from 10.128.0.1:50052: EOF
level=info timestamp=2018-07-31T08:28:35.906329Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:36.055790Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:38.760626Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:28:38.778325Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:28:44 http: TLS handshake error from 10.128.0.1:50104: EOF
level=info timestamp=2018-07-31T08:28:46.270616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:28:21 http: TLS handshake error from 10.129.0.1:45798: EOF
2018/07/31 08:28:31 http: TLS handshake error from 10.129.0.1:45808: EOF
level=info timestamp=2018-07-31T08:28:41.058313Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:28:41 http: TLS handshake error from 10.129.0.1:45818: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:28:15.845830Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigczkl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigczkl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 99d7541b-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigczkl"
level=info timestamp=2018-07-31T08:28:46.047071Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76c5r kind= uid=ac591ad9-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:28:46.047870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76c5r kind= uid=ac591ad9-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:28:46.302733Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi76c5r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi76c5r"
level=info timestamp=2018-07-31T08:28:46.424637Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi76c5r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi76c5r"
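Note on the origin-header specs: the file paths in the failure above (and in the serial-console failure that follows) point into vendor/github.com/onsi/ginkgo/extensions/table, so "for vnc" and "for serial console" are two entries of one table-driven spec. A sketch of that shape is below; the body and parameter are placeholders, not the real test from vnc_test.go.

package tests_test

import (
	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/extensions/table"
)

var _ = Describe("VNC", func() {
	// One table, two entries: only the subresource name varies, which
	// is why both CI failures carry the same spec description.
	table.DescribeTable("should upgrade subresource connections if an origin header is given",
		func(subresource string) {
			// Placeholder body: open a websocket against
			// .../virtualmachineinstances/<name>/<subresource> with an
			// Origin header and assert the 101 upgrade succeeds.
		},
		table.Entry("for vnc", "vnc"),
		table.Entry("for serial console", "console"),
	)
})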
Pod name: virt-launcher-testvmi76c5r-zh8p9
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [31.086 seconds]
VNC
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54
    should upgrade subresource connections if an origin header is given [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      for serial console
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

      Expected success, but got an error:
          <*errors.StatusError | 0xc42016fa70>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration

      /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:58
------------------------------
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
2018/07/31 08:28:54 http: TLS handshake error from 10.128.0.1:50150: EOF
level=info timestamp=2018-07-31T08:28:56.528343Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:29:04 http: TLS handshake error from 10.128.0.1:50196: EOF
level=info timestamp=2018-07-31T08:29:06.196732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:06.762714Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:09.109294Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:09.123562Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:29:14 http: TLS handshake error from 10.128.0.1:50246: EOF
level=info timestamp=2018-07-31T08:29:16.961256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:28:51 http: TLS handshake error from 10.129.0.1:45828: EOF
2018/07/31 08:29:01 http: TLS handshake error from 10.129.0.1:45838: EOF
level=info timestamp=2018-07-31T08:29:11.055910Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:29:11 http: TLS handshake error from 10.129.0.1:45848: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:29:17.079907Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskfw4 kind= uid=bed4755c-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:29:17.081447Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskfw4 kind= uid=bed4755c-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:29:17.427199Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiskfw4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiskfw4"
Pod name: virt-launcher-testvmiskfw4-56fjv Pod phase: Pending • Failure [31.017 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a cirros image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67 should return that we are running cirros [It] /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68 Expected success, but got an error: <*errors.StatusError | 0xc420a13050>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:51 ------------------------------ STEP: Creating a new VirtualMachineInstance Pod name: disks-images-provider-mwfrr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-zbzrs Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5k8nw Pod phase: Running level=info timestamp=2018-07-31T08:29:06.196732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:29:06.762714Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:29:09.109294Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:29:09.123562Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:29:14 http: TLS handshake error from 10.128.0.1:50246: EOF level=info timestamp=2018-07-31T08:29:16.961256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:29:24 http: TLS handshake error from 10.128.0.1:50292: EOF level=info timestamp=2018-07-31T08:29:27.245380Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:29:34 http: TLS handshake error from 10.128.0.1:50338: EOF level=info timestamp=2018-07-31T08:29:36.441764Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:29:37.537696Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:29:39.505563Z pos=filter.go:46 
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:29:06.196732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:06.762714Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:09.109294Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:09.123562Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:29:14 http: TLS handshake error from 10.128.0.1:50246: EOF
level=info timestamp=2018-07-31T08:29:16.961256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:29:24 http: TLS handshake error from 10.128.0.1:50292: EOF
level=info timestamp=2018-07-31T08:29:27.245380Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:29:34 http: TLS handshake error from 10.128.0.1:50338: EOF
level=info timestamp=2018-07-31T08:29:36.441764Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:37.537696Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:39.505563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:29:39.555066Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:29:44 http: TLS handshake error from 10.128.0.1:50388: EOF
level=info timestamp=2018-07-31T08:29:47.692002Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:27:51 http: TLS handshake error from 10.129.0.1:45768: EOF
2018/07/31 08:28:01 http: TLS handshake error from 10.129.0.1:45778: EOF
2018/07/31 08:28:11 http: TLS handshake error from 10.129.0.1:45788: EOF
2018/07/31 08:28:21 http: TLS handshake error from 10.129.0.1:45798: EOF
2018/07/31 08:28:31 http: TLS handshake error from 10.129.0.1:45808: EOF
level=info timestamp=2018-07-31T08:28:41.058313Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:28:41 http: TLS handshake error from 10.129.0.1:45818: EOF
2018/07/31 08:28:51 http: TLS handshake error from 10.129.0.1:45828: EOF
2018/07/31 08:29:01 http: TLS handshake error from 10.129.0.1:45838: EOF
level=info timestamp=2018-07-31T08:29:11.055910Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:29:11 http: TLS handshake error from 10.129.0.1:45848: EOF
2018/07/31 08:29:21 http: TLS handshake error from 10.129.0.1:45858: EOF
2018/07/31 08:29:31 http: TLS handshake error from 10.129.0.1:45868: EOF
level=info timestamp=2018-07-31T08:29:41.088259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:29:41 http: TLS handshake error from 10.129.0.1:45878: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:28:15.003398Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigczkl kind= uid=99d7541b-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:28:15.004177Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigczkl kind= uid=99d7541b-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:28:15.238429Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigczkl\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigczkl"
level=info timestamp=2018-07-31T08:28:15.845830Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigczkl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigczkl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 99d7541b-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigczkl"
level=info timestamp=2018-07-31T08:28:46.047071Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76c5r kind= uid=ac591ad9-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:28:46.047870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76c5r kind= uid=ac591ad9-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:28:46.302733Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi76c5r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi76c5r"
level=info timestamp=2018-07-31T08:28:46.424637Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi76c5r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi76c5r"
level=info timestamp=2018-07-31T08:29:17.079907Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskfw4 kind= uid=bed4755c-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:29:17.081447Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskfw4 kind= uid=bed4755c-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:29:17.427199Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiskfw4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiskfw4"
level=info timestamp=2018-07-31T08:29:48.201673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit7rwr kind= uid=d16066d6-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:29:48.202138Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit7rwr kind= uid=d16066d6-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:29:48.440463Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr"
level=info timestamp=2018-07-31T08:29:48.574033Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr"
Pod name: virt-launcher-testvmit7rwr-xc5np
Pod phase: Pending

• Failure [31.053 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      with a fedora image
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77
        should return that we are running fedora [It]
        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78

        Expected success, but got an error:
            <*errors.StatusError | 0xc420b32510>: {
                ErrStatus: {
                    TypeMeta: {Kind: "", APIVersion: ""},
                    ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                    Status: "Failure",
                    Message: "Timeout: request did not complete within allowed duration",
                    Reason: "Timeout",
                    Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                    Code: 504,
                },
            }
            Timeout: request did not complete within allowed duration

        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:51
------------------------------
STEP: Creating a new VirtualMachineInstance
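Note on the recurring "Operation cannot be fulfilled ... the object has been modified; please apply your changes to the latest version and try again" entries in the virt-controller logs: that is the apiserver's optimistic-concurrency conflict (HTTP 409), raised when an update carries a stale resourceVersion, and reenqueuing the VMI is the normal recovery. A minimal sketch of the same recovery using client-go's conflict-retry helper, assuming k8s.io/client-go is vendored; updateVMI is a hypothetical stand-in, not virt-controller's actual code (which retries through its workqueue instead):

    package main

    import (
        "fmt"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        "k8s.io/client-go/util/retry"
    )

    // updateVMI stands in for a GET-mutate-UPDATE cycle; re-reading the object
    // on every attempt is what picks up the fresh resourceVersion after a
    // conflict, so the next write can succeed.
    func updateVMI() error {
        // GET latest object, mutate it, UPDATE it back...
        return nil
    }

    func main() {
        // RetryOnConflict re-runs the closure exactly when the apiserver
        // answers with the 409 behind the log lines above.
        if err := retry.RetryOnConflict(retry.DefaultRetry, updateVMI); err != nil {
            fmt.Println("gave up:", err, "still a conflict:", apierrors.IsConflict(err))
        }
    }

Seen this way, one or two reenqueue lines per VMI are expected churn between virt-controller and virt-handler, not a symptom of the 504 failures.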
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:30:13.247930Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:30:14 http: TLS handshake error from 10.128.0.1:50538: EOF level=info timestamp=2018-07-31T08:30:18.481364Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running level=info timestamp=2018-07-31T08:28:41.058313Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:28:41 http: TLS handshake error from 10.129.0.1:45818: EOF 2018/07/31 08:28:51 http: TLS handshake error from 10.129.0.1:45828: EOF 2018/07/31 08:29:01 http: TLS handshake error from 10.129.0.1:45838: EOF level=info timestamp=2018-07-31T08:29:11.055910Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:29:11 http: TLS handshake error from 10.129.0.1:45848: EOF 2018/07/31 08:29:21 http: TLS handshake error from 10.129.0.1:45858: EOF 2018/07/31 08:29:31 http: TLS handshake error from 10.129.0.1:45868: EOF level=info timestamp=2018-07-31T08:29:41.088259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:29:41 http: TLS handshake error from 10.129.0.1:45878: EOF 2018/07/31 08:29:51 http: TLS handshake error from 10.129.0.1:45888: EOF 2018/07/31 08:30:01 http: TLS handshake error from 10.129.0.1:45898: EOF level=info timestamp=2018-07-31T08:30:09.308597Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-31T08:30:11.208906Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:30:11 http: TLS handshake error from 10.129.0.1:45908: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:28:46.047071Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76c5r kind= uid=ac591ad9-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:28:46.047870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76c5r kind= uid=ac591ad9-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:28:46.302733Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi76c5r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi76c5r" level=info timestamp=2018-07-31T08:28:46.424637Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi76c5r\": the object has been modified; please apply your changes to the 
latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi76c5r" level=info timestamp=2018-07-31T08:29:17.079907Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskfw4 kind= uid=bed4755c-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:29:17.081447Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskfw4 kind= uid=bed4755c-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:29:17.427199Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiskfw4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiskfw4" level=info timestamp=2018-07-31T08:29:48.201673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit7rwr kind= uid=d16066d6-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:29:48.202138Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit7rwr kind= uid=d16066d6-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:29:48.440463Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr" level=info timestamp=2018-07-31T08:29:48.574033Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr" level=info timestamp=2018-07-31T08:29:48.842689Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmit7rwr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d16066d6-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr" level=info timestamp=2018-07-31T08:30:19.053611Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisddm2 kind= uid=e3c3fd48-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:30:19.054293Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisddm2 kind= uid=e3c3fd48-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:30:19.618070Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": the object has been modified; please apply your changes to the latest version and try again" 
msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2" Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:26:19.879661Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.879892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:20.321287Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n" level=info timestamp=2018-07-31T08:26:20.321500Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:26:20.321596Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-31T08:26:20.321781Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-31T08:26:20.321907Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-31T08:26:20.325880Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmij2v8q" level=info timestamp=2018-07-31T08:26:20.332521Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:20.333326Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-31T08:26:20.335852Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:26:20.336052Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n" level=info timestamp=2018-07-31T08:26:20.336198Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:20.336386Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:20.336747Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:26:19.604757Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.604849Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-31T08:26:19.605247Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:19.629982Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidtv7p, existing: false\n" level=info timestamp=2018-07-31T08:26:19.631005Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.631697Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.632505Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:19.723326Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipht6z, existing: false\n" level=info timestamp=2018-07-31T08:26:19.723492Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.723710Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.724008Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:19.804767Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitvm6q, existing: false\n" level=info timestamp=2018-07-31T08:26:19.804840Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.804910Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.805011Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmisddm2-j6gxm
Pod phase: Pending

• Failure [31.204 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should be able to reconnect to console multiple times [It]
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87

      Expected success, but got an error:
          <*errors.StatusError | 0xc42016f680>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration

      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:91
------------------------------
STEP: Creating a new VirtualMachineInstance
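Note on the "StorageError: invalid object, Code: 4 ... Precondition failed: UID in precondition: <uid>, UID in object meta: " entries: a write reached the storage layer carrying a UID precondition taken from a stale copy, and the empty "UID in object meta" shows the VMI had already been deleted by the test teardown, so the precondition correctly aborted the write instead of touching a possibly recreated object. A short sketch of the precondition mechanism itself, assuming k8s.io/apimachinery; the surrounding delete call and the concrete UID are illustrative only:

    package main

    import (
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
        // The controller's stale copy of the VMI still carries this UID...
        uid := "e3c3fd48-949b-11e8-8b5c-525500d15501"

        // ...so a delete (or guarded update) issued with a UID precondition is
        // rejected once the stored object is gone or has been recreated under
        // a new UID, which is exactly the "Precondition failed" error above.
        opts := &metav1.DeleteOptions{Preconditions: metav1.NewUIDPreconditions(uid)}
        fmt.Println("would delete only while the object still has UID:", *opts.Preconditions.UID)
    }

In other words, these storage errors are the delete race being handled safely; the controller just reenqueues and then observes the object is gone.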
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:30:13.247930Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:30:14 http: TLS handshake error from 10.128.0.1:50538: EOF
level=info timestamp=2018-07-31T08:30:18.481364Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:30:24 http: TLS handshake error from 10.128.0.1:50584: EOF
level=info timestamp=2018-07-31T08:30:28.610301Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:30:31.929531Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:30:31.935386Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/31 08:30:34 http: TLS handshake error from 10.128.0.1:50630: EOF
level=info timestamp=2018-07-31T08:30:36.780557Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:30:38.830213Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:30:40.239541Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:30:40.252361Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:30:41.883551Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:30:44 http: TLS handshake error from 10.128.0.1:50680: EOF
level=info timestamp=2018-07-31T08:30:49.024881Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:29:01 http: TLS handshake error from 10.129.0.1:45838: EOF
level=info timestamp=2018-07-31T08:29:11.055910Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:29:11 http: TLS handshake error from 10.129.0.1:45848: EOF
2018/07/31 08:29:21 http: TLS handshake error from 10.129.0.1:45858: EOF
2018/07/31 08:29:31 http: TLS handshake error from 10.129.0.1:45868: EOF
level=info timestamp=2018-07-31T08:29:41.088259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:29:41 http: TLS handshake error from 10.129.0.1:45878: EOF
2018/07/31 08:29:51 http: TLS handshake error from 10.129.0.1:45888: EOF
2018/07/31 08:30:01 http: TLS handshake error from 10.129.0.1:45898: EOF
level=info timestamp=2018-07-31T08:30:09.308597Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:30:11.208906Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:30:11 http: TLS handshake error from 10.129.0.1:45908: EOF
2018/07/31 08:30:21 http: TLS handshake error from 10.129.0.1:45920: EOF
2018/07/31 08:30:31 http: TLS handshake error from 10.129.0.1:45930: EOF
2018/07/31 08:30:41 http: TLS handshake error from 10.129.0.1:45940: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:29:17.081447Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiskfw4 kind= uid=bed4755c-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:29:17.427199Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiskfw4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiskfw4"
level=info timestamp=2018-07-31T08:29:48.201673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit7rwr kind= uid=d16066d6-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:29:48.202138Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit7rwr kind= uid=d16066d6-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:29:48.440463Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr"
level=info timestamp=2018-07-31T08:29:48.574033Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr"
level=info timestamp=2018-07-31T08:29:48.842689Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmit7rwr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d16066d6-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr"
level=info timestamp=2018-07-31T08:30:19.053611Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisddm2 kind= uid=e3c3fd48-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:30:19.054293Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisddm2 kind= uid=e3c3fd48-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:30:19.618070Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2"
level=info timestamp=2018-07-31T08:30:19.702877Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2"
level=info timestamp=2018-07-31T08:30:20.030254Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisddm2, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e3c3fd48-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2"
level=info timestamp=2018-07-31T08:30:50.261182Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:30:50.264261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:30:50.540548Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v"
Pod name: virt-launcher-testvmi67p2v-lbv6p
Pod phase: Pending

• Failure [30.880 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should wait until the virtual machine is in running state and return a stream interface [It]
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103

      Expected success, but got an error:
          <*errors.StatusError | 0xc4209eaa20>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration

      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:106
------------------------------
STEP: Creating a new VirtualMachineInstance
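Note on the "http: TLS handshake error from 10.129.0.1:...: EOF" lines that recur roughly every ten seconds in both virt-api pods: something opens a TCP connection to the TLS port and closes it without sending a ClientHello, which is the classic footprint of a tcpSocket readiness/liveness probe or a load-balancer health check; the entries are background noise, separate from the 504 test failures. A tiny sketch that reproduces the server-side message from the client side, with the address and port purely illustrative:

    package main

    import (
        "net"
        "time"
    )

    func main() {
        // Dialing a TLS endpoint over plain TCP and closing without a
        // handshake makes the server's crypto/tls accept loop log
        // "http: TLS handshake error from <addr>: EOF", one line per probe
        // interval, exactly like the entries above.
        conn, err := net.DialTimeout("tcp", "192.168.66.101:8443", 2*time.Second)
        if err != nil {
            return // endpoint only reachable inside the CI cluster
        }
        conn.Close()
    }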
username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:31:11.765002Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:31:14 http: TLS handshake error from 10.128.0.1:50824: EOF level=info timestamp=2018-07-31T08:31:19.786466Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:29:21 http: TLS handshake error from 10.129.0.1:45858: EOF 2018/07/31 08:29:31 http: TLS handshake error from 10.129.0.1:45868: EOF level=info timestamp=2018-07-31T08:29:41.088259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:29:41 http: TLS handshake error from 10.129.0.1:45878: EOF 2018/07/31 08:29:51 http: TLS handshake error from 10.129.0.1:45888: EOF 2018/07/31 08:30:01 http: TLS handshake error from 10.129.0.1:45898: EOF level=info timestamp=2018-07-31T08:30:09.308597Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-31T08:30:11.208906Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:30:11 http: TLS handshake error from 10.129.0.1:45908: EOF 2018/07/31 08:30:21 http: TLS handshake error from 10.129.0.1:45920: EOF 2018/07/31 08:30:31 http: TLS handshake error from 10.129.0.1:45930: EOF 2018/07/31 08:30:41 http: TLS handshake error from 10.129.0.1:45940: EOF 2018/07/31 08:30:51 http: TLS handshake error from 10.129.0.1:45950: EOF 2018/07/31 08:31:01 http: TLS handshake error from 10.129.0.1:45960: EOF 2018/07/31 08:31:11 http: TLS handshake error from 10.129.0.1:45970: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:29:48.574033Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr" level=info timestamp=2018-07-31T08:29:48.842689Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit7rwr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmit7rwr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d16066d6-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit7rwr" level=info timestamp=2018-07-31T08:30:19.053611Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisddm2 kind= uid=e3c3fd48-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:30:19.054293Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisddm2 kind= uid=e3c3fd48-949b-11e8-8b5c-525500d15501 msg="Marking 
VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:30:19.618070Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2" level=info timestamp=2018-07-31T08:30:19.702877Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2" level=info timestamp=2018-07-31T08:30:20.030254Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisddm2, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e3c3fd48-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2" level=info timestamp=2018-07-31T08:30:50.261182Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:30:50.264261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:30:50.540548Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v" level=info timestamp=2018-07-31T08:30:50.959073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi67p2v, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f65c988d-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v" level=info timestamp=2018-07-31T08:31:21.170714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:31:21.171360Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:31:21.410483Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing 
VirtualMachineInstance kubevirt-test-default/testvmi2cl9l" level=info timestamp=2018-07-31T08:31:21.457189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l" Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:26:19.879661Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.879892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:20.321287Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n" level=info timestamp=2018-07-31T08:26:20.321500Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:26:20.321596Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-31T08:26:20.321781Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-31T08:26:20.321907Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-31T08:26:20.325880Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmij2v8q" level=info timestamp=2018-07-31T08:26:20.332521Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:20.333326Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-31T08:26:20.335852Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:26:20.336052Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n" level=info timestamp=2018-07-31T08:26:20.336198Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:20.336386Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:20.336747Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:26:19.604757Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.604849Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.605247Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:19.629982Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidtv7p, existing: false\n" level=info timestamp=2018-07-31T08:26:19.631005Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.631697Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.632505Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidtv7p kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:19.723326Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipht6z, existing: false\n" level=info timestamp=2018-07-31T08:26:19.723492Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.723710Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.724008Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipht6z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:26:19.804767Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitvm6q, existing: false\n" level=info timestamp=2018-07-31T08:26:19.804840Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:26:19.804910Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:26:19.805011Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitvm6q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
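Every failure in this run surfaces the same *errors.StatusError with Reason "Timeout" and Code 504, meaning the API server did not answer the test's request in time. A minimal sketch of how a caller can classify that error with the k8s.io/apimachinery helpers (NewTimeoutError here only manufactures an error shaped like the one in the failure above; real code would receive it from a client call):

```go
package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
)

func handle(err error) {
	if err == nil {
		return
	}
	// IsTimeout matches StatusReason "Timeout", exactly what the
	// 504 StatusError in the failure block above carries.
	if apierrors.IsTimeout(err) {
		fmt.Println("API server timed out; retrying may help:", err)
		return
	}
	// SuggestsClientDelay reports whether the server asked the
	// client to back off (RetryAfterSeconds in the status details).
	if delay, ok := apierrors.SuggestsClientDelay(err); ok {
		fmt.Printf("server asked us to wait %d seconds: %v\n", delay, err)
		return
	}
	fmt.Println("non-timeout API error:", err)
}

func main() {
	handle(apierrors.NewTimeoutError("request did not complete within allowed duration", 0))
}
```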
Pod name: virt-launcher-testvmi2cl9l-np8cm Pod phase: Pending

• Failure [30.796 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should fail waiting for the virtual machine instance to be running [It]
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111

Expected success, but got an error:
    <*errors.StatusError | 0xc420b32240>: {
        ErrStatus: {
            TypeMeta: {Kind: "", APIVersion: ""},
            ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
            Status: "Failure",
            Message: "Timeout: request did not complete within allowed duration",
            Reason: "Timeout",
            Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
            Code: 504,
        },
    }
    Timeout: request did not complete within allowed duration

/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:128
------------------------------
STEP: Creating a new VirtualMachineInstance

Pod name: disks-images-provider-mwfrr Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw Pod phase: Running
level=info timestamp=2018-07-31T08:31:11.765002Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:31:14 http: TLS handshake error from 10.128.0.1:50824: EOF
level=info timestamp=2018-07-31T08:31:19.786466Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:31:24 http: TLS handshake error from 10.128.0.1:50870: EOF
level=info timestamp=2018-07-31T08:31:29.999075Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:31.942624Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:31:31.947590Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/31 08:31:34 http: TLS handshake error from 10.128.0.1:50916: EOF
level=info timestamp=2018-07-31T08:31:37.412671Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:40.208781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:40.934839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:40.956149Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:41.731876Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:31:44 http: TLS handshake error from 10.128.0.1:50966: EOF
level=info timestamp=2018-07-31T08:31:50.436420Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jzf2l Pod phase: Running
2018/07/31 08:29:51 http: TLS handshake error from 10.129.0.1:45888: EOF
2018/07/31 08:30:01 http: TLS handshake error from 10.129.0.1:45898: EOF
level=info timestamp=2018-07-31T08:30:09.308597Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:30:11.208906Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:30:11 http: TLS handshake error from 10.129.0.1:45908: EOF
2018/07/31 08:30:21 http: TLS handshake error from 10.129.0.1:45920: EOF
2018/07/31 08:30:31 http: TLS handshake error from 10.129.0.1:45930: EOF
2018/07/31 08:30:41 http: TLS handshake error from 10.129.0.1:45940: EOF
2018/07/31 08:30:51 http: TLS handshake error from 10.129.0.1:45950: EOF
2018/07/31 08:31:01 http: TLS handshake error from 10.129.0.1:45960: EOF
2018/07/31 08:31:11 http: TLS handshake error from 10.129.0.1:45970: EOF
2018/07/31 08:31:21 http: TLS handshake error from 10.129.0.1:45980: EOF
2018/07/31 08:31:31 http: TLS handshake error from 10.129.0.1:45990: EOF
2018/07/31 08:31:41 http: TLS handshake error from 10.129.0.1:46000: EOF
2018/07/31 08:31:51 http: TLS handshake error from 10.129.0.1:46010: EOF

Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running
level=info timestamp=2018-07-31T08:30:19.054293Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisddm2 kind= uid=e3c3fd48-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:30:19.618070Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2"
level=info timestamp=2018-07-31T08:30:19.702877Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2"
level=info timestamp=2018-07-31T08:30:20.030254Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisddm2\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisddm2, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e3c3fd48-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisddm2"
level=info timestamp=2018-07-31T08:30:50.261182Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:30:50.264261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:30:50.540548Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v"
level=info timestamp=2018-07-31T08:30:50.959073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi67p2v, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f65c988d-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v"
level=info timestamp=2018-07-31T08:31:21.170714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:21.171360Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:31:21.410483Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:21.457189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:21.834657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2cl9l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 08c63391-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:52.217738Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:52.218423Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running
level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
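The controller's "the object has been modified; please apply your changes to the latest version and try again" lines are the API server's optimistic-concurrency conflict (HTTP 409): an update carried a stale resourceVersion. virt-controller handles it by re-enqueuing the key; the standard client-side pattern for the same situation is client-go's retry.RetryOnConflict. A sketch under that assumption (the closure body is hypothetical; real code would re-GET the object, re-apply the change, and call Update):

```go
package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/client-go/util/retry"
)

func main() {
	gr := schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"}
	attempts := 0
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		attempts++
		// Simulate the conflict from the log for the first two tries;
		// RetryOnConflict backs off and re-runs the closure, which in
		// real code would operate on freshly fetched state.
		if attempts < 3 {
			return apierrors.NewConflict(gr, "testvmisddm2",
				fmt.Errorf("the object has been modified; please apply your changes to the latest version and try again"))
		}
		return nil
	})
	fmt.Printf("succeeded after %d attempts, err=%v\n", attempts, err)
}
```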
Pod name: virt-launcher-testvmi4zbkh-xlrpv Pod phase: Pending

• Failure [31.076 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should fail waiting for the expecter [It]
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134

Expected success, but got an error:
    <*errors.StatusError | 0xc420a133b0>: {
        ErrStatus: {
            TypeMeta: {Kind: "", APIVersion: ""},
            ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
            Status: "Failure",
            Message: "Timeout: request did not complete within allowed duration",
            Reason: "Timeout",
            Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
            Code: 504,
        },
    }
    Timeout: request did not complete within allowed duration

/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:151
------------------------------
STEP: Creating a new VirtualMachineInstance

Pod name: disks-images-provider-mwfrr Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw Pod phase: Running
level=info timestamp=2018-07-31T08:31:40.208781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:40.934839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:40.956149Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:31:41.731876Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:31:44 http: TLS handshake error from 10.128.0.1:50966: EOF
level=info timestamp=2018-07-31T08:31:50.436420Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:31:54 http: TLS handshake error from 10.128.0.1:51012: EOF
level=info timestamp=2018-07-31T08:32:00.668422Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:32:04 http: TLS handshake error from 10.128.0.1:51058: EOF
level=info timestamp=2018-07-31T08:32:07.526894Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:10.866800Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:11.296306Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:11.306673Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:32:14 http: TLS handshake error from 10.128.0.1:51108: EOF
level=info timestamp=2018-07-31T08:32:21.192828Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jzf2l Pod phase: Running
2018/07/31 08:30:11 http: TLS handshake error from 10.129.0.1:45908: EOF
2018/07/31 08:30:21 http: TLS handshake error from 10.129.0.1:45920: EOF
2018/07/31 08:30:31 http: TLS handshake error from 10.129.0.1:45930: EOF
2018/07/31 08:30:41 http: TLS handshake error from 10.129.0.1:45940: EOF
2018/07/31 08:30:51 http: TLS handshake error from 10.129.0.1:45950: EOF
2018/07/31 08:31:01 http: TLS handshake error from 10.129.0.1:45960: EOF
2018/07/31 08:31:11 http: TLS handshake error from 10.129.0.1:45970: EOF
2018/07/31 08:31:21 http: TLS handshake error from 10.129.0.1:45980: EOF
2018/07/31 08:31:31 http: TLS handshake error from 10.129.0.1:45990: EOF
2018/07/31 08:31:41 http: TLS handshake error from 10.129.0.1:46000: EOF
2018/07/31 08:31:51 http: TLS handshake error from 10.129.0.1:46010: EOF
2018/07/31 08:32:01 http: TLS handshake error from 10.129.0.1:46020: EOF
level=info timestamp=2018-07-31T08:32:11.197837Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:11 http: TLS handshake error from 10.129.0.1:46030: EOF
2018/07/31 08:32:21 http: TLS handshake error from 10.129.0.1:46040: EOF

Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running
level=info timestamp=2018-07-31T08:30:50.261182Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:30:50.264261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi67p2v kind= uid=f65c988d-949b-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:30:50.540548Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v"
level=info timestamp=2018-07-31T08:30:50.959073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi67p2v, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f65c988d-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v"
level=info timestamp=2018-07-31T08:31:21.170714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:21.171360Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:31:21.410483Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:21.457189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:21.834657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2cl9l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 08c63391-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:52.217738Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:52.218423Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:22.793324Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:22.793766Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:23.151607Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:23.231016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"

Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running
level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
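The "StorageError: invalid object … Precondition failed: UID in precondition: <uid>, UID in object meta:" entries above are a different failure mode from the plain resourceVersion conflict: an operation carried a UID precondition, and by the time it reached storage the object with that UID was gone (the empty "UID in object meta" field). A sketch of issuing a UID-guarded delete with the metav1 types; the client interface here is illustrative, not the real KubeVirt client:

```go
package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
)

// deleter abstracts whatever client owns the VMI resource.
type deleter interface {
	Delete(name string, options *metav1.DeleteOptions) error
}

// deleteExactly deletes `name` only if it is still the object with the
// given UID; if the name was deleted and recreated in between, the
// server rejects it with a precondition failure like the one logged.
func deleteExactly(c deleter, name string, uid types.UID) error {
	return c.Delete(name, &metav1.DeleteOptions{
		Preconditions: &metav1.Preconditions{UID: &uid},
	})
}

type fakeClient struct{}

func (fakeClient) Delete(name string, _ *metav1.DeleteOptions) error {
	fmt.Println("would delete", name, "guarded by a UID precondition")
	return nil
}

func main() {
	_ = deleteExactly(fakeClient{}, "testvmi2cl9l", types.UID("08c63391-949c-11e8-8b5c-525500d15501"))
}
```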
Pod name: virt-launcher-testvmi4zbkh-xlrpv Pod phase: Pending

Pod name: virt-launcher-testvmisvg74-mn7dc Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.781 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
Expose service on a VM
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
Expose ClusterIP service [BeforeEach]
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:68
Should expose a Cluster IP service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:71

Expected error:
    <*errors.StatusError | 0xc4206f2240>: {
        ErrStatus: {
            TypeMeta: {Kind: "", APIVersion: ""},
            ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
            Status: "Failure",
            Message: "Timeout: request did not complete within allowed duration",
            Reason: "Timeout",
            Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
            Code: 504,
        },
    }
    Timeout: request did not complete within allowed duration
not to have occurred

/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------

Pod name: disks-images-provider-mwfrr Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw Pod phase: Running
level=info timestamp=2018-07-31T08:32:07.526894Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:10.866800Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:11.296306Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:11.306673Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:32:14 http: TLS handshake error from 10.128.0.1:51108: EOF
level=info timestamp=2018-07-31T08:32:21.192828Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:32:24 http: TLS handshake error from 10.128.0.1:51154: EOF
level=info timestamp=2018-07-31T08:32:31.330152Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:32:34 http: TLS handshake error from 10.128.0.1:51200: EOF
level=info timestamp=2018-07-31T08:32:37.632771Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:41.805501Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:41.853854Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:32:41.854913Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:32:44 http: TLS handshake error from 10.128.0.1:51250: EOF
level=info timestamp=2018-07-31T08:32:52.010243Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jzf2l Pod phase: Running
2018/07/31 08:30:51 http: TLS handshake error from 10.129.0.1:45950: EOF
2018/07/31 08:31:01 http: TLS handshake error from 10.129.0.1:45960: EOF
2018/07/31 08:31:11 http: TLS handshake error from 10.129.0.1:45970: EOF
2018/07/31 08:31:21 http: TLS handshake error from 10.129.0.1:45980: EOF
2018/07/31 08:31:31 http: TLS handshake error from 10.129.0.1:45990: EOF
2018/07/31 08:31:41 http: TLS handshake error from 10.129.0.1:46000: EOF
2018/07/31 08:31:51 http: TLS handshake error from 10.129.0.1:46010: EOF
2018/07/31 08:32:01 http: TLS handshake error from 10.129.0.1:46020: EOF
level=info timestamp=2018-07-31T08:32:11.197837Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:11 http: TLS handshake error from 10.129.0.1:46030: EOF
2018/07/31 08:32:21 http: TLS handshake error from 10.129.0.1:46040: EOF
2018/07/31 08:32:31 http: TLS handshake error from 10.129.0.1:46050: EOF
level=info timestamp=2018-07-31T08:32:41.165150Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:41 http: TLS handshake error from 10.129.0.1:46060: EOF
2018/07/31 08:32:51 http: TLS handshake error from 10.129.0.1:46070: EOF

Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running
level=info timestamp=2018-07-31T08:30:50.959073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi67p2v\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi67p2v, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f65c988d-949b-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi67p2v"
level=info timestamp=2018-07-31T08:31:21.170714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:21.171360Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2cl9l kind= uid=08c63391-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:31:21.410483Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:21.457189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:21.834657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2cl9l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 08c63391-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:52.217738Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:52.218423Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:22.793324Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:22.793766Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:23.151607Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:23.231016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:53.523366Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:53.523995Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:53.717467Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2dx72\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2dx72"

Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running
level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-l7664
Pod phase: Running
level=info timestamp=2018-07-31T08:32:40.986509Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="No update processing required"
level=error timestamp=2018-07-31T08:32:41.020148Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-07-31T08:32:41.020302Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:41.020370Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: true\n"
level=info timestamp=2018-07-31T08:32:41.020391Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:32:41.020438Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:32:41.020456Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:32:41.020566Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:32:41.040467Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:32:41.040694Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: true\n"
level=info timestamp=2018-07-31T08:32:41.041329Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:32:41.041779Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:32:41.042281Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:32:41.043265Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:32:41.049180Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
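The virt-handler records above trace one reconcile pass per line: read the VMI, read the libvirt domain, then pick an action. A toy reduction of that decision table, assuming the logged fields are the whole input; the type and strings are illustrative, not KubeVirt's code.

package main

import "fmt"

// syncInput mirrors the facts each loop iteration logs above.
type syncInput struct {
	vmiExists    bool
	vmiPhase     string // "Scheduled", "Running", ...
	domainExists bool
}

// decide reproduces the outcomes visible in the log.
func decide(in syncInput) string {
	switch {
	case !in.vmiExists && in.domainExists:
		return "shut down domain for deleted VMI" // the testvmij2v8q lines
	case in.vmiExists && in.domainExists && in.vmiPhase == "Scheduled":
		return "no update processing required"
	case in.vmiExists && in.domainExists && in.vmiPhase == "Running":
		return "process vmi update"
	default:
		return "clean up local ephemeral data"
	}
}

func main() {
	fmt.Println(decide(syncInput{vmiExists: true, vmiPhase: "Running", domainExists: true}))
}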
Pod name: virt-launcher-testvmi2dx72-fmt8f
Pod phase: Pending
Pod name: virt-launcher-testvmi4zbkh-xlrpv
Pod phase: Pending
Pod name: virt-launcher-testvmisvg74-mn7dc
Pod phase: Running
level=info timestamp=2018-07-31T08:32:40.078298Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:32:40.121722Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.219732Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f2e97e9b-ecfd-4f72-a77f-71928d2390fd"
level=info timestamp=2018-07-31T08:32:40.311905Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:32:40.665727Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.717882Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:32:40.718672Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.726802Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:40.842672Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.843072Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.907700Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.916305Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:41.032566Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.047744Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.332756Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f2e97e9b-ecfd-4f72-a77f-71928d2390fd: 182"

• Failure in Spec Setup (BeforeEach) [30.645 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose ClusterIP service with string target-port [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:98
      Should expose a ClusterIP service and connect to the vm on port 80
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:101

      Expected error:
          <*errors.StatusError | 0xc420b32fc0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
2018/07/31 08:32:44 http: TLS handshake error from 10.128.0.1:51250: EOF
level=info timestamp=2018-07-31T08:32:52.010243Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:32:54 http: TLS handshake error from 10.128.0.1:51296: EOF
level=info timestamp=2018-07-31T08:33:02.249914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:04 http: TLS handshake error from 10.128.0.1:51342: EOF
level=info timestamp=2018-07-31T08:33:07.723373Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:12.132880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:12.152713Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:12.402375Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:14.220672Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:14.299623Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:14.319667Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:14 http: TLS handshake error from 10.128.0.1:51392: EOF
level=info timestamp=2018-07-31T08:33:22.598417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:24 http: TLS handshake error from 10.128.0.1:51438: EOF
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:31:31 http: TLS handshake error from 10.129.0.1:45990: EOF
2018/07/31 08:31:41 http: TLS handshake error from 10.129.0.1:46000: EOF
2018/07/31 08:31:51 http: TLS handshake error from 10.129.0.1:46010: EOF
2018/07/31 08:32:01 http: TLS handshake error from 10.129.0.1:46020: EOF
level=info timestamp=2018-07-31T08:32:11.197837Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:11 http: TLS handshake error from 10.129.0.1:46030: EOF
2018/07/31 08:32:21 http: TLS handshake error from 10.129.0.1:46040: EOF
2018/07/31 08:32:31 http: TLS handshake error from 10.129.0.1:46050: EOF
level=info timestamp=2018-07-31T08:32:41.165150Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:41 http: TLS handshake error from 10.129.0.1:46060: EOF
2018/07/31 08:32:51 http: TLS handshake error from 10.129.0.1:46070: EOF
2018/07/31 08:33:01 http: TLS handshake error from 10.129.0.1:46080: EOF
level=info timestamp=2018-07-31T08:33:11.256451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:33:11 http: TLS handshake error from 10.129.0.1:46090: EOF
2018/07/31 08:33:21 http: TLS handshake error from 10.129.0.1:46102: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:31:21.457189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:21.834657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2cl9l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2cl9l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 08c63391-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2cl9l"
level=info timestamp=2018-07-31T08:31:52.217738Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:52.218423Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:22.793324Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:22.793766Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:23.151607Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:23.231016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:53.523366Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:53.523995Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:53.717467Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2dx72\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2dx72"
level=info timestamp=2018-07-31T08:33:24.187407Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:33:24.196781Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:33:24.438571Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z"
level=info timestamp=2018-07-31T08:33:24.603466Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z"
Pod name: virt-controller-7d57d96b65-fpmvw
Pod phase: Running
level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-gfmtm
Pod phase: Running
level=info timestamp=2018-07-31T08:26:19.879661Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:19.879892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:20.321287Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n"
level=info timestamp=2018-07-31T08:26:20.321500Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:26:20.321596Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-31T08:26:20.321781Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-31T08:26:20.321907Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-31T08:26:20.325880Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmij2v8q"
level=info timestamp=2018-07-31T08:26:20.332521Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:20.333326Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-31T08:26:20.335852Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-31T08:26:20.336052Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n"
level=info timestamp=2018-07-31T08:26:20.336198Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:26:20.336386Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:20.336747Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-l7664
Pod phase: Running
level=info timestamp=2018-07-31T08:33:11.811902Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmi2dx72 kind=Domain uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-31T08:33:11.879867Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-31T08:33:11.893729Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:33:11.920440Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi2dx72, existing: true\n"
level=info timestamp=2018-07-31T08:33:11.920549Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-31T08:33:11.920617Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:33:11.920693Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:33:11.922682Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:33:11.997444Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:33:12.086305Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi2dx72, existing: true\n"
level=info timestamp=2018-07-31T08:33:12.086404Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:33:12.086436Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:33:12.086457Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:33:12.086546Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:33:12.100473Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi2dx72-fmt8f
Pod phase: Running
level=info timestamp=2018-07-31T08:33:10.757651Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.357726Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:33:11.371673Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:11.717085Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.796653Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:11.807768Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID bba4b181-ab8b-45e3-b733-daa05c8771dc"
level=info timestamp=2018-07-31T08:33:11.818682Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:11.823179Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:33:11.833181Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.859120Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:33:11.870223Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:11.875836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:11.921889Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:12.099578Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:12.837713Z pos=monitor.go:222 component=virt-launcher msg="Found PID for bba4b181-ab8b-45e3-b733-daa05c8771dc: 187"
Pod name: virt-launcher-testvmi4zbkh-xlrpv
Pod phase: Pending
Pod name: virt-launcher-testvmibk64z-kmqsg
Pod phase: Pending
Pod name: virt-launcher-testvmisvg74-mn7dc
Pod phase: Running
level=info timestamp=2018-07-31T08:32:40.078298Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:32:40.121722Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.219732Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f2e97e9b-ecfd-4f72-a77f-71928d2390fd"
level=info timestamp=2018-07-31T08:32:40.311905Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:32:40.665727Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.717882Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:32:40.718672Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.726802Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:40.842672Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.843072Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.907700Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.916305Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:41.032566Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.047744Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.332756Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f2e97e9b-ecfd-4f72-a77f-71928d2390fd: 182"

• Failure in Spec Setup (BeforeEach) [30.843 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose NodePort service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:124
      Should expose a NodePort service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:129

      Expected error:
          <*errors.StatusError | 0xc420a125a0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:33:14.319667Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:14 http: TLS handshake error from 10.128.0.1:51392: EOF
level=info timestamp=2018-07-31T08:33:22.598417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:24 http: TLS handshake error from 10.128.0.1:51438: EOF
level=info timestamp=2018-07-31T08:33:31.958760Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:33:31.963501Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:33:32.858564Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:34 http: TLS handshake error from 10.128.0.1:51484: EOF
level=info timestamp=2018-07-31T08:33:37.871095Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:42.305107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:42.342870Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:33:43.086209Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:44 http: TLS handshake error from 10.128.0.1:51534: EOF
level=info timestamp=2018-07-31T08:33:53.319541Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:54 http: TLS handshake error from 10.128.0.1:51580: EOF
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
level=info timestamp=2018-07-31T08:32:11.197837Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:11 http: TLS handshake error from 10.129.0.1:46030: EOF
2018/07/31 08:32:21 http: TLS handshake error from 10.129.0.1:46040: EOF
2018/07/31 08:32:31 http: TLS handshake error from 10.129.0.1:46050: EOF
level=info timestamp=2018-07-31T08:32:41.165150Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:41 http: TLS handshake error from 10.129.0.1:46060: EOF
2018/07/31 08:32:51 http: TLS handshake error from 10.129.0.1:46070: EOF
2018/07/31 08:33:01 http: TLS handshake error from 10.129.0.1:46080: EOF
level=info timestamp=2018-07-31T08:33:11.256451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:33:11 http: TLS handshake error from 10.129.0.1:46090: EOF
2018/07/31 08:33:21 http: TLS handshake error from 10.129.0.1:46102: EOF
2018/07/31 08:33:31 http: TLS handshake error from 10.129.0.1:46112: EOF
level=info timestamp=2018-07-31T08:33:41.325574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:33:41 http: TLS handshake error from 10.129.0.1:46122: EOF
2018/07/31 08:33:51 http: TLS handshake error from 10.129.0.1:46132: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:31:52.217738Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:31:52.218423Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4zbkh kind= uid=1b4ed705-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:22.793324Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:22.793766Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:23.151607Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:23.231016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:53.523366Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:53.523995Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:53.717467Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2dx72\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2dx72"
level=info timestamp=2018-07-31T08:33:24.187407Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:33:24.196781Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:33:24.438571Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z"
level=info timestamp=2018-07-31T08:33:24.603466Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z"
level=info timestamp=2018-07-31T08:33:55.039897Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:33:55.140458Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-controller-7d57d96b65-fpmvw
Pod phase: Running
level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-gfmtm
Pod phase: Running
level=info timestamp=2018-07-31T08:26:19.879661Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:19.879892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:20.321287Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n"
level=info timestamp=2018-07-31T08:26:20.321500Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:26:20.321596Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-31T08:26:20.321781Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-31T08:26:20.321907Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-31T08:26:20.325880Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmij2v8q"
level=info timestamp=2018-07-31T08:26:20.332521Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:20.333326Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-31T08:26:20.335852Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-31T08:26:20.336052Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n"
level=info timestamp=2018-07-31T08:26:20.336198Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:26:20.336386Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:20.336747Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-l7664
Pod phase: Running
level=info timestamp=2018-07-31T08:33:43.636156Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=Domain uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-31T08:33:43.707352Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-31T08:33:43.723529Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:33:43.724983Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibk64z, existing: true\n"
level=info timestamp=2018-07-31T08:33:43.725770Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-31T08:33:43.727279Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:33:43.728178Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:33:43.729466Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-31T08:33:43.801980Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:33:43.813365Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibk64z, existing: true\n"
level=info timestamp=2018-07-31T08:33:43.814254Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-31T08:33:43.815009Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:33:43.815897Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-31T08:33:43.817016Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-31T08:33:43.837400Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi2dx72-fmt8f
Pod phase: Running
level=info timestamp=2018-07-31T08:33:10.757651Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.357726Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:33:11.371673Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:11.717085Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.796653Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:11.807768Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID bba4b181-ab8b-45e3-b733-daa05c8771dc"
level=info timestamp=2018-07-31T08:33:11.818682Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:11.823179Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:33:11.833181Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.859120Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:33:11.870223Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:11.875836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:11.921889Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:12.099578Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:12.837713Z pos=monitor.go:222 component=virt-launcher msg="Found PID for bba4b181-ab8b-45e3-b733-daa05c8771dc: 187"
Pod name: virt-launcher-testvmi4zbkh-xlrpv
Pod phase: Pending
Pod name: virt-launcher-testvmibk64z-kmqsg
Pod phase: Running
level=info timestamp=2018-07-31T08:33:41.804648Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-31T08:33:43.114367Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:33:43.119220Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f6f02e34-fdfd-41ba-bf78-c8d87c6eef64"
level=info timestamp=2018-07-31T08:33:43.119578Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:33:43.128339Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:43.579255Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:33:43.613431Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:43.633989Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:33:43.640532Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:43.649479Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:43.650273Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:33:43.695970Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:43.713697Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:43.825698Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:44.151968Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f6f02e34-fdfd-41ba-bf78-c8d87c6eef64: 194"
Pod name: virt-launcher-testvmicfssj-lgntl
Pod phase: Pending
Pod name: virt-launcher-testvmisvg74-mn7dc
Pod phase: Running
level=info timestamp=2018-07-31T08:32:40.078298Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:32:40.121722Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.219732Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f2e97e9b-ecfd-4f72-a77f-71928d2390fd"
level=info timestamp=2018-07-31T08:32:40.311905Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:32:40.665727Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.717882Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:32:40.718672Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.726802Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:40.842672Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.843072Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.907700Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.916305Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:41.032566Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.047744Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.332756Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f2e97e9b-ecfd-4f72-a77f-71928d2390fd: 182"

• Failure in Spec Setup (BeforeEach) [31.089 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose UDP service on a VMI
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166
    Expose ClusterIP UDP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:173
      Should expose a ClusterIP service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:177

      Expected error:
          <*errors.StatusError | 0xc420a12fc0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:33:53.319541Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:33:54 http: TLS handshake error from 10.128.0.1:51580: EOF
level=info timestamp=2018-07-31T08:34:02.808697Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:34:02.838811Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:34:02.876671Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:34:03.570800Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:34:04 http: TLS handshake error from 10.128.0.1:51628: EOF
level=info timestamp=2018-07-31T08:34:08.161774Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:34:12.027747Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:34:12.682864Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:34:12.704665Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:34:13.866926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:34:14 http: TLS handshake error from 10.128.0.1:51678: EOF
level=info timestamp=2018-07-31T08:34:24.228185Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:34:24 http: TLS handshake error from 10.128.0.1:51724: EOF
Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:32:31 http: TLS handshake error from 10.129.0.1:46050: EOF
level=info timestamp=2018-07-31T08:32:41.165150Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:32:41 http: TLS handshake error from 10.129.0.1:46060: EOF
2018/07/31 08:32:51 http: TLS handshake error from 10.129.0.1:46070: EOF
2018/07/31 08:33:01 http: TLS handshake error from 10.129.0.1:46080: EOF
level=info timestamp=2018-07-31T08:33:11.256451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:33:11 http: TLS handshake error from 10.129.0.1:46090: EOF
2018/07/31 08:33:21 http: TLS handshake error from 10.129.0.1:46102: EOF
2018/07/31 08:33:31 http: TLS handshake error from 10.129.0.1:46112: EOF
level=info timestamp=2018-07-31T08:33:41.325574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:33:41 http: TLS handshake error from 10.129.0.1:46122: EOF
2018/07/31 08:33:51 http: TLS handshake error from 10.129.0.1:46132: EOF
2018/07/31 08:34:02 http: TLS handshake error from 10.129.0.1:46142: EOF
2018/07/31 08:34:11 http: TLS handshake error from 10.129.0.1:46152: EOF
2018/07/31 08:34:21 http: TLS handshake error from 10.129.0.1:46162: EOF
Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:32:22.793324Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:22.793766Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:23.151607Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:23.231016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74"
level=info timestamp=2018-07-31T08:32:53.523366Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:32:53.523995Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:32:53.717467Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2dx72\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2dx72"
level=info timestamp=2018-07-31T08:33:24.187407Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:33:24.196781Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:33:24.438571Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z"
level=info timestamp=2018-07-31T08:33:24.603466Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z"
level=info timestamp=2018-07-31T08:33:55.039897Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:33:55.140458Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:34:26.171617Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:34:26.172466Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-controller-7d57d96b65-fpmvw
Pod phase: Running
level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-gfmtm
Pod phase: Running
level=info timestamp=2018-07-31T08:26:19.879661Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:19.879892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5qjfq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:20.321287Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n"
level=info timestamp=2018-07-31T08:26:20.321500Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:26:20.321596Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-31T08:26:20.321781Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-31T08:26:20.321907Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-31T08:26:20.325880Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmij2v8q"
level=info timestamp=2018-07-31T08:26:20.332521Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:26:20.333326Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-31T08:26:20.335852Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-31T08:26:20.336052Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmij2v8q, existing: false\n"
level=info timestamp=2018-07-31T08:26:20.336198Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:26:20.336386Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:26:20.336747Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmij2v8q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:34:12.997360Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind=Domain uid=64835040-949c-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-31T08:34:13.005488Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:34:13.009531Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:13.018681Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicfssj, existing: true\n" level=info timestamp=2018-07-31T08:34:13.018796Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-31T08:34:13.018828Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:13.018856Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:13.018959Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:34:13.142739Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:13.168199Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicfssj, existing: true\n" level=info timestamp=2018-07-31T08:34:13.178856Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:34:13.179027Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:13.179056Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:13.179151Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:34:13.226802Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
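
The virt-handler entries above all follow the same reconciliation shape: for every VMI key the handler records whether the VMI object still exists in the cluster ("Processing vmi ..., existing: ...") and whether a libvirt domain exists locally ("Domain: existing: ..."), then chooses an action. A minimal Go sketch of that decision under those assumptions; the names are illustrative, and the real logic in pkg/virt-handler (vm.go in the positions logged above) carries many more cases:

package main

import "fmt"

// state captures the two facts the handler logs for each key.
type state struct {
	vmiExists    bool // "Processing vmi ..., existing: true/false"
	domainExists bool // "Domain: existing: true/false"
}

// reconcile mirrors the outcomes visible in the log lines above.
func reconcile(s state) string {
	switch {
	case !s.vmiExists && s.domainExists:
		// VMI object is gone but a domain survives: tear it down.
		return "Shutting down domain for deleted VirtualMachineInstance object."
	case !s.vmiExists && !s.domainExists:
		// Nothing left in the cluster or locally: clean up leftovers.
		return "Processing local ephemeral data cleanup for shutdown domain."
	default:
		// Both exist; further phase checks decide between
		// "No update processing required" and "Processing vmi update".
		return "Processing vmi update"
	}
}

func main() {
	fmt.Println(reconcile(state{vmiExists: false, domainExists: true}))
}
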
Pod name: virt-launcher-testvmi2dx72-fmt8f Pod phase: Running level=info timestamp=2018-07-31T08:33:10.757651Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:33:11.357726Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:33:11.371673Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:11.717085Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:33:11.796653Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:11.807768Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID bba4b181-ab8b-45e3-b733-daa05c8771dc" level=info timestamp=2018-07-31T08:33:11.818682Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:11.823179Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:33:11.833181Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:33:11.859120Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:33:11.870223Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:11.875836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:11.921889Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:12.099578Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:12.837713Z pos=monitor.go:222 component=virt-launcher msg="Found PID for bba4b181-ab8b-45e3-b733-daa05c8771dc: 187" Pod name: virt-launcher-testvmi4zbkh-xlrpv Pod phase: Pending Pod name: virt-launcher-testvmi6h2c4-ksdlv Pod phase: Pending Pod name: virt-launcher-testvmibk64z-kmqsg Pod phase: Running level=info timestamp=2018-07-31T08:33:41.804648Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain defined." 
level=info timestamp=2018-07-31T08:33:43.114367Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:33:43.119220Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f6f02e34-fdfd-41ba-bf78-c8d87c6eef64" level=info timestamp=2018-07-31T08:33:43.119578Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:33:43.128339Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:43.579255Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:33:43.613431Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:43.633989Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:33:43.640532Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:43.649479Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:43.650273Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:33:43.695970Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:43.713697Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:43.825698Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:44.151968Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f6f02e34-fdfd-41ba-bf78-c8d87c6eef64: 194" Pod name: virt-launcher-testvmicfssj-lgntl Pod phase: Running level=info timestamp=2018-07-31T08:34:12.118179Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:34:12.640217Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:34:12.655078Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 52c51977-ea80-44ee-b92a-71e5ced8a08a" level=info timestamp=2018-07-31T08:34:12.655817Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:34:12.659822Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:12.935144Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:34:12.954164Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:34:12.966676Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:34:12.968722Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:34:12.983212Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:12.983848Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:34:12.999384Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:34:13.006707Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:13.193679Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:34:13.672718Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 52c51977-ea80-44ee-b92a-71e5ced8a08a: 184" Pod name: virt-launcher-testvmisvg74-mn7dc Pod phase: Running level=info timestamp=2018-07-31T08:32:40.078298Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:32:40.121722Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:32:40.219732Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f2e97e9b-ecfd-4f72-a77f-71928d2390fd" level=info timestamp=2018-07-31T08:32:40.311905Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:32:40.665727Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:32:40.717882Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:32:40.718672Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.726802Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:40.842672Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.843072Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.907700Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.916305Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:41.032566Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.047744Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.332756Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f2e97e9b-ecfd-4f72-a77f-71928d2390fd: 182"

• Failure in Spec Setup (BeforeEach) [31.251 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose UDP service on a VMI
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166
    Expose NodePort UDP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:205
      Should expose a NodePort service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:210

      Expected error:
          <*errors.StatusError | 0xc4206f2d80>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:34:12.704665Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:34:13.866926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:34:14 http: TLS handshake error from 10.128.0.1:51678: EOF
level=info timestamp=2018-07-31T08:34:24.228185Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:34:24 http: TLS handshake error from 10.128.0.1:51724: EOF
level=info timestamp=2018-07-31T08:34:34.404601Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1
username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:34:34 http: TLS handshake error from 10.128.0.1:51770: EOF level=info timestamp=2018-07-31T08:34:38.368093Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:34:41.968854Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-31T08:34:43.122496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:34:43.128096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:34:44 http: TLS handshake error from 10.128.0.1:51820: EOF level=info timestamp=2018-07-31T08:34:44.767395Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:34:54 http: TLS handshake error from 10.128.0.1:51866: EOF level=info timestamp=2018-07-31T08:34:55.061598Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:32:51 http: TLS handshake error from 10.129.0.1:46070: EOF 2018/07/31 08:33:01 http: TLS handshake error from 10.129.0.1:46080: EOF level=info timestamp=2018-07-31T08:33:11.256451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:33:11 http: TLS handshake error from 10.129.0.1:46090: EOF 2018/07/31 08:33:21 http: TLS handshake error from 10.129.0.1:46102: EOF 2018/07/31 08:33:31 http: TLS handshake error from 10.129.0.1:46112: EOF level=info timestamp=2018-07-31T08:33:41.325574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:33:41 http: TLS handshake error from 10.129.0.1:46122: EOF 2018/07/31 08:33:51 http: TLS handshake error from 10.129.0.1:46132: EOF 2018/07/31 08:34:02 http: TLS handshake error from 10.129.0.1:46142: EOF 2018/07/31 08:34:11 http: TLS handshake error from 10.129.0.1:46152: EOF 2018/07/31 08:34:21 http: TLS handshake error from 10.129.0.1:46162: EOF 2018/07/31 08:34:31 http: TLS handshake error from 10.129.0.1:46172: EOF 2018/07/31 08:34:41 http: TLS handshake error from 10.129.0.1:46182: EOF 2018/07/31 08:34:51 http: TLS handshake error from 10.129.0.1:46192: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:32:22.793324Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:32:22.793766Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:32:23.151607Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74" level=info timestamp=2018-07-31T08:32:23.231016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisvg74\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisvg74" level=info timestamp=2018-07-31T08:32:53.523366Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:32:53.523995Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:32:53.717467Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2dx72\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2dx72" level=info timestamp=2018-07-31T08:33:24.187407Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:33:24.196781Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:33:24.438571Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z" level=info timestamp=2018-07-31T08:33:24.603466Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibk64z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibk64z" level=info timestamp=2018-07-31T08:33:55.039897Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:33:55.140458Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:34:26.171617Z pos=preset.go:142 component=virt-controller 
service=http namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:34:26.172466Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:34:44.647634Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="No update processing required" level=error timestamp=2018-07-31T08:34:44.703563Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6h2c4\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-31T08:34:44.705453Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6h2c4\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6h2c4" level=info timestamp=2018-07-31T08:34:44.719210Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: true\n" level=info timestamp=2018-07-31T08:34:44.719549Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:34:44.719766Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:44.719991Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:44.720459Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:34:44.724859Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:44.725262Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: true\n" level=info timestamp=2018-07-31T08:34:44.725437Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:34:44.725635Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:44.725821Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:44.726133Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:34:44.730724Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
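
The "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ...: the object has been modified" errors above are ordinary optimistic-concurrency conflicts: the API server rejects a write made against a stale resourceVersion with HTTP 409, and the handler re-enqueues the key so the next pass works from the latest copy. A runnable sketch of that pattern using client-go's workqueue; the wiring is illustrative rather than KubeVirt's exact code:

package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/client-go/util/workqueue"
)

// process runs one update attempt and decides whether to retry the key.
func process(queue workqueue.RateLimitingInterface, key string, update func() error) {
	err := update()
	switch {
	case err == nil:
		queue.Forget(key) // success: reset any rate-limit backoff
	case apierrors.IsConflict(err):
		// Someone updated the object first; re-enqueue and re-read it
		// on the next pass instead of overwriting their change.
		fmt.Printf("re-enqueuing VirtualMachineInstance %s: %v\n", key, err)
		queue.AddRateLimited(key)
	default:
		fmt.Printf("updating %s failed: %v\n", key, err)
	}
}

func main() {
	queue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
	defer queue.ShutDown()

	// Simulate the 409 the API server returned in the log above.
	conflict := apierrors.NewConflict(
		schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"},
		"testvmi6h2c4",
		fmt.Errorf("the object has been modified; please apply your changes to the latest version and try again"),
	)
	process(queue, "kubevirt-test-default/testvmi6h2c4", func() error { return conflict })
}
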
Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:34:12.997360Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind=Domain uid=64835040-949c-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-31T08:34:13.005488Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:34:13.009531Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:13.018681Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicfssj, existing: true\n" level=info timestamp=2018-07-31T08:34:13.018796Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-31T08:34:13.018828Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:13.018856Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:13.018959Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:34:13.142739Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:13.168199Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicfssj, existing: true\n" level=info timestamp=2018-07-31T08:34:13.178856Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:34:13.179027Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:13.179056Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:13.179151Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:34:13.226802Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
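
The "Expected error: ... not to have occurred" summaries in this run are Gomega assertions failing on a server-side timeout: instead of completing the request, the API server answered with a 504 StatusError whose Reason is Timeout. A small sketch that reconstructs that error shape from the failure text and classifies it the way client code usually would; only apimachinery is assumed:

package main

import (
	"fmt"
	"net/http"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// The error dumped in the failure summary, rebuilt field by field.
	err := &apierrors.StatusError{ErrStatus: metav1.Status{
		Status:  metav1.StatusFailure,
		Message: "Timeout: request did not complete within allowed duration",
		Reason:  metav1.StatusReasonTimeout,
		Code:    http.StatusGatewayTimeout, // 504
	}}

	// Callers normally classify it by Reason rather than by message text.
	fmt.Println("server timeout:", apierrors.IsTimeout(err)) // true
}
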
Pod name: virt-launcher-testvmi2dx72-fmt8f Pod phase: Running level=info timestamp=2018-07-31T08:33:10.757651Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:33:11.357726Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:33:11.371673Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:11.717085Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:33:11.796653Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:11.807768Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID bba4b181-ab8b-45e3-b733-daa05c8771dc" level=info timestamp=2018-07-31T08:33:11.818682Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:11.823179Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:33:11.833181Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:33:11.859120Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Domain started." level=info timestamp=2018-07-31T08:33:11.870223Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:11.875836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:11.921889Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:12.099578Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:12.837713Z pos=monitor.go:222 component=virt-launcher msg="Found PID for bba4b181-ab8b-45e3-b733-daa05c8771dc: 187" Pod name: virt-launcher-testvmi4zbkh-xlrpv Pod phase: Pending Pod name: virt-launcher-testvmi6h2c4-ksdlv Pod phase: Running level=info timestamp=2018-07-31T08:34:44.094519Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:34:44.100037Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 541754d8-0ba4-44bc-8805-e8105f22f72a" level=info timestamp=2018-07-31T08:34:44.102177Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:34:44.127625Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:44.435124Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:34:44.509709Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:34:44.513831Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:34:44.518732Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:34:44.543367Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:44.543548Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:34:44.554595Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:34:44.563010Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:44.724143Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:34:44.729470Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:34:45.112198Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 541754d8-0ba4-44bc-8805-e8105f22f72a: 181" Pod name: virt-launcher-testvmibk64z-kmqsg Pod phase: Running level=info timestamp=2018-07-31T08:33:41.804648Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain defined." level=info timestamp=2018-07-31T08:33:43.114367Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:33:43.119220Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f6f02e34-fdfd-41ba-bf78-c8d87c6eef64" level=info timestamp=2018-07-31T08:33:43.119578Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:33:43.128339Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:43.579255Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:33:43.613431Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:43.633989Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:33:43.640532Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:43.649479Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:43.650273Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:33:43.695970Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:33:43.713697Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:33:43.825698Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:33:44.151968Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f6f02e34-fdfd-41ba-bf78-c8d87c6eef64: 194" Pod name: virt-launcher-testvmicfssj-lgntl Pod phase: Running level=info timestamp=2018-07-31T08:34:12.118179Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-31T08:34:12.640217Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:34:12.655078Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 52c51977-ea80-44ee-b92a-71e5ced8a08a" level=info timestamp=2018-07-31T08:34:12.655817Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:34:12.659822Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:12.935144Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:34:12.954164Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:34:12.966676Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:34:12.968722Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:34:12.983212Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:12.983848Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-31T08:34:12.999384Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-31T08:34:13.006707Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:34:13.193679Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-31T08:34:13.672718Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 52c51977-ea80-44ee-b92a-71e5ced8a08a: 184" Pod name: virt-launcher-testvmisvg74-mn7dc Pod phase: Running level=info timestamp=2018-07-31T08:32:40.078298Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-31T08:32:40.121722Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-31T08:32:40.219732Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f2e97e9b-ecfd-4f72-a77f-71928d2390fd" level=info timestamp=2018-07-31T08:32:40.311905Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-31T08:32:40.665727Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-31T08:32:40.717882Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-31T08:32:40.718672Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.726802Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:40.842672Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.843072Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.907700Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.916305Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:41.032566Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.047744Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.332756Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f2e97e9b-ecfd-4f72-a77f-71928d2390fd: 182"

• Failure in Spec Setup (BeforeEach) [31.694 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VMI replica set
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:253
    Expose ClusterIP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:286
      Should create a ClusterIP service on VMRS and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:290

      Expected error:
          <*errors.StatusError | 0xc42016f3b0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:264
------------------------------
STEP: Creating a VMRS object with 2 replicas
STEP: Start the replica set
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:34:43.128096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:34:44 http: TLS handshake error from 10.128.0.1:51820: EOF
level=info timestamp=2018-07-31T08:34:44.767395Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:34:54 http: TLS handshake error from 10.128.0.1:51866: EOF
level=info timestamp=2018-07-31T08:34:55.061598Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:35:04 http: TLS
handshake error from 10.128.0.1:51912: EOF level=info timestamp=2018-07-31T08:35:05.357794Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:35:08.652021Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:35:12.007905Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-31T08:35:13.952486Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:35:13.954379Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:35:14 http: TLS handshake error from 10.128.0.1:51962: EOF level=info timestamp=2018-07-31T08:35:15.421737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:35:24 http: TLS handshake error from 10.128.0.1:52008: EOF level=info timestamp=2018-07-31T08:35:25.494352Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:33:11 http: TLS handshake error from 10.129.0.1:46090: EOF 2018/07/31 08:33:21 http: TLS handshake error from 10.129.0.1:46102: EOF 2018/07/31 08:33:31 http: TLS handshake error from 10.129.0.1:46112: EOF level=info timestamp=2018-07-31T08:33:41.325574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:33:41 http: TLS handshake error from 10.129.0.1:46122: EOF 2018/07/31 08:33:51 http: TLS handshake error from 10.129.0.1:46132: EOF 2018/07/31 08:34:02 http: TLS handshake error from 10.129.0.1:46142: EOF 2018/07/31 08:34:11 http: TLS handshake error from 10.129.0.1:46152: EOF 2018/07/31 08:34:21 http: TLS handshake error from 10.129.0.1:46162: EOF 2018/07/31 08:34:31 http: TLS handshake error from 10.129.0.1:46172: EOF 2018/07/31 08:34:41 http: TLS handshake error from 10.129.0.1:46182: EOF 2018/07/31 08:34:51 http: TLS handshake error from 10.129.0.1:46192: EOF 2018/07/31 08:35:01 http: TLS handshake error from 10.129.0.1:46202: EOF 2018/07/31 08:35:11 http: TLS handshake error from 10.129.0.1:46212: EOF 2018/07/31 08:35:21 http: TLS handshake error from 10.129.0.1:46222: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:34:26.172466Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=error timestamp=2018-07-31T08:35:27.858683Z pos=replicaset.go:225 component=virt-controller service=http namespace=kubevirt-test-default name=replicasetrz26n kind= 
uid=89ac8b7d-949c-11e8-8b5c-525500d15501 msg="Scaling the replicaset failed." level=info timestamp=2018-07-31T08:35:28.074540Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-31T08:35:28.075250Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmicd9czkptmj" level=info timestamp=2018-07-31T08:35:28.075692Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:35:28.076417Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:35:28.105423Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-31T08:35:28.105632Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmicd9czfzqk7" level=info timestamp=2018-07-31T08:35:28.105685Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:35:28.106155Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:35:28.577829Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czkptmj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicd9czkptmj" level=info timestamp=2018-07-31T08:35:28.676681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czfzqk7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicd9czfzqk7" level=info timestamp=2018-07-31T08:35:29.073997Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:35:29.074250Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-31T08:35:29.074329Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" Pod name: 
virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:34:44.647634Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="No update processing required" level=error timestamp=2018-07-31T08:34:44.703563Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6h2c4\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-31T08:34:44.705453Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6h2c4\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6h2c4" level=info timestamp=2018-07-31T08:34:44.719210Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: true\n" level=info timestamp=2018-07-31T08:34:44.719549Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:34:44.719766Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:44.719991Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:44.720459Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:34:44.724859Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:44.725262Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: true\n" level=info timestamp=2018-07-31T08:34:44.725437Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:34:44.725635Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:44.725821Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:44.726133Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:34:44.730724Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
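
The "Looking for VirtualMachineInstance Ref" / "Cant find the matching VM" pair in the virt-controller log above is the VM controller asking whether a VirtualMachine owns a given VMI, which in Kubernetes is answered by the controller owner reference. VMIs created by a replica set, as in this test, carry a replica-set owner instead, so the lookup legitimately comes up empty. A sketch of that lookup under those assumptions; the helper and the sample object are illustrative:

package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// controllingVM returns the owning VirtualMachine's name, or "" if the VMI
// is controlled by something else (or by nothing at all).
func controllingVM(meta metav1.Object) string {
	if ref := metav1.GetControllerOf(meta); ref != nil && ref.Kind == "VirtualMachine" {
		return ref.Name
	}
	return ""
}

func main() {
	isController := true
	// A VMI owned by a replica set rather than a VirtualMachine.
	vmi := &metav1.ObjectMeta{
		Name: "testvmicd9czkptmj",
		OwnerReferences: []metav1.OwnerReference{{
			APIVersion: "kubevirt.io/v1alpha2",
			Kind:       "VirtualMachineInstanceReplicaSet",
			Name:       "replicasetrz26n",
			Controller: &isController,
		}},
	}
	fmt.Printf("controlling VM: %q\n", controllingVM(vmi)) // ""
}
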
Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:34:12.997360Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind=Domain uid=64835040-949c-11e8-8b5c-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-31T08:34:13.005488Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-31T08:34:13.009531Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:13.018681Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicfssj, existing: true\n" level=info timestamp=2018-07-31T08:34:13.018796Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-31T08:34:13.018828Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:13.018856Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:13.018959Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="No update processing required" level=info timestamp=2018-07-31T08:34:13.142739Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:34:13.168199Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicfssj, existing: true\n" level=info timestamp=2018-07-31T08:34:13.178856Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-31T08:34:13.179027Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:34:13.179056Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-31T08:34:13.179151Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-31T08:34:13.226802Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." 
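
The recurring "http: TLS handshake error from <addr>: EOF" lines in the virt-api logs are what Go's net/http server prints when a client opens a TCP connection to the TLS port and then closes it without completing a handshake; plain TCP health checks and port probes are a common and usually benign cause, which would fit their steady ten-second cadence here. A self-contained reproduction using only the standard library:

package main

import (
	"net"
	"net/http"
	"net/http/httptest"
	"time"
)

func main() {
	// A throwaway HTTPS server, standing in for virt-api's TLS endpoint.
	srv := httptest.NewTLSServer(http.HandlerFunc(
		func(w http.ResponseWriter, r *http.Request) {}))
	defer srv.Close()

	// Connect with raw TCP and hang up without speaking TLS, the way a
	// TCP socket probe does.
	conn, err := net.Dial("tcp", srv.Listener.Addr().String())
	if err != nil {
		panic(err)
	}
	conn.Close()

	// Give the server a moment to log:
	//   http: TLS handshake error from 127.0.0.1:<port>: EOF
	time.Sleep(200 * time.Millisecond)
}
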
Pod name: virt-launcher-testvmi2dx72-fmt8f Pod phase: Running
level=info timestamp=2018-07-31T08:33:10.757651Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.357726Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:33:11.371673Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:11.717085Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.796653Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:11.807768Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID bba4b181-ab8b-45e3-b733-daa05c8771dc"
level=info timestamp=2018-07-31T08:33:11.818682Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:11.823179Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:33:11.833181Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:33:11.859120Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:33:11.870223Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:11.875836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:11.921889Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:12.099578Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2dx72 kind= uid=3fdc9132-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:12.837713Z pos=monitor.go:222 component=virt-launcher msg="Found PID for bba4b181-ab8b-45e3-b733-daa05c8771dc: 187"

Pod name: virt-launcher-testvmi4zbkh-xlrpv Pod phase: Pending

Pod name: virt-launcher-testvmi6h2c4-ksdlv Pod phase: Running
level=info timestamp=2018-07-31T08:34:44.094519Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:34:44.100037Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 541754d8-0ba4-44bc-8805-e8105f22f72a"
level=info timestamp=2018-07-31T08:34:44.102177Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:34:44.127625Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:34:44.435124Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:34:44.509709Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:34:44.513831Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:34:44.518732Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:34:44.543367Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:34:44.543548Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:34:44.554595Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:34:44.563010Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:34:44.724143Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:34:44.729470Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:34:45.112198Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 541754d8-0ba4-44bc-8805-e8105f22f72a: 181"

Pod name: virt-launcher-testvmibk64z-kmqsg Pod phase: Running
level=info timestamp=2018-07-31T08:33:41.804648Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-31T08:33:43.114367Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:33:43.119220Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f6f02e34-fdfd-41ba-bf78-c8d87c6eef64"
level=info timestamp=2018-07-31T08:33:43.119578Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:33:43.128339Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:43.579255Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:33:43.613431Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:43.633989Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:33:43.640532Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:43.649479Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:43.650273Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:33:43.695970Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:33:43.713697Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:33:43.825698Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibk64z kind= uid=5221209c-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:33:44.151968Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f6f02e34-fdfd-41ba-bf78-c8d87c6eef64: 194"

Pod name: virt-launcher-testvmicd9czfzqk7-msclx Pod phase: Pending

Pod name: virt-launcher-testvmicd9czkptmj-989xl Pod phase: Pending

Pod name: virt-launcher-testvmicfssj-lgntl Pod phase: Running
level=info timestamp=2018-07-31T08:34:12.118179Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-31T08:34:12.640217Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:34:12.655078Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 52c51977-ea80-44ee-b92a-71e5ced8a08a"
level=info timestamp=2018-07-31T08:34:12.655817Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:34:12.659822Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:34:12.935144Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:34:12.954164Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:34:12.966676Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:34:12.968722Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:34:12.983212Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:34:12.983848Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:34:12.999384Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:34:13.006707Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:34:13.193679Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicfssj kind= uid=64835040-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:34:13.672718Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 52c51977-ea80-44ee-b92a-71e5ced8a08a: 184"

Pod name: virt-launcher-testvmisvg74-mn7dc Pod phase: Running
level=info timestamp=2018-07-31T08:32:40.078298Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-31T08:32:40.121722Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.219732Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f2e97e9b-ecfd-4f72-a77f-71928d2390fd"
level=info timestamp=2018-07-31T08:32:40.311905Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-31T08:32:40.665727Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.717882Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Domain started."
level=info timestamp=2018-07-31T08:32:40.718672Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.726802Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:40.842672Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:40.843072Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-31T08:32:40.907700Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-31T08:32:40.916305Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-31T08:32:41.032566Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.047744Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisvg74 kind= uid=2d83e30a-949c-11e8-8b5c-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-31T08:32:41.332756Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f2e97e9b-ecfd-4f72-a77f-71928d2390fd: 182"

• Failure in Spec Setup (BeforeEach) [31.655 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on an VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:318
    Expose ClusterIP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:362
      Connect to ClusterIP services that was set when VM was offline
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:363

      Expected error:
          <*errors.StatusError | 0xc420a12750>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:331
------------------------------
STEP: Creating an VM object
STEP: Creating the VM

Pod name: disks-images-provider-mwfrr Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw Pod phase: Running
2018/07/31 08:35:14 http: TLS handshake error from 10.128.0.1:51962: EOF
level=info timestamp=2018-07-31T08:35:15.421737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:35:24 http: TLS handshake error from 10.128.0.1:52008: EOF
level=info timestamp=2018-07-31T08:35:25.494352Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:35:33.055352Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:35:33.063173Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/31 08:35:34 http: TLS handshake error from 10.128.0.1:52054: EOF
level=info timestamp=2018-07-31T08:35:35.732346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:35:38.883760Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:35:44.231945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:35:44.246523Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:35:44 http: TLS handshake error from 10.128.0.1:52104: EOF
level=info timestamp=2018-07-31T08:35:45.939023Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:35:54 http: TLS handshake error from 10.128.0.1:52156: EOF
level=info timestamp=2018-07-31T08:35:56.181945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jzf2l Pod phase: Running
2018/07/31 08:33:51 http: TLS handshake error from 10.129.0.1:46132: EOF
2018/07/31 08:34:02 http: TLS handshake error from 10.129.0.1:46142: EOF
2018/07/31 08:34:11 http: TLS handshake error from 10.129.0.1:46152: EOF
2018/07/31 08:34:21 http: TLS handshake error from 10.129.0.1:46162: EOF
2018/07/31 08:34:31 http: TLS handshake error from 10.129.0.1:46172: EOF
2018/07/31 08:34:41 http: TLS handshake error from 10.129.0.1:46182: EOF
2018/07/31 08:34:51 http: TLS handshake error from 10.129.0.1:46192: EOF
2018/07/31 08:35:01 http: TLS handshake error from 10.129.0.1:46202: EOF
2018/07/31 08:35:11 http: TLS handshake error from 10.129.0.1:46212: EOF
2018/07/31 08:35:21 http: TLS handshake error from 10.129.0.1:46222: EOF
2018/07/31 08:35:31 http: TLS handshake error from 10.129.0.1:46232: EOF
level=info timestamp=2018-07-31T08:35:41.369305Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:35:41 http: TLS handshake error from 10.129.0.1:46242: EOF
2018/07/31 08:35:51 http: TLS handshake error from 10.129.0.1:46252: EOF
2018/07/31 08:36:01 http: TLS handshake error from 10.129.0.1:46262: EOF

Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running
level=info timestamp=2018-07-31T08:35:28.074540Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-31T08:35:28.075250Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmicd9czkptmj"
level=info timestamp=2018-07-31T08:35:28.075692Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:35:28.076417Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czkptmj kind= uid=9bd7148e-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:35:28.105423Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-31T08:35:28.105632Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmicd9czfzqk7"
level=info timestamp=2018-07-31T08:35:28.105685Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:35:28.106155Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:35:28.577829Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czkptmj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicd9czkptmj"
level=info timestamp=2018-07-31T08:35:28.676681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czfzqk7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicd9czfzqk7"
level=info timestamp=2018-07-31T08:35:29.073997Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-31T08:35:29.074250Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: false"
level=info timestamp=2018-07-31T08:35:29.074329Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil"
level=info timestamp=2018-07-31T08:36:02.448251Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:36:02.449049Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running
level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-gfmtm Pod phase: Running
level=info timestamp=2018-07-31T08:35:32.199662Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Signaled deletion for testvmi6h2c4"
level=info timestamp=2018-07-31T08:35:32.209441Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:35:32.264737Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: false\n"
level=info timestamp=2018-07-31T08:35:32.265557Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-31T08:35:32.265641Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-31T08:35:32.265953Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-31T08:35:32.266125Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-31T08:35:32.267884Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6h2c4"
level=info timestamp=2018-07-31T08:35:32.270254Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:35:32.280615Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-31T08:35:32.281697Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-31T08:35:32.305072Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: false\n"
level=info timestamp=2018-07-31T08:35:32.305142Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:35:32.305225Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:35:32.305492Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
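The Expose failure above is not an assertion failure in the test body: the apiserver itself answered the create request with HTTP 504 (Reason: Timeout), so the BeforeEach aborted. When a suite wants to tolerate a transiently overloaded apiserver, one common approach is to classify the StatusError and retry only server-side timeouts. A minimal sketch, assuming a hypothetical createVMI function in place of the real client call:

    package sketch

    import (
    	"fmt"
    	"time"

    	apierrors "k8s.io/apimachinery/pkg/api/errors"
    	"k8s.io/apimachinery/pkg/util/wait"
    )

    // createVMI is a hypothetical stand-in for the client call that failed
    // with "Timeout: request did not complete within allowed duration".
    func createVMI() error { return nil }

    // createWithRetry retries only when the apiserver reports a
    // server-side timeout (the 504 / Reason=Timeout StatusError in the
    // log); any other error fails fast.
    func createWithRetry() error {
    	return wait.PollImmediate(2*time.Second, 30*time.Second, func() (bool, error) {
    		err := createVMI()
    		switch {
    		case err == nil:
    			return true, nil // done
    		case apierrors.IsTimeout(err) || apierrors.IsServerTimeout(err):
    			fmt.Println("apiserver timed out, retrying:", err)
    			return false, nil // poll again
    		default:
    			return false, err // give up on anything else
    		}
    	})
    }

Whether masking such timeouts is desirable in CI is a judgment call; here the repeated 504s are themselves the signal that the cluster is unhealthy.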
Pod name: virt-handler-l7664 Pod phase: Running
level=info timestamp=2018-07-31T08:35:32.065480Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-31T08:35:32.065788Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: false\n"
level=info timestamp=2018-07-31T08:35:32.065847Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:35:32.066087Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:35:32.066493Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:35:32.068442Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: false\n"
level=info timestamp=2018-07-31T08:35:32.068553Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:35:32.068688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:35:32.072059Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-31T08:35:32.140682Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-31T08:35:32.141118Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-31T08:35:32.141243Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibk64z, existing: false\n"
level=info timestamp=2018-07-31T08:35:32.141294Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-31T08:35:32.141443Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-31T08:35:32.141858Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
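Further down, the virt-controller logs contain entries of the form "StorageError: invalid object ... Precondition failed: UID in precondition: <uid>, UID in object meta:": a write carried a UID precondition but the object had already been torn down, so its UID no longer matched. UID preconditions are how a client pins an operation to one specific incarnation of an object; a short, runnable sketch of how they are expressed with apimachinery types follows (the UID is copied from the log purely as sample data).

    package main

    import (
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
    	// Scope a delete to one specific incarnation of an object: if the
    	// object was deleted (or deleted and recreated with a new UID), the
    	// apiserver rejects the request with "Precondition failed: UID in
    	// precondition ...", the same shape as the StorageError in the
    	// controller log below.
    	uid := "b0782bfa-949c-11e8-8b5c-525500d15501" // sample UID from the log
    	opts := metav1.DeleteOptions{
    		Preconditions: metav1.NewUIDPreconditions(uid),
    	}
    	fmt.Printf("delete options with UID precondition: %+v\n", *opts.Preconditions)
    }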
Pod name: virt-launcher-testvmi2w2vl-tdn9w Pod phase: Pending • Failure [32.267 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should successfully start with hook sidecar annotation [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 Expected error: <*errors.StatusError | 0xc42016fd40>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:63 ------------------------------ STEP: Starting a VMI Pod name: disks-images-provider-mwfrr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-zbzrs Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5k8nw Pod phase: Running level=info timestamp=2018-07-31T08:35:56.181945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:36:04 http: TLS handshake error from 10.128.0.1:52202: EOF level=info timestamp=2018-07-31T08:36:06.357114Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:09.053602Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:14.524221Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:14.535265Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:36:14 http: TLS handshake error from 10.128.0.1:52252: EOF level=info timestamp=2018-07-31T08:36:15.366878Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:15.404384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:15.463415Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:16.553581Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" 
proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:36:24 http: TLS handshake error from 10.128.0.1:52298: EOF level=info timestamp=2018-07-31T08:36:26.755484Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:33.075696Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-31T08:36:33.092107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:34:31 http: TLS handshake error from 10.129.0.1:46172: EOF 2018/07/31 08:34:41 http: TLS handshake error from 10.129.0.1:46182: EOF 2018/07/31 08:34:51 http: TLS handshake error from 10.129.0.1:46192: EOF 2018/07/31 08:35:01 http: TLS handshake error from 10.129.0.1:46202: EOF 2018/07/31 08:35:11 http: TLS handshake error from 10.129.0.1:46212: EOF 2018/07/31 08:35:21 http: TLS handshake error from 10.129.0.1:46222: EOF 2018/07/31 08:35:31 http: TLS handshake error from 10.129.0.1:46232: EOF level=info timestamp=2018-07-31T08:35:41.369305Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:35:41 http: TLS handshake error from 10.129.0.1:46242: EOF 2018/07/31 08:35:51 http: TLS handshake error from 10.129.0.1:46252: EOF 2018/07/31 08:36:01 http: TLS handshake error from 10.129.0.1:46262: EOF level=info timestamp=2018-07-31T08:36:11.285914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:36:11 http: TLS handshake error from 10.129.0.1:46272: EOF 2018/07/31 08:36:21 http: TLS handshake error from 10.129.0.1:46284: EOF 2018/07/31 08:36:31 http: TLS handshake error from 10.129.0.1:46294: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:35:28.105423Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-31T08:35:28.105632Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmicd9czfzqk7" level=info timestamp=2018-07-31T08:35:28.105685Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:35:28.106155Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicd9czfzqk7 kind= uid=9bd51987-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:35:28.577829Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czkptmj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing 
VirtualMachineInstance kubevirt-test-default/testvmicd9czkptmj" level=info timestamp=2018-07-31T08:35:28.676681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czfzqk7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicd9czfzqk7" level=info timestamp=2018-07-31T08:35:29.073997Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:35:29.074250Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-31T08:35:29.074329Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-31T08:36:02.448251Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:36:02.449049Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:36:03.040099Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2w2vl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2w2vl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b0782bfa-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2w2vl" level=info timestamp=2018-07-31T08:36:33.493108Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimx9ct kind= uid=c2ee4d0e-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:36:33.493810Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimx9ct kind= uid=c2ee4d0e-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:36:33.683442Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimx9ct\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimx9ct" Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:35:32.199662Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Signaled deletion for testvmi6h2c4" level=info timestamp=2018-07-31T08:35:32.209441Z pos=vm.go:440 component=virt-handler 
namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:35:32.264737Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: false\n" level=info timestamp=2018-07-31T08:35:32.265557Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:35:32.265641Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-31T08:35:32.265953Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-31T08:35:32.266125Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-31T08:35:32.267884Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6h2c4" level=info timestamp=2018-07-31T08:35:32.270254Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:35:32.280615Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-31T08:35:32.281697Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:35:32.305072Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: false\n" level=info timestamp=2018-07-31T08:35:32.305142Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.305225Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:35:32.305492Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:35:32.065480Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:35:32.065788Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: false\n" level=info timestamp=2018-07-31T08:35:32.065847Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.066087Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:35:32.066493Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-31T08:35:32.068442Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: false\n" level=info timestamp=2018-07-31T08:35:32.068553Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.068688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:35:32.072059Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:35:32.140682Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-31T08:35:32.141118Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:35:32.141243Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibk64z, existing: false\n" level=info timestamp=2018-07-31T08:35:32.141294Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.141443Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:35:32.141858Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmimx9ct-s54rr Pod phase: Pending • Failure [31.057 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 Expected error: <*errors.StatusError | 0xc420a13dd0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:70 ------------------------------ STEP: Getting hook-sidecar logs Pod name: disks-images-provider-mwfrr Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-zbzrs Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5k8nw Pod phase: Running level=info timestamp=2018-07-31T08:36:33.075696Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-31T08:36:33.092107Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/31 08:36:34 http: TLS handshake error from 10.128.0.1:52344: EOF level=info timestamp=2018-07-31T08:36:36.961910Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:39.320153Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:36:44 http: TLS handshake error from 10.128.0.1:52394: EOF level=info timestamp=2018-07-31T08:36:44.821389Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:44.833683Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:36:47.263203Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:36:54 http: TLS handshake error from 10.128.0.1:52440: EOF level=info timestamp=2018-07-31T08:36:57.400027Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:37:03.685725Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:37:03.720204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-31T08:37:03.779432Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/31 08:37:04 http: TLS handshake error from 10.128.0.1:52488: EOF Pod name: virt-api-7d79764579-jzf2l Pod phase: Running 2018/07/31 08:35:11 http: TLS handshake error from 10.129.0.1:46212: EOF 2018/07/31 08:35:21 http: TLS handshake error from 10.129.0.1:46222: EOF 2018/07/31 08:35:31 http: TLS handshake error from 10.129.0.1:46232: EOF level=info timestamp=2018-07-31T08:35:41.369305Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:35:41 http: TLS handshake error from 10.129.0.1:46242: EOF 2018/07/31 08:35:51 http: TLS handshake error from 10.129.0.1:46252: EOF 2018/07/31 08:36:01 http: TLS handshake error from 10.129.0.1:46262: EOF level=info timestamp=2018-07-31T08:36:11.285914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/31 08:36:11 http: TLS handshake error from 10.129.0.1:46272: EOF 2018/07/31 08:36:21 http: TLS handshake error from 10.129.0.1:46284: EOF 2018/07/31 08:36:31 http: TLS handshake error from 10.129.0.1:46294: EOF level=info timestamp=2018-07-31T08:36:41.049210Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 
contentLength=19 2018/07/31 08:36:41 http: TLS handshake error from 10.129.0.1:46304: EOF 2018/07/31 08:36:51 http: TLS handshake error from 10.129.0.1:46314: EOF 2018/07/31 08:37:01 http: TLS handshake error from 10.129.0.1:46324: EOF Pod name: virt-controller-7d57d96b65-c55ct Pod phase: Running level=info timestamp=2018-07-31T08:35:28.577829Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czkptmj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicd9czkptmj" level=info timestamp=2018-07-31T08:35:28.676681Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicd9czfzqk7\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicd9czfzqk7" level=info timestamp=2018-07-31T08:35:29.073997Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-31T08:35:29.074250Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2t6td kind= uid=9c913be7-949c-11e8-8b5c-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-31T08:35:29.074329Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-31T08:36:02.448251Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:36:02.449049Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:36:03.040099Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2w2vl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2w2vl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b0782bfa-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2w2vl" level=info timestamp=2018-07-31T08:36:33.493108Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimx9ct kind= uid=c2ee4d0e-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:36:33.493810Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimx9ct kind= uid=c2ee4d0e-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:36:33.683442Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimx9ct\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmimx9ct" level=info timestamp=2018-07-31T08:36:34.160198Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimx9ct\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimx9ct, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2ee4d0e-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimx9ct" level=info timestamp=2018-07-31T08:37:04.404410Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijmlh4 kind= uid=d5636fb5-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:37:04.408790Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijmlh4 kind= uid=d5636fb5-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:37:04.724173Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijmlh4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijmlh4" Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info timestamp=2018-07-31T08:35:32.199662Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Signaled deletion for testvmi6h2c4" level=info timestamp=2018-07-31T08:35:32.209441Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind= uid=770b83e5-949c-11e8-8b5c-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:35:32.264737Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: false\n" level=info timestamp=2018-07-31T08:35:32.265557Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-31T08:35:32.265641Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-31T08:35:32.265953Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-31T08:35:32.266125Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-31T08:35:32.267884Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6h2c4" level=info timestamp=2018-07-31T08:35:32.270254Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-31T08:35:32.280615Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-31T08:35:32.281697Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:35:32.305072Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6h2c4, existing: false\n" level=info timestamp=2018-07-31T08:35:32.305142Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.305225Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:35:32.305492Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6h2c4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-l7664 Pod phase: Running level=info timestamp=2018-07-31T08:35:32.065480Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:35:32.065788Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: false\n" level=info timestamp=2018-07-31T08:35:32.065847Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.066087Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:35:32.066493Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:35:32.068442Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisvg74, existing: false\n" level=info timestamp=2018-07-31T08:35:32.068553Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.068688Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-31T08:35:32.072059Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmisvg74 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-31T08:35:32.140682Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-31T08:35:32.141118Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-31T08:35:32.141243Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibk64z, existing: false\n" level=info timestamp=2018-07-31T08:35:32.141294Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-31T08:35:32.141443Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-31T08:35:32.141858Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibk64z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmijmlh4-qtg62
Pod phase: Pending

• Failure [31.009 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should update domain XML with SM BIOS properties [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83

      Expected error:
          <*errors.errorString | 0xc4209f99b0>: {
              s: "resource name may not be empty",
          }
          resource name may not be empty
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1045
------------------------------
STEP: Reading domain XML using virsh
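For context on the failure above: "resource name may not be empty" is the error client-go's request builder returns when a request is built with an empty resource name, typically because the lookup of the virt-launcher pod came back empty (the pod above is still Pending). A minimal sketch of how that error class arises; the kubeconfig path is illustrative, and this is not the actual code at tests/utils.go:1045:

    package main

    import (
        "fmt"

        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Illustrative kubeconfig path; adjust for your environment.
        cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        // An empty Name() is rejected client-side, before any network call:
        err = client.CoreV1().RESTClient().Get().
            Namespace("kubevirt-test-default").
            Resource("pods").
            Name(""). // -> "resource name may not be empty"
            Do().
            Error()
        fmt.Println(err)
    }

Because the check happens in the request builder, the error says nothing about the cluster's state; it only tells you the caller never obtained a pod name to query.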
Pod name: disks-images-provider-mwfrr
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-zbzrs
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
2018/07/31 08:36:54 http: TLS handshake error from 10.128.0.1:52440: EOF
level=info timestamp=2018-07-31T08:36:57.400027Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:03.685725Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:03.720204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:03.779432Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:37:04 http: TLS handshake error from 10.128.0.1:52488: EOF
level=info timestamp=2018-07-31T08:37:07.507853Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:09.571895Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:37:14 http: TLS handshake error from 10.128.0.1:52538: EOF
level=info timestamp=2018-07-31T08:37:15.195329Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:15.196317Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:17.590189Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:37:24 http: TLS handshake error from 10.128.0.1:52584: EOF
level=info timestamp=2018-07-31T08:37:27.714103Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:37:34 http: TLS handshake error from 10.128.0.1:52630: EOF

Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:35:41 http: TLS handshake error from 10.129.0.1:46242: EOF
2018/07/31 08:35:51 http: TLS handshake error from 10.129.0.1:46252: EOF
2018/07/31 08:36:01 http: TLS handshake error from 10.129.0.1:46262: EOF
level=info timestamp=2018-07-31T08:36:11.285914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:36:11 http: TLS handshake error from 10.129.0.1:46272: EOF
2018/07/31 08:36:21 http: TLS handshake error from 10.129.0.1:46284: EOF
2018/07/31 08:36:31 http: TLS handshake error from 10.129.0.1:46294: EOF
level=info timestamp=2018-07-31T08:36:41.049210Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:36:41 http: TLS handshake error from 10.129.0.1:46304: EOF
2018/07/31 08:36:51 http: TLS handshake error from 10.129.0.1:46314: EOF
2018/07/31 08:37:01 http: TLS handshake error from 10.129.0.1:46324: EOF
level=info timestamp=2018-07-31T08:37:11.160766Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:37:11 http: TLS handshake error from 10.129.0.1:46334: EOF
2018/07/31 08:37:21 http: TLS handshake error from 10.129.0.1:46344: EOF
2018/07/31 08:37:31 http: TLS handshake error from 10.129.0.1:46354: EOF

Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:35:29.074329Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil"
level=info timestamp=2018-07-31T08:36:02.448251Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:36:02.449049Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2w2vl kind= uid=b0782bfa-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:36:03.040099Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2w2vl\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2w2vl, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b0782bfa-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2w2vl"
level=info timestamp=2018-07-31T08:36:33.493108Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimx9ct kind= uid=c2ee4d0e-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:36:33.493810Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimx9ct kind= uid=c2ee4d0e-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:36:33.683442Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimx9ct\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimx9ct" level=info timestamp=2018-07-31T08:36:34.160198Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimx9ct\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimx9ct, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2ee4d0e-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimx9ct" level=info timestamp=2018-07-31T08:37:04.404410Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijmlh4 kind= uid=d5636fb5-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:37:04.408790Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijmlh4 kind= uid=d5636fb5-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:37:04.724173Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijmlh4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijmlh4" level=info timestamp=2018-07-31T08:37:05.133881Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijmlh4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijmlh4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d5636fb5-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijmlh4" level=info timestamp=2018-07-31T08:37:35.317606Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipjqmm kind= uid=e7cfcc7f-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-31T08:37:35.318463Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipjqmm kind= uid=e7cfcc7f-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-31T08:37:35.671863Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipjqmm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipjqmm" Pod name: virt-controller-7d57d96b65-fpmvw Pod phase: Running level=info timestamp=2018-07-31T08:26:27.274099Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-gfmtm Pod phase: Running level=info 
Pod name: virt-launcher-testvmipjqmm-mqq7m
Pod phase: Pending

• Failure [30.871 seconds]
VMIDefaults
/root/go/src/kubevirt.io/kubevirt/tests/vmidefaults_test.go:33
  Disk defaults
  /root/go/src/kubevirt.io/kubevirt/tests/vmidefaults_test.go:66
    Should be applied to VMIs [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmidefaults_test.go:68

    Expected error:
        <*errors.StatusError | 0xc420a13050>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmidefaults_test.go:71
------------------------------
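The timeout above is a server-side 504 surfaced as a *errors.StatusError. A hedged sketch of how such an error can be classified with k8s.io/apimachinery so a caller can back off rather than fail outright; this is illustrative only, not the assertion at vmidefaults_test.go:71:

    package main

    import (
        "fmt"

        k8serrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
        // Reconstruct the same shape of error the test received from the apiserver.
        err := &k8serrors.StatusError{ErrStatus: metav1.Status{
            Status:  metav1.StatusFailure,
            Message: "Timeout: request did not complete within allowed duration",
            Reason:  metav1.StatusReasonTimeout,
            Code:    504,
        }}

        // apimachinery classifies the error by its Reason field, not its text.
        fmt.Println(k8serrors.IsTimeout(err))           // true
        fmt.Println(k8serrors.SuggestsClientDelay(err)) // suggested delay seconds, ok
    }

The key point is that Reason "Timeout" marks the failure as transient on the server side, which is consistent with other tests in this run passing between the failures.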
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:37:37.849683Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:39.815544Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:37:44 http: TLS handshake error from 10.128.0.1:52680: EOF
level=info timestamp=2018-07-31T08:37:45.513355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:45.512898Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:37:48.027289Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:37:54 http: TLS handshake error from 10.128.0.1:52726: EOF
level=info timestamp=2018-07-31T08:37:58.118415Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:38:04 http: TLS handshake error from 10.128.0.1:52772: EOF

Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
level=info timestamp=2018-07-31T08:37:41.256463Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:37:41 http: TLS handshake error from 10.129.0.1:46364: EOF
2018/07/31 08:37:51 http: TLS handshake error from 10.129.0.1:46374: EOF
2018/07/31 08:38:01 http: TLS handshake error from 10.129.0.1:46384: EOF

Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:37:36.074366Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipjqmm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmipjqmm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e7cfcc7f-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipjqmm"
level=info timestamp=2018-07-31T08:38:06.644693Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin2zwm kind= uid=fa75575c-949c-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:38:06.645247Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin2zwm kind= uid=fa75575c-949c-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-launcher-testvmin2zwm-t77zs
Pod phase: Pending

• Failure [31.316 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    should success [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:75

    Expected
        <*errors.StatusError | 0xc4206f3290>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
    to be nil

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:77
------------------------------
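The recurring "reenqueuing VirtualMachineInstance" lines in the controller logs are the standard controller reaction to update conflicts: log the reason and put the key back on a rate-limited workqueue. A sketch of that generic pattern with a stubbed sync step; this is not virt-controller's actual code at vmi.go:157, and the conflict is fabricated to match the log text:

    package main

    import (
        "fmt"

        k8serrors "k8s.io/apimachinery/pkg/api/errors"
        "k8s.io/apimachinery/pkg/runtime/schema"
        "k8s.io/client-go/util/workqueue"
    )

    // syncVMI stands in for the real reconcile step; here it always reports the
    // optimistic-concurrency conflict seen in the log above.
    func syncVMI(key string) error {
        return k8serrors.NewConflict(
            schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"},
            "testvmijmlh4",
            fmt.Errorf("the object has been modified; please apply your changes to the latest version and try again"),
        )
    }

    func main() {
        queue := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
        key := "kubevirt-test-default/testvmijmlh4"
        if err := syncVMI(key); err != nil {
            // Same behavior the log shows: the conflict is not fatal, the key is
            // simply reenqueued with backoff and reconciled again later.
            fmt.Printf("reenqueuing VirtualMachineInstance %s: %v\n", key, err)
            queue.AddRateLimited(key)
        }
    }

This is why the reenqueue lines are level=info rather than errors: each one resolves on a later pass once the informer cache catches up.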
Pod name: virt-api-7d79764579-5k8nw
Pod phase: Running
level=info timestamp=2018-07-31T08:38:08.281876Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:38:10.102210Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:38:11.899539Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/31 08:38:14 http: TLS handshake error from 10.128.0.1:52822: EOF
level=info timestamp=2018-07-31T08:38:15.908070Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:38:15.907913Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:38:18.513253Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/31 08:38:24 http: TLS handshake error from 10.128.0.1:52868: EOF
level=info timestamp=2018-07-31T08:38:28.731086Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-31T08:38:33.105697Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-31T08:38:33.114003Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/31 08:38:34 http: TLS handshake error from 10.128.0.1:52914: EOF

Pod name: virt-api-7d79764579-jzf2l
Pod phase: Running
2018/07/31 08:38:11 http: TLS handshake error from 10.129.0.1:46394: EOF
2018/07/31 08:38:21 http: TLS handshake error from 10.129.0.1:46404: EOF
2018/07/31 08:38:31 http: TLS handshake error from 10.129.0.1:46414: EOF

Pod name: virt-controller-7d57d96b65-c55ct
Pod phase: Running
level=info timestamp=2018-07-31T08:38:07.327315Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin2zwm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmin2zwm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: fa75575c-949c-11e8-8b5c-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin2zwm"
level=info timestamp=2018-07-31T08:38:37.570054Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi25pdh kind= uid=0ce2a442-949d-11e8-8b5c-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-31T08:38:37.570742Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi25pdh kind= uid=0ce2a442-949d-11e8-8b5c-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-31T08:38:37.875569Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi25pdh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi25pdh"
Pod name: virt-launcher-testvmi25pdh-sxswb
Pod phase: Pending

• Failure [30.959 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    should start it [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:80

    Expected
        <*errors.StatusError | 0xc420b32900>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
    to be nil

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:82
------------------------------
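All four failures in this run are the same transient apiserver 504 at VMI-creation time. One hedged mitigation sketch, assuming Gomega is used standalone: wrap the create call in Eventually so a flaky apiserver does not fail the spec on its first response. Whether retrying is appropriate here is a suite-design question; the snippet only illustrates the mechanics, with a stub standing in for the real create call:

    package main

    import (
        "errors"
        "fmt"
        "time"

        . "github.com/onsi/gomega"
    )

    func main() {
        // Standalone Gomega needs a fail handler registered up front.
        RegisterFailHandler(func(message string, _ ...int) { panic(message) })

        attempts := 0
        createVMI := func() error { // stand-in for the suite's VMI create call
            attempts++
            if attempts < 3 {
                return errors.New("Timeout: request did not complete within allowed duration")
            }
            return nil
        }

        // Poll the create until it succeeds or the 30s budget is exhausted,
        // instead of asserting on the very first error.
        Eventually(createVMI, 30*time.Second, 1*time.Second).Should(Succeed())
        fmt.Println("created after", attempts, "attempts")
    }

The tests that follow pass, which supports the transient-apiserver reading: the same create path succeeds once the 504s stop.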
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-launcher crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:285 should be stopped and have Failed phase /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:286 ------------------------------ • [SLOW TEST:29.231 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:309 should recover and continue management /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:310 ------------------------------ • [SLOW TEST:35.000 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler is responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:340 should indicate that a node is ready for vmis /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:341 ------------------------------ • [SLOW TEST:89.357 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler is not responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:371 the node controller should react /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:410 ------------------------------ • [SLOW TEST:18.656 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with node tainted /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:463 the vmi with tolerations should be scheduled /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:485 ------------------------------ • ------------------------------ • [SLOW TEST:26.093 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-default /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:39.602 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-alternative /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.479 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a 
VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should enable emulation in virt-launcher [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:604 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.248 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should be reflected in domain XML [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:641 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.216 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should request a TUN device but not KVM [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:685 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ •••• ------------------------------ • [SLOW TEST:18.223 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Delete a VirtualMachineInstance's Pod /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837 should result in the VirtualMachineInstance moving to a finalized state /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:838 ------------------------------ • [SLOW TEST:42.490 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869 with an active pod. 
••••
------------------------------
• [SLOW TEST:18.223 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance's Pod
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837
    should result in the VirtualMachineInstance moving to a finalized state
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:838
------------------------------
• [SLOW TEST:42.490 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with an active pod.
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:870
      should result in pod being terminated
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:871
------------------------------
2018/07/31 04:47:27 read closing down: EOF
• [SLOW TEST:40.838 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with ACPI and 0 grace period seconds
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:895
      should result in vmi status failed
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:896
------------------------------
2018/07/31 04:48:12 read closing down: EOF
• [SLOW TEST:47.703 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with ACPI and some grace period seconds
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:920
      should result in vmi status succeeded
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:921
------------------------------
• [SLOW TEST:25.288 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with grace period greater than 0
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:945
      should run graceful shutdown
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:946
------------------------------
• [SLOW TEST:33.441 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:997
    should be in Failed phase
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:998
------------------------------
• [SLOW TEST:29.243 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:997
    should be left alone by virt-handler
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1025
------------------------------
• [SLOW TEST:67.890 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting and stopping the same VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:90
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:91
      should success multiple times
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:92
------------------------------
• [SLOW TEST:18.229 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:111
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:112
      should not modify the spec on status update
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:113
------------------------------
• [SLOW TEST:31.253 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting multiple VMIs
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:129
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:130
      should success
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:131
------------------------------
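The grace-period specs above hinge on spec.terminationGracePeriodSeconds on the VMI: deleting the object sends an ACPI shutdown request to the guest and only destroys the domain once that window expires. A minimal sketch, with the VMI name and the 30-second value made up for illustration:

  kubectl patch vmi testvmi --type=merge \
      -p '{"spec":{"terminationGracePeriodSeconds":30}}'
  kubectl delete vmi testvmi   # guest gets up to 30 s to shut down cleanly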
• [SLOW TEST:42.351 seconds]
CloudInit UserData
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
2018/07/31 04:52:23 read closing down: EOF
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    with cloudInitNoCloud userDataBase64 source
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81
      should have cloud-init data
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82
------------------------------
• [SLOW TEST:102.767 seconds]
CloudInit UserData
2018/07/31 04:54:06 read closing down: EOF
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    with cloudInitNoCloud userDataBase64 source
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81
      with injected ssh-key
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92
        should have ssh-key under authorized keys
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93
------------------------------
2018/07/31 04:54:48 read closing down: EOF
• [SLOW TEST:52.739 seconds]
CloudInit UserData
2018/07/31 04:54:59 read closing down: EOF
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    with cloudInitNoCloud userData source
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118
      should process provided cloud-init data
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119
------------------------------
2018/07/31 04:55:42 read closing down: EOF
• [SLOW TEST:43.136 seconds]
CloudInit UserData
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80
    should take user-data from k8s secret
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162
------------------------------
•volumedisk0
compute
------------------------------
• [SLOW TEST:37.899 seconds]
Configurations
2018/07/31 04:56:20 read closing down: EOF
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with 3 CPU cores
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56
      should report 3 cpu cores under guest OS
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62
------------------------------
•
------------------------------
• [SLOW TEST:20.001 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-2Mi
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
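Whether a hugepages spec can run at all depends on the nodes advertising hugepage capacity, which is exactly what the next skip reports for 1Gi pages. One way to check the nodes from the CLI (illustrative, not part of this run):

  kubectl get nodes -o jsonpath='{range .items[*]}{.metadata.name}{"\t"}{.status.capacity.hugepages-2Mi}{"\n"}{end}'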
S [SKIPPING] [0.731 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-1Gi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
        No node with hugepages hugepages-1Gi capacity
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:216
------------------------------
•
------------------------------
• [SLOW TEST:102.224 seconds]
Configurations
2018/07/31 04:58:26 read closing down: EOF
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model defined
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:340
      should report defined CPU model
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:341
------------------------------
• [SLOW TEST:103.449 seconds]
2018/07/31 05:00:09 read closing down: EOF
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model equals to passthrough
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:368
      should report exactly the same model as node CPU
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:369
------------------------------
2018/07/31 05:01:57 read closing down: EOF
• [SLOW TEST:107.233 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model not defined
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:392
      should report CPU model from libvirt capabilities
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:393
------------------------------
• [SLOW TEST:47.079 seconds]
Configurations
2018/07/31 05:02:44 read closing down: EOF
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  New VirtualMachineInstance with all supported drives
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:413
    should have all the device nodes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:436
------------------------------
2018/07/31 05:03:18 read closing down: EOF
• [SLOW TEST:34.172 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with Disk PVC
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
2018/07/31 05:06:36 read closing down: EOF
Get https://127.0.0.1:33195/api/v1/namespaces/kube-system/pods?labelSelector=kubevirt.io: unexpected EOF
• Failure [198.272 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with CDRom PVC [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
        Expected
            : 180000000000
        to be nil
        /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:79
------------------------------
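The "Expected : 180000000000 to be nil" assertion value is a Go time.Duration printed in nanoseconds, i.e. a 180 s wait that expired; this lines up with the exactly three-minute gap between "VirtualMachineInstance started." and the console check in the step log below. A quick sanity check:

  echo $((180000000000 / 1000000000))   # -> 180 seconds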
STEP: Starting a VirtualMachineInstance
STEP: Waiting until the VirtualMachineInstance will start
level=info timestamp=2018-07-31T09:03:19.128229Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiw95bt kind=VirtualMachineInstance uid=9181c04d-94a0-11e8-8b5c-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiw95bt-89jbt"
level=info timestamp=2018-07-31T09:03:34.872258Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiw95bt kind=VirtualMachineInstance uid=9181c04d-94a0-11e8-8b5c-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiw95bt-89jbt"
level=info timestamp=2018-07-31T09:03:36.370377Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiw95bt kind=VirtualMachineInstance uid=9181c04d-94a0-11e8-8b5c-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-07-31T09:03:36.414010Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiw95bt kind=VirtualMachineInstance uid=9181c04d-94a0-11e8-8b5c-525500d15501 msg="VirtualMachineInstance started."
STEP: Checking that the VirtualMachineInstance console has expected output
level=info timestamp=2018-07-31T09:06:36.646233Z pos=utils.go:1291 component=tests namespace=kubevirt-test-default name=testvmiw95bt kind=VirtualMachineInstance uid= msg="Login: [{2 \r\n\r\n\r\nISOLINUX 6.04 6.04-pre1 Copyright (C) 1994-2015 H. Peter Anvin et al\r\nboot: \u001b[?7h\r\n []}]"
2018/07/31 05:08:04 read closing down: EOF
• [SLOW TEST:103.272 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started and stopped multiple times
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with Disk PVC
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
2018/07/31 05:10:03 read closing down: EOF
• [SLOW TEST:136.556 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started and stopped multiple times
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with CDRom PVC
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:43.554 seconds]
2018/07/31 05:11:20 read closing down: EOF
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With an emptyDisk defined
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113
      should create a writeable emptyDisk with the right capacity
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115
------------------------------
2018/07/31 05:12:02 read closing down: EOF
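An emptyDisk is scratch space virt-launcher allocates alongside the VMI; the capacity and serial-number specs here verify it from inside the guest. Roughly the same check by hand on a Linux guest (the device name is an assumption):

  lsblk /dev/vdb          # size should match the requested capacity
  ls /dev/disk/by-id/     # a configured disk serial typically surfaces here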
• [SLOW TEST:42.759 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With an emptyDisk defined and a specified serial number
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163
      should create a writeable emptyDisk with the specified serial number
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165
------------------------------
2018/07/31 05:12:33 read closing down: EOF
• [SLOW TEST:30.810 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should be successfully started
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207
------------------------------
2018/07/31 05:14:21 read closing down: EOF
• [SLOW TEST:108.168 seconds]
2018/07/31 05:14:21 read closing down: EOF
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should not persist data
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218
------------------------------
2018/07/31 05:17:09 read closing down: EOF
• [SLOW TEST:168.284 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With VirtualMachineInstance with two PVCs
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266
      should start vmi multiple times
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278
------------------------------
• [SLOW TEST:5.467 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48
    with correct permissions
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:51
      should be allowed to access subresource endpoint
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:52
------------------------------
••
------------------------------
• [SLOW TEST:5.430 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization For Version Command
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:63
    Without permissions
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:71
      should be able to access subresource version endpoint
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:72
------------------------------
Waiting for namespace kubevirt-test-default to be removed, this can take a while ...
Waiting for namespace kubevirt-test-alternative to be removed, this can take a while ...
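The Rbac Authorization specs above exercise KubeVirt's subresource endpoints, which are gated by ordinary Kubernetes RBAC. The same permission can be probed from the CLI; the resource/subresource names and the impersonated service account below are assumptions for illustration:

  kubectl auth can-i get virtualmachineinstances/console \
      --as=system:serviceaccount:default:probe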
Summarizing 31 Failures:

[Fail] VirtualMachine A valid VirtualMachine given [It] should start and stop VirtualMachineInstance multiple times
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:157

[Fail] Networking VirtualMachineInstance with custom MAC address [It] should configure custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1102

[Fail] Networking VirtualMachineInstance with custom MAC address in non-conventional format [It] should configure custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:420

[Fail] Networking VirtualMachineInstance with custom MAC address and slirp interface [It] should configure custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:433

[Fail] Networking VirtualMachineInstance with disabled automatic attachment of interfaces [It] should not configure any external interfaces
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:448

[Fail] Slirp [BeforeEach] should be able to VirtualMachineInstance with slirp interface
/root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:58

[Fail] Slirp [BeforeEach] should be able to VirtualMachineInstance with slirp interface with custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:58

[Fail] LeaderElection Start a VirtualMachineInstance when the controller pod is not running [It] should success
/root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:91

[Fail] VNC A new VirtualMachineInstance [BeforeEach] with VNC connection should allow accessing the VNC device
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:58

[Fail] VNC A new VirtualMachineInstance [BeforeEach] should upgrade subresource connections if an origin header is given for vnc
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:58

[Fail] VNC A new VirtualMachineInstance [BeforeEach] should upgrade subresource connections if an origin header is given for serial console
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:58

[Fail] Console A new VirtualMachineInstance with a serial console with a cirros image [It] should return that we are running cirros
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:51

[Fail] Console A new VirtualMachineInstance with a serial console with a fedora image [It] should return that we are running fedora
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:51

[Fail] Console A new VirtualMachineInstance with a serial console [It] should be able to reconnect to console multiple times
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:91

[Fail] Console A new VirtualMachineInstance with a serial console [It] should wait until the virtual machine is in running state and return a stream interface
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:106

[Fail] Console A new VirtualMachineInstance with a serial console [It] should fail waiting for the virtual machine instance to be running
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:128

[Fail] Console A new VirtualMachineInstance with a serial console [It] should fail waiting for the expecter
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:151

[Fail] Expose Expose service on a VM [BeforeEach] Expose ClusterIP service Should expose a Cluster IP service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose service on a VM [BeforeEach] Expose ClusterIP service with string target-port Should expose a ClusterIP service and connect to the vm on port 80
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose service on a VM [BeforeEach] Expose NodePort service Should expose a NodePort service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose UDP service on a VMI [BeforeEach] Expose ClusterIP UDP service Should expose a ClusterIP service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose UDP service on a VMI [BeforeEach] Expose NodePort UDP service Should expose a NodePort service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27

[Fail] Expose Expose service on a VMI replica set [BeforeEach] Expose ClusterIP service Should create a ClusterIP service on VMRS and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:264

[Fail] Expose Expose service on an VM [BeforeEach] Expose ClusterIP service Connect to ClusterIP services that was set when VM was offline
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:331

[Fail] HookSidecars VMI definition with SM BIOS hook sidecar [It] should successfully start with hook sidecar annotation
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:63

[Fail] HookSidecars VMI definition with SM BIOS hook sidecar [It] should call Collect and OnDefineDomain on the hook sidecar
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:70

[Fail] HookSidecars VMI definition with SM BIOS hook sidecar [It] should update domain XML with SM BIOS properties
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1045

[Fail] VMIDefaults Disk defaults [It] Should be applied to VMIs
/root/go/src/kubevirt.io/kubevirt/tests/vmidefaults_test.go:71

[Fail] VMIlifecycle Creating a VirtualMachineInstance [It] should success
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:77

[Fail] VMIlifecycle Creating a VirtualMachineInstance [It] should start it
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:82

[Fail] Storage Starting a VirtualMachineInstance with Alpine PVC should be successfully started [It] with CDRom PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:79

Ran 140 of 151 Specs in 4635.987 seconds
FAIL! -- 109 Passed | 31 Failed | 0 Pending | 11 Skipped
--- FAIL: TestTests (4636.00s)
FAIL
make: *** [functest] Error 1
+ make cluster-down
./cluster/down.sh
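With the 31 failures clustered in networking, console, and expose specs, and the mid-run "unexpected EOF" from the API server earlier in the log, this looks more like the control plane dropping out than 31 independent bugs. When iterating, a narrowed re-run is far cheaper than the full 77-minute pass; a sketch, assuming the FUNC_TEST_ARGS hook the KubeVirt Makefile of this era exposed:

  make functest FUNC_TEST_ARGS='-ginkgo.focus=Storage'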