+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading ........ (progress dots truncated)
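The trace above is the job's standard preamble: export the provider settings, register a cleanup trap, then recycle the cluster. Below is a minimal sketch of that pattern, reconstructed from the trace rather than taken from the actual CI script; variable values are copied from the trace, everything else is an assumption. One detail worth noting: SIGSTOP cannot actually be caught, so listing it in the trap is a no-op, and it is the EXIT trap that guarantees `make cluster-down` runs.

```bash
#!/bin/bash
# Sketch of the job wrapper implied by the trace above (not the real script).
set -ex

export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
export KUBEVIRT_PROVIDER=os-3.10.0   # chosen because the job name matches openshift-.*
export KUBEVIRT_NUM_NODES=2
export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
export NAMESPACE=kube-system

# Always tear the cluster down on exit or interruption.
# SIGSTOP (listed in the trace) cannot be trapped; EXIT covers normal termination.
trap '{ make cluster-down; }' EXIT SIGINT SIGTERM

make cluster-down   # start from a clean slate
make cluster-up
```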
Downloading ....... (progress dots truncated)
2018/07/26 10:23:32 Waiting for host: 192.168.66.102:22
2018/07/26 10:23:35 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 10:23:43 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 10:23:51 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 10:23:59 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 10:24:11 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987
e183c40c07dc
861f604efed4
12902ad26342
028539b1f68b
bd6f07c1906c
d1f95a33a226
c43f96b6da26
e007e5cfd226
b42e2bceca6e
00531aec6f9a
e4ad39ba6cef
504c3df6bbf4
eb1ec0b445ce
b8955b91e8e5
f739ed8f3e59
07668d85ab3a
a6045d125d7b
2ce17110e009
b45f64ab28ef
3a15945be9e1
2a0af99ae1d1
0ece927846d7
0202d5f5dfae
8ce743769d8f
2efb36567bd8
96b65c0493c5
e9ce89fa30e3
2018/07/26 10:24:14 Waiting for host: 192.168.66.101:22
2018/07/26 10:24:17 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 10:24:25 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 10:24:33 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/26 10:24:45 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=2.59 ms

--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 2.598/2.598/2.598/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
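Flattened into the trace above is a small discovery loop: it probes candidate addresses 192.168.66.102 onwards with a single ping, appends each reachable node to the inventory's `[new_nodes]` group, and stops at the first unreachable address. A sketch of that loop as reconstructed from the `set -x` output; since `set -x` does not show redirections, the targets of the `echo` commands (the inventory file and `/etc/hosts`) are assumptions:

```bash
#!/bin/bash
set -ex

inventory_file=/root/inventory

# Register the new_nodes group in the inventory (both commands appear in
# the trace; the >> target is assumed).
echo '[new_nodes]' >> "$inventory_file"
sed -i '/\[OSEv3:children\]/a new_nodes' "$inventory_file"

nodes_found=false
for i in $(seq 2 100); do
  node=$(printf node%02d "$i")
  node_ip=$(printf 192.168.66.1%02d "$i")
  set +e
  ping "$node_ip" -c 1
  if [ $? -ne 0 ]; then
    break                      # first unreachable address ends the scan
  fi
  nodes_found=true
  set -e
  echo "$node_ip $node" >> /etc/hosts   # assumed target
  echo "Found $node. Adding it to the inventory."
  echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> "$inventory_file"
done
```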
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable

--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms
+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)

TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_lb_to_config] ************************************************

TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_glusterfs_to_config] *****************************************

TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

PLAY [Ensure there are new_nodes] **********************************************

TASK [fail] ********************************************************************
skipping: [localhost]

TASK [fail] ********************************************************************
skipping:
[localhost] PLAY [Initialization Checkpoint Start] ***************************************** TASK [Set install initialization 'In Progress'] ******************************** ok: [node01] PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] ****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config PLAY [Ensure that all non-node hosts are accessible] *************************** TASK [Gathering Facts] ********************************************************* ok: [node01] PLAY [Initialize basic host facts] ********************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02 TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] *** ok: [node01] ok: [node02] TASK 
[openshift_sanitize_inventory : debug] ************************************ skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_stats] ******************************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] *** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02 included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02 TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Standardize on latest variable names] ***** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Normalize openshift_release] ************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02 TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* skipping: [node01] => (item=openshift_hosted_etcd_storage_kind) skipping: [node02] => (item=openshift_hosted_etcd_storage_kind) TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : At least one master is schedulable] ******* skipping: [node01] skipping: [node02] TASK [Detecting Operating System from 
ostree_booted] *************************** ok: [node01] ok: [node02] TASK [set openshift_deployment_type if unset] ********************************** skipping: [node01] skipping: [node02] TASK [check for node already bootstrapped] ************************************* ok: [node02] ok: [node01] TASK [initialize_facts set fact openshift_is_bootstrapped] ********************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] *** ok: [node01] ok: [node02] TASK [Determine Atomic Host Docker Version] ************************************ skipping: [node01] skipping: [node02] TASK [assert atomic host docker version is 1.12 or later] ********************** skipping: [node01] skipping: [node02] PLAY [Retrieve existing master configs and validate] *************************** TASK [openshift_control_plane : stat] ****************************************** ok: [node01] TASK [openshift_control_plane : slurp] ***************************************** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] *** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Initialize special first-master variables] ******************************* TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Disable web console if required] ***************************************** TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Setup yum repositories for all hosts] ************************************ TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Install Red Hat Subscription manager] ******************* skipping: [node02] TASK [rhel_subscribe : Is host already registered?] 
**************************** skipping: [node02] TASK [rhel_subscribe : Register host] ****************************************** skipping: [node02] TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] *********** skipping: [node02] TASK [rhel_subscribe : Attach to OpenShift Pool] ******************************* skipping: [node02] TASK [rhel_subscribe : Satellite preparation] ********************************** skipping: [node02] TASK [openshift_repos : openshift_repos detect ostree] ************************* ok: [node02] TASK [openshift_repos : Ensure libselinux-python is installed] ***************** ok: [node02] TASK [openshift_repos : Remove openshift_additional.repo file] ***************** ok: [node02] TASK [openshift_repos : Create any additional repos that are defined] ********** TASK [openshift_repos : include_tasks] ***************************************** skipping: [node02] TASK [openshift_repos : include_tasks] ***************************************** included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02 TASK [openshift_repos : Configure origin gpg keys] ***************************** ok: [node02] TASK [openshift_repos : Configure correct origin release repository] *********** ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2) TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] *** changed: [node02] => { "msg": "First run of openshift_repos" } TASK [openshift_repos : Record that openshift_repos already ran] *************** ok: [node02] RUNNING HANDLER [openshift_repos : refresh cache] ****************************** changed: [node02] PLAY [Install packages necessary for installer] ******************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Determine if chrony is installed] **************************************** [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message. 
changed: [node02] TASK [Install ntp package] ***************************************************** skipping: [node02] TASK [Start and enable ntpd/chronyd] ******************************************* changed: [node02] TASK [Ensure openshift-ansible installer package deps are installed] *********** ok: [node02] => (item=iproute) ok: [node02] => (item=dbus-python) ok: [node02] => (item=PyYAML) ok: [node02] => (item=python-ipaddress) ok: [node02] => (item=libsemanage-python) ok: [node02] => (item=yum-utils) ok: [node02] => (item=python-docker) PLAY [Initialize cluster facts] ************************************************ TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [get openshift_current_version] ******************************************* ok: [node02] ok: [node01] TASK [set_fact openshift_portal_net if present on masters] ********************* ok: [node01] ok: [node02] TASK [Gather Cluster facts] **************************************************** changed: [node02] changed: [node01] TASK [Set fact of no_proxy_internal_hostnames] ********************************* skipping: [node01] skipping: [node02] TASK [Initialize openshift.node.sdn_mtu] *************************************** changed: [node02] ok: [node01] PLAY [Initialize etcd host variables] ****************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role : openshift_version] **************************************** TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** ok: [node01] TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** ok: [node01] => { "msg": "openshift_pkg_version was not defined. 
Falling back to -3.10.0" } TASK [openshift_version : set_fact] ******************************************** ok: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Verify Requirements] ***************************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [Run variable sanity checks] ********************************************** ok: [node01] TASK [Validate openshift_node_groups and openshift_node_group_name] ************ ok: [node01] PLAY [Initialization Checkpoint End] ******************************************* TASK [Set install initialization 'Complete'] *********************************** ok: [node01] PLAY [Validate node hostnames] ************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Query DNS for IP address of node02] ************************************** ok: [node02] TASK [Validate openshift_hostname when defined] ******************************** skipping: [node02] TASK [Validate openshift_ip exists on node when defined] *********************** skipping: [node02] PLAY [Configure os_firewall] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [os_firewall : Detecting Atomic Host Operating System] ******************** ok: [node02] TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** ok: [node02] TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** skipping: [node02] TASK [os_firewall : Install firewalld packages] ******************************** skipping: [node02] TASK [os_firewall : Ensure iptables services are not enabled] ****************** skipping: [node02] => (item=iptables) skipping: [node02] => (item=ip6tables) TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** skipping: [node02] TASK [os_firewall : Start and enable firewalld service] ************************ skipping: [node02] TASK 
[os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** skipping: [node02] TASK [os_firewall : Restart polkitd] ******************************************* skipping: [node02] TASK [os_firewall : Wait for polkit action to have been created] *************** skipping: [node02] TASK [os_firewall : Ensure firewalld service is not enabled] ******************* ok: [node02] TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** skipping: [node02] TASK [os_firewall : Install iptables packages] ********************************* ok: [node02] => (item=iptables) ok: [node02] => (item=iptables-services) TASK [os_firewall : Start and enable iptables service] ************************* ok: [node02 -> node02] => (item=node02) TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** skipping: [node02] PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [container_runtime : Setup the docker-storage for overlay] **************** skipping: [node02] TASK [container_runtime : Create file system on extra volume device] *********** TASK [container_runtime : Create mount entry for extra volume] ***************** PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** ok: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* ok: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ****************** skipping: [node02] TASK [container_runtime : Add https_proxy to /etc/atomic.conf] ***************** skipping: [node02] TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ******************** skipping: [node02] TASK [container_runtime : Get current installed Docker version] **************** ok: [node02] TASK [container_runtime : Error out if Docker pre-installed but too old] ******* skipping: [node02] TASK [container_runtime : Error out if requested Docker is too old] ************ skipping: [node02] TASK [container_runtime : Install Docker] ************************************** skipping: [node02] TASK [container_runtime : Ensure 
docker.service.d directory exists] ************ ok: [node02] TASK [container_runtime : Configure Docker service unit file] ****************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Set registry params] ********************************* skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** skipping: [node02] TASK [container_runtime : Set Proxy Settings] ********************************** skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) TASK [container_runtime : Set various Docker options] ************************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Configure Docker Network OPTIONS] ******************** ok: [node02] TASK [container_runtime : Detect if docker is already started] ***************** ok: [node02] TASK [container_runtime : Start the Docker service] **************************** ok: [node02] TASK [container_runtime : set_fact] ******************************************** ok: [node02] TASK [container_runtime : Check for docker_storage_path/overlay2] ************** ok: [node02] TASK [container_runtime : Fixup SELinux permissions for docker] **************** changed: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* ok: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** ok: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** ok: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] TASK [container_runtime : Fail if Atomic Host since this is an rpm request] **** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create 
systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Check that overlay is in the kernel] ***************** skipping: [node02] TASK [container_runtime : Add overlay to modprobe.d] *************************** skipping: [node02] TASK [container_runtime : Manually modprobe overlay into the kernel] *********** skipping: [node02] TASK [container_runtime : Enable and start systemd-modules-load] *************** skipping: [node02] TASK [container_runtime : Install cri-o] *************************************** skipping: [node02] TASK [container_runtime : Remove CRI-O default configuration files] ************ skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf) skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf) TASK [container_runtime : Create the CRI-O configuration] ********************** skipping: [node02] TASK [container_runtime : Ensure CNI configuration directory exists] *********** skipping: [node02] TASK [container_runtime : Add iptables allow rules] **************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove iptables rules] ******************************* TASK [container_runtime : Add firewalld allow rules] *************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove firewalld allow rules] ************************ TASK [container_runtime : Configure the CNI network] *************************** skipping: [node02] TASK [container_runtime : Create /etc/sysconfig/crio-network] ****************** skipping: [node02] TASK [container_runtime : Start the CRI-O service] ***************************** skipping: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* skipping: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** skipping: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** skipping: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role : openshift_version] **************************************** TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** skipping: [node01] 
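The version facts this play settles on were already printed in the first pass above: openshift_release "3.10", openshift_image_tag "v3.10.0-rc.0", and openshift_pkg_version "-3.10.0*". Roughly how the three identifiers are consumed downstream, as an illustrative sketch only: the yum invocation mirrors the later "Install node, clients, and conntrack packages" task (which installs origin-node-3.10.0*), while the container image name in the pull is an assumption, not taken from this log.

```bash
#!/bin/bash
# Illustrative only: how the three version facts relate.
openshift_release=3.10
openshift_image_tag=v3.10.0-rc.0
openshift_pkg_version='-3.10.0*'

# RPM installs append the pkg-version suffix to the package name,
# pinning yum to the 3.10.0 builds (yum expands the glob itself):
yum install -y "origin-node${openshift_pkg_version}" "origin-clients${openshift_pkg_version}"

# Container pulls use the image tag instead (image name assumed):
docker pull "docker.io/openshift/origin-node:${openshift_image_tag}"
```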
TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Node Preparation Checkpoint Start] *************************************** TASK [Set Node preparation 'In Progress'] ************************************** ok: [node01] PLAY [Only target nodes that have not yet been bootstrapped] ******************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [add_host] **************************************************************** skipping: [localhost] => (item=node02) ok: [localhost] => (item=node01) PLAY [Disable excluders] ******************************************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02 TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-docker-excluder version detected] 
************ skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-excluder version detected] ******************* skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** skipping: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] PLAY [Configure nodes] ********************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_cloud_provider : Set cloud provider facts] ********************* skipping: [node02] TASK [openshift_cloud_provider : Create cloudprovider config dir] ************** skipping: [node02] TASK [openshift_cloud_provider : include the defined cloud provider files] ***** skipping: [node02] TASK [openshift_node : fail] *************************************************** skipping: [node02] TASK [openshift_node : Check for NetworkManager service] *********************** ok: [node02] TASK [openshift_node : Set fact using_network_manager] ************************* ok: [node02] TASK [openshift_node : Install dnsmasq] **************************************** ok: [node02] TASK [openshift_node : ensure origin/node directory exists] ******************** changed: [node02] => (item=/etc/origin) 
changed: [node02] => (item=/etc/origin/node) TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] *** skipping: [node02] TASK [openshift_node : Install network manager dispatch script] **************** skipping: [node02] TASK [openshift_node : Install dnsmasq configuration] ************************** ok: [node02] TASK [openshift_node : Deploy additional dnsmasq.conf] ************************* skipping: [node02] TASK [openshift_node : Enable dnsmasq] ***************************************** ok: [node02] TASK [openshift_node : Install network manager dispatch script] **************** ok: [node02] TASK [openshift_node : Add iptables allow rules] ******************************* ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove iptables rules] ********************************** TASK [openshift_node : Add firewalld allow rules] ****************************** skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove firewalld allow rules] *************************** TASK [openshift_node : Checking for journald.conf] ***************************** ok: [node02] TASK [openshift_node : Create journald persistence directories] **************** ok: [node02] TASK [openshift_node : Update journald setup] ********************************** ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'}) ok: [node02] => (item={u'var': u'Compress', u'val': True}) ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000}) ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'}) ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'}) ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'}) ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'}) ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'}) ok: [node02] => (item={u'var': u'ForwardToSyslog', 
u'val': False}) ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False}) TASK [openshift_node : Restart journald] *************************************** skipping: [node02] TASK [openshift_node : Disable swap] ******************************************* ok: [node02] TASK [openshift_node : Install node, clients, and conntrack packages] ********** ok: [node02] => (item={u'name': u'origin-node-3.10.0*'}) ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'}) ok: [node02] => (item={u'name': u'conntrack-tools'}) TASK [openshift_node : Restart cri-o] ****************************************** skipping: [node02] TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] *** changed: [node02] TASK [openshift_node : sysctl] ************************************************* ok: [node02] TASK [openshift_node : Check for credentials file for registry auth] *********** skipping: [node02] TASK [openshift_node : Create credentials for registry auth] ******************* skipping: [node02] TASK [openshift_node : Create credentials for registry auth (alternative)] ***** skipping: [node02] TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] *** skipping: [node02] TASK [openshift_node : Check that node image is present] *********************** changed: [node02] TASK [openshift_node : Pre-pull node image] ************************************ skipping: [node02] TASK [openshift_node : Copy node script to the node] *************************** ok: [node02] TASK [openshift_node : Install Node service file] ****************************** ok: [node02] TASK [openshift_node : Ensure old system path is set] ************************** skipping: [node02] => (item=/etc/origin/openvswitch) skipping: [node02] => (item=/var/lib/kubelet) skipping: [node02] => (item=/opt/cni/bin) TASK [openshift_node : Check status of node image pre-pull] ******************** skipping: [node02] TASK [openshift_node : Copy node container image to ostree storage] ************ skipping: [node02] TASK [openshift_node : Install or Update node system container] **************** skipping: [node02] TASK [openshift_node : Restart network manager to ensure networking configuration is in place] *** skipping: [node02] TASK [openshift_node : Configure Node settings] ******************************** ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='}) ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'}) ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'}) TASK [openshift_node : Configure Proxy Settings] ******************************* skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='}) skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='}) skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'}) TASK [openshift_node : file] *************************************************** skipping: [node02] TASK [openshift_node : Create the Node config] ********************************* changed: [node02] TASK [openshift_node : Configure Node Environment Variables] ******************* TASK [openshift_node : Ensure the node static pod directory exists] ************ changed: [node02] TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : Check status of node image pre-pull] 
******************** skipping: [node02] TASK [openshift_node : Install NFS storage plugin dependencies] **************** ok: [node02] TASK [openshift_node : Check for existence of nfs sebooleans] ****************** ok: [node02] => (item=virt_use_nfs) ok: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] *** ok: [node02] => (item=virt_use_nfs) skipping: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] *** skipping: [node02] => (item=virt_use_nfs) skipping: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Install GlusterFS storage plugin dependencies] ********** ok: [node02] TASK [openshift_node : Check for existence of fusefs sebooleans] *************** ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] *** ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] *** skipping: [node02] => (item=virt_use_fusefs) skipping: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item=openshift-control-plane) ok: [node02] => (item=openshift-node) ok: [node02] => (item=openshift) skipping: [node02] => (item=recommend.conf) skipping: [node02] => (item=openshift-control-plane/tuned.conf) skipping: [node02] => (item=openshift-node/tuned.conf) skipping: [node02] => (item=openshift/tuned.conf) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item=openshift-control-plane) skipping: [node02] => (item=openshift-node) skipping: [node02] => (item=openshift) ok: [node02] => (item=recommend.conf) ok: [node02] => (item=openshift-control-plane/tuned.conf) ok: [node02] => (item=openshift-node/tuned.conf) ok: [node02] => (item=openshift/tuned.conf) TASK [tuned : Make tuned use the
recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node01] changed: [node02] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the 
bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:04:07) Node Preparation : Complete (0:05:11) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5649 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 18d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 18d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
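# The node-readiness gate traced above boils down to: fail the run if kubectl
# cannot list nodes, or if any node reports NotReady. A minimal sketch of that
# pattern, reconstructed from the +/++ trace (illustrative only, not the
# verbatim CI script):
#
#   set +e
#   kubectl get nodes --no-headers
#   kubectl_rc=$?
#   set -e
#   if [ "$kubectl_rc" -ne 0 ]; then
#       echo 'kubectl failed to list nodes' >&2
#       exit 1
#   fi
#   if kubectl get nodes --no-headers | grep -q NotReady; then
#       echo 'Some nodes are NotReady' >&2
#       exit 1
#   fi
#   echo 'Nodes are ready:'
#   kubectl get nodes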
sha256:b69a3f94b2043cd36cc41eb5d9446480e0a640962e468ab72c3cc51f2b89386a go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:b69a3f94b2043cd36cc41eb5d9446480e0a640962e468ab72c3cc51f2b89386a go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.37 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 3265a3c6f899 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> 84570f0bf244 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> 4b8efcbf3461 Step 5/8 : USER 1001 ---> Using cache ---> c49257f2ff48 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> 288166cf4f5b Removing intermediate container 4a5a465ff290 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in 681aad2685ae ---> cfe2bfacd770 Removing intermediate container 681aad2685ae Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-controller" '' ---> Running in 5d1c7612b7b0 ---> 6d73002545b0 Removing intermediate container 5d1c7612b7b0 Successfully built 6d73002545b0 Sending build context to Docker daemon 42.63 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> c1e65e6c8241 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 4c20d196c128 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> cee46e462c53 Removing intermediate container 5480f33f62f1 Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> 3234e31e5714 Removing intermediate container be4f0f237a04 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in 183e69b5a184  ---> 76582369be66 Removing intermediate container 183e69b5a184 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in f1a7f8cf92b0  ---> b3aacd907d05 Removing intermediate container f1a7f8cf92b0 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> 74d8f296317e Removing intermediate container da48d386e038 Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Running in 983d22293411 ---> 5b8e8a3ff8ff Removing intermediate container 983d22293411 Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-launcher" '' ---> Running in 690958cb20f1 ---> a46fea696260 Removing intermediate container 690958cb20f1 Successfully built a46fea696260 Sending build context to Docker daemon 41.67 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 3265a3c6f899 Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> 5fff2d4b91ea Removing intermediate container e71dba0836aa Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Running in ffc4c793b3a2 ---> 50a6889a3b97 Removing intermediate container ffc4c793b3a2 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' 
"virt-handler" '' ---> Running in e4ee90dc5e9d ---> 64b8fe099576 Removing intermediate container e4ee90dc5e9d Successfully built 64b8fe099576 Sending build context to Docker daemon 38.76 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 3265a3c6f899 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> 6f2134b876af Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> d5ef0239bf68 Step 5/8 : USER 1001 ---> Using cache ---> 233000b2d9b5 Step 6/8 : COPY virt-api /usr/bin/virt-api ---> 8bf2dc4ce2d7 Removing intermediate container a3ae82f99381 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in 908dd009ea42 ---> 769408cf7084 Removing intermediate container 908dd009ea42 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-api" '' ---> Running in 0ed2dde55e91 ---> 1b5fa6b89099 Removing intermediate container 0ed2dde55e91 Successfully built 1b5fa6b89099 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 3265a3c6f899 Step 3/7 : ENV container docker ---> Using cache ---> 3fe7db912524 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 06d762a67408 Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> 3876d185cf84 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 1fb50ce9b78f Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in c757ac3153ea ---> 488f20582014 Removing intermediate container c757ac3153ea Successfully built 488f20582014 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 3265a3c6f899 Step 3/5 : ENV container docker ---> Using cache ---> 3fe7db912524 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 6bc4f549313f Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "vm-killer" '' ---> Running in e2f3e9e5b8eb ---> faf362ed7ca9 Removing intermediate container e2f3e9e5b8eb Successfully built faf362ed7ca9 Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 9ef1c0ce5d24 Step 3/7 : ENV container docker ---> Using cache ---> 9ad55e41ed61 Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 17a81fda7c2b Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 681d01e165e6 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> a79815fe82d9 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "registry-disk-v1alpha" '' ---> Running in 277b32465f20 ---> 7c153d2534b2 Removing intermediate container 277b32465f20 Successfully built 7c153d2534b2 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32872/kubevirt/registry-disk-v1alpha:devel ---> 7c153d2534b2 Step 2/4 : MAINTAINER "David Vossel" \ ---> Running in 7db27b66a67c ---> bbb86fad02e3 Removing intermediate container 7db27b66a67c Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Running in 887efe2ec414   % Total % 
Received % Xferd Average Speed Time  Time Time Current  Dload Upload Total Spent  Left Speed 0 0  0 0 0  0   0  0 --:--:-- --:--:-- --:--:--  0 0 12.1M 0 49152 0 0 81647  0 0:02:35 --:--:-- 0:02:35 81512 85 12.1M 85 10.4M 0 0 6865k 0 0:00:01 0:00:01 --:--:-- 6861k 100 12.1M 100 12.1M 0 0 7503k  0 0:00:01 0:00:01 --:--:-- 7498k  ---> 9c3c23585dd7 Removing intermediate container 887efe2ec414 Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 48429aaee8fd ---> 2e457ce01529 Removing intermediate container 48429aaee8fd Successfully built 2e457ce01529 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32872/kubevirt/registry-disk-v1alpha:devel ---> 7c153d2534b2 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Running in 0ef7fe0d1d58 ---> d7bac2e3d9f6 Removing intermediate container 0ef7fe0d1d58 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Running in c28780264ef2  % Total % Received % Xferd Average Speed Time  Time Time Current  Dload Upload Total Spent Left Speed  0  0 0 0  0  0  0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0  0 221M 0 349k 0  0 312k  0  0:12:06 0:00:01 0:12:05 312k 0 221M 0 1402k 0 0  665k  0 0:05:40 0:00:02 0:05:38 1066k 1 221M 1 3515k 0 0 1136k  0 0:03:19 0:00:03  0:03:16 1603k 3 221M 3 7214k 0 0 1768k 0 0:02:08 0:00:04 0:02:04 2319k 4 221M 4 11.0M 0 0 2224k 0 0:01:42 0:00:05 0:01:37 2765k 6 221M 6 15.3M 0 0 2573k 0 0:01:28  0:00:06 0:01:22 3081k 8 221M 8 19.8M 0 0 2865k 0 0:01:19 0:00:07 0:01:12 3794k 10 221M 10 23.4M 0 0 2972k 0 0:01:16 0:00:08 0:01:08 4112k 11 221M 11 26.0M 0 0 2936k 0 0:01:17 0:00:09 0:01:08 3888k 13 221M 13 28.8M 0 0 2936k 0 0:01:17 0:00:10 0:01:07 3658k 14 221M 14 31.7M 0 0 2928k 0 0:01:17 0:00:11 0:01:06 3361k 15 221M 15 34.5M 0 0 2927k 0 0:01:17 0:00:12 0:01:05 3017k 16 221M 16 37.4M 0 0 2929k 0 0:01:17  0:00:13  0:01:04 2859k 18 221M 18 40.3M 0 0 2930k 0 0:01:17 0:00:14 0:01:03 2921k 19 221M 19 43.4M 0 0 2950k 0 0:01:16 0:00:15 0:01:01 2979k 21 221M 21 46.7M 0 0 2971k 0 0:01:16 0:00:16 0:01:00 3068k 22 221M 22 49.5M 0 0 2970k 0 0:01:16 0:00:17 0:00:59 3073k 23 221M 23 52.6M 0 0 2982k 0 0:01:16 0:00:18 0:00:58 3120k 25 221M 25 56.0M 0 0 3006k 0  0:01:15 0:00:19 0:00:56 3219k 26 221M 26 59.6M 0 0 3043k 0 0:01:14 0:00:20 0:00:54 3325k 28 221M 28 62.2M 0 0 3020k 0  0:01:15 0:00:21 0:00:54 3176k 28 221M 28 63.7M 0 0 2957k 0 0:01:16 0:00:22 0:00:54 2910k 29 221M 29 65.3M 0 0 2901k   0 0:01:18 0:00:23 0:00:55 2607k 30 221M 30 67.0M 0 0 2852k 0 0:01:19 0:00:24 0:00:55 2263k 31 221M 31 68.7M 0 0 2808k 0 0:01:20 0:00:25 0:00:55 1862k 31 221M 31 70.4M 0 0 2763k 0 0:01:22 0:00:26 0:00:56 1679k 32 221M 32 72.1M 0 0 2727k  0 0:01:23 0:00:27 0:00:56 1710k 33 221M 33 74.0M 0 0 2699k 0 0:01:24 0:00:28 0:00:56 1769k 34 221M 34 76.2M 0 0 2685k  0 0:01:24 0:00:29 0:00:55 1881k 35 221M 35 78.6M 0 0 2679k 0 0:01:24 0:00:30 0:00:54 2030k 36 221M 36 80.5M 0 0 2652k 0 0:01:25 0:00:31 0:00:54 2074k 36 221M 36 81.9M 0 0 2614k 0 0:01:26 0:00:32 0:00:54 2007k 37 221M 37 83.3M 0 0 2580k 0 0:01:27 0:00:33 0:00:54 1912k 38 221M 38 84.8M 0 0 2549k 0 0:01:28 0:00:34 0:00:54 1758k 38 221M 38 86.3M 0 0 2519k 0 0:01:30 0:00:35 0:00:55 1562k 39 221M 39 87.7M 0 0 2491k 0 0:01:31 0:00:36 0:00:55 1482k 40 221M 40 89.3M 0 0 2466k 0 0:01:31 0:00:37 0:00:54 1515k 41 221M 41 91.0M 0 0 2448k 0 
0:01:32 0:00:38 0:00:54 1574k 42 221M 42 93.1M 0 0 2439k 0 0:01:33 0:00:39 0:00:54 1689k 43 221M 43 95.6M 0 0 2443k 0 0:01:32  0:00:40 0:00:52 1911k 44 221M 44 99.0M 0 0 2468k 0  0:01:31 0:00:41 0:00:50 2304k 46 221M 46 103M 0 0 2510k 0 0:01:30 0:00:42 0:00:48 2839k 47 221M 47 106M 0 0 2523k 0 0:01:29 0:00:43 0:00:46 3092k 49 221M 49 109M 0 0 2533k 0 0:01:29 0:00:44 0:00:45 3271k 50 221M 50 112M 0 0 2546k 0 0:01:29 0:00:45 0:00:44 3369k 51  221M 51 115M 0 0 2559k  0 0:01:28 0:00:46 0:00:42 3306k 53 221M 53 118M 0 0 2575k 0 0:01:28 0:00:47 0:00:41 3123k 54 221M 54 121M 0 0 2587k 0 0:01:27 0:00:48 0:00:39 3143k 56 221M 56 124M 0 0 2600k 0 0:01:27 0:00:49 0:00:38 3189k 57 221M 57 128M 0 0 2617k 0 0:01:26 0:00:50 0:00:36 3258k 59 221M 59 131M 0 0 2637k 0 0:01:26 0:00:51 0:00:35 3355k 61 221M 61 135M 0 0 2670k 0 0:01:24 0:00:52 0:00:32 3567k 63 221M 63 140M 0 0 2712k 0 0:01:23 0:00:53 0:00:30 3915k 65 221M 65 145M 0 0 2745k 0 0:01:22 0:00:54 0:00:28 4171k 67 221M 67 149M 0 0 2781k 0  0:01:21 0:00:55 0:00:26 4428k 69 221M 69 154M 0 0 2825k 0 0:01:20 0:00:56 0:00:24 4743k 72 221M 72 160M 0 0 2873k 0 0:01:18 0:00:57 0:00:21 4982k 74 221M 74 165M 0 0 2922k 0 0:01:17 0:00:58 0:00:19 5143k 77 221M 77 171M 0 0 2969k 0 0:01:16 0:00:59 0:00:17 5409k 79 221M 79 176M 0 0 3012k 0 0:01:15 0:01:00 0:00:15 5547k 82 221M 82 182M 0 0 3055k 0 0:01:14 0:01:01 0:00:13 5634k 84 221M 84 186M 0 0 3078k 0 0:01:13 0:01:02 0:00:11 5419k 86 221M 86 191M 0 0 3110k 0 0:01:12 0:01:03 0:00:09 5294k 88 221M 88 196M 0 0 3144k 0 0:01:12 0:01:04 0:00:08 5213k 91 221M 91 202M 0 0 3180k 0 0:01:11  0:01:05 0:00:06 5202k 93 221M 93 207M 0 0 3217k 0 0:01:10 0:01:06 0:00:04 5201k 96 221M 96 213M 0 0 3254k 0 0:01:09 0:01:07 0:00:02 5436k 98 221M 98 218M 0 0 3292k 0 0:01:08 0:01:08 --:--:-- 5593k 100 221M 100 221M 0 0 3307k 0 0:01:08 0:01:08 --:--:-- 5629k  ---> fc220aede27b Removing intermediate container c28780264ef2 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in d43a61165ad7 ---> 74bacaba3450 Removing intermediate container d43a61165ad7 Successfully built 74bacaba3450 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32872/kubevirt/registry-disk-v1alpha:devel ---> 7c153d2534b2 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> d7bac2e3d9f6 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Running in c857823a53d7  % Total % Received % Xferd Average Speed  Time Time Time Current    Dload Upload Total Spent Left Speed 0 0  0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 1 37.0M 1 551k 0 0 1491k 0 0:00:25 --:--:-- 0:00:25 1487k 26 37.0M 26 9856k 0 0 7194k 0 0:00:05 0:00:01  0:00:04 7189k 79 37.0M 79 29.2M 0 0 12.3M 0 0:00:02 0:00:02 --:--:-- 12.3M 100 37.0M 100 37.0M 0 0 12.4M 0 0:00:02 0:00:02 --:--:-- 12.4M  ---> 4076d21f272e Removing intermediate container c857823a53d7 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in e5bf8243470b ---> 23194af67d16 Removing intermediate container e5bf8243470b Successfully built 23194af67d16 Sending build context to Docker daemon 35.57 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 3265a3c6f899 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> deebe9dc06da Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 4094ce77e412 Step 5/8 : USER 1001 
---> Using cache ---> ba694520e9a4 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> 5ec0fa5ee9a6 Removing intermediate container 023db3f59889 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in 65fa30ff40a5 ---> 4866bf7ff8b5 Removing intermediate container 65fa30ff40a5 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "subresource-access-test" '' ---> Running in a9e6ee2debd1 ---> b27796ab1490 Removing intermediate container a9e6ee2debd1 Successfully built b27796ab1490 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 3265a3c6f899 Step 3/9 : ENV container docker ---> Using cache ---> 3fe7db912524 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> e0cf52293e57 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 8c031086e8cb Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 0f6dd31de4d3 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 6a702eb79a95 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> bed79012c9f3 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "winrmcli" '' ---> Running in 53863280b258 ---> 11fc488e0cbd Removing intermediate container 53863280b258 Successfully built 11fc488e0cbd Sending build context to Docker daemon 36.77 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> cc296a71da13 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> 67d7204ad059 Removing intermediate container 304a00a4a11e Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in 782a552c3c71 ---> 802c4afaeaf9 Removing intermediate container 782a552c3c71 Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in bde03127fa94 ---> b76c405d97a6 Removing intermediate container bde03127fa94 Successfully built b76c405d97a6 hack/build-docker.sh push The push refers to a repository [localhost:32872/kubevirt/virt-controller] 63445203d1a3: Preparing 915a0c3e3f5f: Preparing 891e1e4ef82a: Preparing 915a0c3e3f5f: Pushed 63445203d1a3: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:33714641851b43c667881e40609ab279a0efa9517fd259c0800c0aa3d021d9b6 size: 949 The push refers to a repository [localhost:32872/kubevirt/virt-launcher] 2f1e49c201f5: Preparing b57ce2b6f058: Preparing a822b56c3f65: Preparing a0e093bb887a: Preparing d666016a80b2: Preparing 5379fb5d8cce: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing da38cf808aa5: Waiting b83399358a92: Waiting fa6154170bf5: Waiting 5eefb9960a36: Preparing 891e1e4ef82a: Preparing 186d8b3e4fd8: Waiting 891e1e4ef82a: Waiting 5eefb9960a36: Waiting b57ce2b6f058: Pushed 2f1e49c201f5: Pushed a0e093bb887a: Pushed b83399358a92: Pushed da38cf808aa5: Pushed 186d8b3e4fd8: Pushed fa6154170bf5: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller a822b56c3f65: Pushed 5379fb5d8cce: Pushed d666016a80b2: Pushed 5eefb9960a36: Pushed devel: digest: sha256:fcb64b991de35dddc6d299c05aa78dff94c6219df16bfca8fbbfd78765384701 size: 2828 The push refers to a repository [localhost:32872/kubevirt/virt-handler] 8693b0a09274: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted 
from kubevirt/virt-launcher 8693b0a09274: Pushed devel: digest: sha256:bfc2d33ed5450cfd9ee7716eba400dcc4cec5bbfad8c7df7cd5621899105551b size: 741 The push refers to a repository [localhost:32872/kubevirt/virt-api] 69698f10e729: Preparing 7cc07c574d2a: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 7cc07c574d2a: Pushed 69698f10e729: Pushed devel: digest: sha256:3e3e1f318915ae7152a499aff64c7197fdebf58db6e3b85d5843ad16f21de3d4 size: 948 The push refers to a repository [localhost:32872/kubevirt/disks-images-provider] 1548fa7b1c9e: Preparing a7621d2cf364: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 1548fa7b1c9e: Pushed a7621d2cf364: Pushed devel: digest: sha256:2132037d2c676ee79d60597d508fe40bf501a0360c754d52a2e891fdaa360210 size: 948 The push refers to a repository [localhost:32872/kubevirt/vm-killer] 3c31f9f8d755: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider 3c31f9f8d755: Pushed devel: digest: sha256:824d73b4926e21d0a80792eddb2df37838fa4fe564436b2ded3ea7c0a268bdf1 size: 740 The push refers to a repository [localhost:32872/kubevirt/registry-disk-v1alpha] c66b9a220e25: Preparing 4662bbc21c2d: Preparing 25edbec0eaea: Preparing c66b9a220e25: Pushed 4662bbc21c2d: Pushed 25edbec0eaea: Pushed devel: digest: sha256:896d1ae1238971977fc247625d2383ed418db62b20ec90c30c0a94c6aad3c162 size: 948 The push refers to a repository [localhost:32872/kubevirt/cirros-registry-disk-demo] 3d2d302ea8c7: Preparing c66b9a220e25: Preparing 4662bbc21c2d: Preparing 25edbec0eaea: Preparing 4662bbc21c2d: Mounted from kubevirt/registry-disk-v1alpha c66b9a220e25: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 3d2d302ea8c7: Pushed devel: digest: sha256:c48808929dd451c00cd3ef1935542e305051116208fd8bc26d171c292ce68812 size: 1160 The push refers to a repository [localhost:32872/kubevirt/fedora-cloud-registry-disk-demo] ebd270596a8b: Preparing c66b9a220e25: Preparing 4662bbc21c2d: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 4662bbc21c2d: Mounted from kubevirt/cirros-registry-disk-demo c66b9a220e25: Mounted from kubevirt/cirros-registry-disk-demo ebd270596a8b: Pushed devel: digest: sha256:442cbf9cd0e4b7aad071d8efef8bf07c3c84a9176d1414451c1613502f318049 size: 1161 The push refers to a repository [localhost:32872/kubevirt/alpine-registry-disk-demo] f850b7a047fd: Preparing c66b9a220e25: Preparing 4662bbc21c2d: Preparing 25edbec0eaea: Preparing 4662bbc21c2d: Mounted from kubevirt/fedora-cloud-registry-disk-demo c66b9a220e25: Mounted from kubevirt/fedora-cloud-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo f850b7a047fd: Pushed devel: digest: sha256:c92d448a03101bdd1038276ccf07730554d8a4e90203dc0f613d4f381e549bc2 size: 1160 The push refers to a repository [localhost:32872/kubevirt/subresource-access-test] b0e247064607: Preparing 7e69243e781e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 7e69243e781e: Pushed b0e247064607: Pushed devel: digest: sha256:e2a7eeffab8970c49128e5b994a425465763c6bdc71618e0e80cc398e4bfddde size: 948 The push refers to a repository [localhost:32872/kubevirt/winrmcli] a117c61a5658: Preparing c9df4405017d: Preparing 99bb32247f65: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test a117c61a5658: Pushed 99bb32247f65: Pushed c9df4405017d: Pushed devel: digest: 
sha256:2006386c52af221c1819c83d388ef7aef1b7ed29fe124fbf702c8c8af4c5f56f size: 1165 The push refers to a repository [localhost:32872/kubevirt/example-hook-sidecar] 157095476b8f: Preparing 39bae602f753: Preparing 157095476b8f: Pushed 39bae602f753: Pushed devel: digest: sha256:57b53dbbb3c9f007bf57d1af2dc22547ebc7fc8d2fc97ca61d6f702418c57ec9 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-130-gc242bcb ++ KUBEVIRT_VERSION=v0.7.0-130-gc242bcb + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 
+++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:32872/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... + cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources 
found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ wc -l ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete 
apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ wc -l ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 
'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-130-gc242bcb ++ KUBEVIRT_VERSION=v0.7.0-130-gc242bcb + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:32872/kubevirt +++ 
manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... + [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" 
created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-8ftmp 0/1 ContainerCreating 0 5s disks-images-provider-gmnhx 0/1 ContainerCreating 0 5s virt-api-7d79764579-56ccl 0/1 ContainerCreating 0 7s virt-api-7d79764579-jq8hh 0/1 ContainerCreating 0 7s virt-controller-7d57d96b65-48ghm 0/1 ContainerCreating 0 7s virt-controller-7d57d96b65-rpf7m 0/1 ContainerCreating 0 7s virt-handler-n2w27 0/1 ContainerCreating 0 7s virt-handler-qxrvv 0/1 ContainerCreating 0 7s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
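The trace above is the script's deploy gate: release manifests applied, the privileged SCC granted to each KubeVirt service account via `oc adm policy add-scc-to-user privileged -z <serviceaccount>`, then a polling wait on the namespace. Condensed as a sketch (reconstructed from the xtrace lines in this log, not quoted from the script's source; the timeout and sample values are the ones traced):

# Sketch of the two-phase readiness gate traced above (reconstruction).
timeout=300; sample=30

# Phase 1: no pod may report a status other than Running.
current_time=0
while [ -n "$(cluster/kubectl.sh get pods -n kube-system --no-headers | grep -v Running)" ]; do
    echo 'Waiting for kubevirt pods to enter the Running state ...'
    sleep "$sample"; current_time=$((current_time + sample))
    [ "$current_time" -gt "$timeout" ] && exit 1
done

# Phase 2: no container may report ready=false.
current_time=0
while [ -n "$(cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers | grep false)" ]; do
    echo 'Waiting for KubeVirt containers to become ready ...'
    sleep "$sample"; current_time=$((current_time + sample))
    [ "$current_time" -gt "$timeout" ] && exit 1
done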
+ kubectl get pods -n kube-system --no-headers
+ cluster/kubectl.sh get pods -n kube-system --no-headers
+ grep -v Running
disks-images-provider-8ftmp        0/1  ContainerCreating  0  6s
disks-images-provider-gmnhx        0/1  ContainerCreating  0  6s
virt-api-7d79764579-56ccl          0/1  ContainerCreating  0  8s
virt-api-7d79764579-jq8hh          0/1  ContainerCreating  0  8s
virt-controller-7d57d96b65-48ghm   0/1  ContainerCreating  0  8s
virt-handler-n2w27                 0/1  ContainerCreating  0  8s
virt-handler-qxrvv                 0/1  ContainerCreating  0  8s
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system --no-headers
++ cluster/kubectl.sh get pods -n kube-system --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n false ']'
+ echo 'Waiting for KubeVirt containers to become ready ...'
Waiting for KubeVirt containers to become ready ...
+ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ grep false
+ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
false
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n kube-system
+ cluster/kubectl.sh get pods -n kube-system
NAME                               READY  STATUS   RESTARTS  AGE
disks-images-provider-8ftmp        1/1    Running  0         1m
disks-images-provider-gmnhx        1/1    Running  0         1m
master-api-node01                  1/1    Running  1         18d
master-controllers-node01          1/1    Running  1         18d
master-etcd-node01                 1/1    Running  1         18d
virt-api-7d79764579-56ccl          1/1    Running  1         1m
virt-api-7d79764579-jq8hh          1/1    Running  0         1m
virt-controller-7d57d96b65-48ghm   1/1    Running  0         1m
virt-controller-7d57d96b65-rpf7m   1/1    Running  0         1m
virt-handler-n2w27                 1/1    Running  0         1m
virt-handler-qxrvv                 1/1    Running  0         1m
+ for i in '${namespaces[@]}'
+ current_time=0
++ kubectl get pods -n default --no-headers
++ cluster/kubectl.sh get pods -n default --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n default
+ cluster/kubectl.sh get pods -n default
NAME                       READY  STATUS   RESTARTS  AGE
docker-registry-1-rl562    1/1    Running  1         18d
registry-console-1-rw9zf   1/1    Running  1         18d
router-1-6cch9             1/1    Running  1         18d
+ kubectl version
+ cluster/kubectl.sh version
oc v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
features: Basic-Auth GSSAPI Kerberos SPNEGO

Server https://127.0.0.1:32869
openshift v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
+ ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ [[ openshift-3.10-release =~ windows.* ]]
+ FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ make functest
hack/dockerized "hack/build-func-tests.sh"
sha256:b69a3f94b2043cd36cc41eb5d9446480e0a640962e468ab72c3cc51f2b89386a
go version
go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1532602464 Will run 150 of 150 specs • [SLOW TEST:40.939 seconds] LeaderElection /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43 Start a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53 when the controller pod is not running /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54 should success /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55 ------------------------------ •• ------------------------------ • [SLOW TEST:18.414 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should update VirtualMachine once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195 ------------------------------ •• ------------------------------ • [SLOW TEST:57.760 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if it gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245 ------------------------------ • [SLOW TEST:45.268 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265 ------------------------------ • [SLOW TEST:35.070 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should stop VirtualMachineInstance if running set to false /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325 ------------------------------ • [SLOW TEST:229.387 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should start and stop VirtualMachineInstance multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333 ------------------------------ • [SLOW TEST:50.125 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should not update the VirtualMachineInstance spec if Running /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346 ------------------------------ • [SLOW TEST:180.115 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should survive guest shutdown, multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387 ------------------------------ VM testvmig8flt was scheduled to start • [SLOW TEST:18.818 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should start a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 ------------------------------ VM testvmijkd5w was scheduled to stop • [SLOW TEST:61.708 seconds] 
VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should stop a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467 ------------------------------ • [SLOW TEST:44.824 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 should have cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 ------------------------------ • [SLOW TEST:109.943 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 with injected ssh-key /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92 should have ssh-key under authorized keys /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93 ------------------------------ • [SLOW TEST:54.098 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userData source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118 should process provided cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119 ------------------------------ • [SLOW TEST:42.227 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 should take user-data from k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162 ------------------------------ • [SLOW TEST:19.397 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should successfully start with hook sidecar annotation /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 ------------------------------ • [SLOW TEST:20.863 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 ------------------------------ • [SLOW TEST:22.311 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should update domain XML with SM BIOS properties /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83 ------------------------------ • [SLOW TEST:45.648 seconds] Health Monitoring /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37 A 
VirtualMachineInstance with a watchdog device /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56 should be shut down when the watchdog expires /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57 ------------------------------ • [SLOW TEST:95.879 seconds] Slirp /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39 should be able to /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 VirtualMachineInstance with slirp interface /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:10.841 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:18.894 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should update readyReplicas once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157 ------------------------------ •• ------------------------------ • [SLOW TEST:5.705 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should not scale when paused and scale when resume /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223 ------------------------------ • [SLOW TEST:11.959 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove the finished VM /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279 ------------------------------ ••••••••••• ------------------------------ • [SLOW TEST:6.027 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48 with correct permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:51 should be allowed to access subresource endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:52 ------------------------------ • [SLOW TEST:5.171 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48 Without permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:56 should not be able to access subresource endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:57 ------------------------------ • ------------------------------ • [SLOW TEST:5.210 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization For Version Command /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:63 Without permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:71 should be able to access subresource version endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:72 ------------------------------ • [SLOW TEST:21.617 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are 
correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.550 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given an vm /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.955 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi preset /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:21.170 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi replica set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ ••• ------------------------------ • [SLOW TEST:6.682 seconds] Templates /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42 Launching VMI from VM Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60 with given Fedora Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193 with given VM JSON from the Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152 with given VM from the VM JSON /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158 with given VMI from the VM /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163 should succeed to terminate the VMI using oc-patch command /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166 ------------------------------ • ------------------------------ • [SLOW TEST:19.156 seconds] VNC /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54 with VNC connection /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62 should allow accessing the VNC device /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64 ------------------------------ •• ------------------------------ • [SLOW TEST:32.914 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ Pod name: disks-images-provider-8ftmp Pod phase: Running copy all 
images to host mount directory Pod name: disks-images-provider-gmnhx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running 2018/07/26 11:17:18 http: TLS handshake error from 10.129.0.1:60682: EOF 2018/07/26 11:17:28 http: TLS handshake error from 10.129.0.1:60694: EOF 2018/07/26 11:17:38 http: TLS handshake error from 10.129.0.1:60706: EOF 2018/07/26 11:17:48 http: TLS handshake error from 10.129.0.1:60718: EOF level=info timestamp=2018-07-26T11:17:55.968875Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" level=error timestamp=2018-07-26T11:17:56.551419Z pos=subresource.go:85 component=virt-api msg="connection failed: command terminated with exit code 1" 2018/07/26 11:17:56 http: response.WriteHeader on hijacked connection level=error timestamp=2018-07-26T11:17:56.555393Z pos=subresource.go:97 component=virt-api reason="read tcp 10.129.0.2:8443->10.128.0.1:51814: use of closed network connection" msg="error ecountered reading from websocket stream" level=info timestamp=2018-07-26T11:17:56.555611Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4dx56/console proto=HTTP/1.1 statusCode=500 contentLength=0 2018/07/26 11:17:58 http: TLS handshake error from 10.129.0.1:60736: EOF level=info timestamp=2018-07-26T11:18:05.677520Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:18:08 http: TLS handshake error from 10.129.0.1:60748: EOF 2018/07/26 11:18:18 http: TLS handshake error from 10.129.0.1:60762: EOF 2018/07/26 11:18:28 http: TLS handshake error from 10.129.0.1:60774: EOF 2018/07/26 11:18:38 http: TLS handshake error from 10.129.0.1:60786: EOF Pod name: virt-api-7d79764579-jq8hh Pod phase: Running level=info timestamp=2018-07-26T11:18:14.469630Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/26 11:18:15 http: TLS handshake error from 10.129.0.1:56556: EOF level=info timestamp=2018-07-26T11:18:16.418979Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:18:18.154806Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:18:18.588091Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:18:23.225411Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:18:25 http: TLS handshake error from 10.129.0.1:56568: EOF level=error timestamp=2018-07-26T11:18:29.575529Z pos=subresource.go:85 component=virt-api msg= 2018/07/26 11:18:29 http: response.WriteHeader on hijacked connection level=error timestamp=2018-07-26T11:18:29.576113Z pos=subresource.go:97 component=virt-api reason="read tcp 10.129.0.3:8443->10.128.0.1:51908: use of closed network connection" msg="error ecountered reading 
from websocket stream" level=info timestamp=2018-07-26T11:18:29.576407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmidwprg/console proto=HTTP/1.1 statusCode=200 contentLength=0 level=info timestamp=2018-07-26T11:18:33.453632Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:18:35 http: TLS handshake error from 10.129.0.1:56580: EOF level=info timestamp=2018-07-26T11:18:36.042762Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T11:18:43.681133Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=info timestamp=2018-07-26T11:17:33.841059Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-26T11:17:33.863430Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=7c3f029b-90c5-11e8-90ce-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T11:17:33.863618Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=7c3f029b-90c5-11e8-90ce-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-26T11:17:33.863735Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-26T11:17:36.364187Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv4rm8 kind= uid=7f942118-90c5-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:17:36.364662Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv4rm8 kind= uid=7f942118-90c5-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:17:37.008616Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4dx56 kind= uid=7ff9e193-90c5-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:17:37.008982Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4dx56 kind= uid=7ff9e193-90c5-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:17:37.215500Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi4dx56\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi4dx56" level=info timestamp=2018-07-26T11:17:56.389754Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwprg kind= uid=8b871081-90c5-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:17:56.390278Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmidwprg kind= uid=8b871081-90c5-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:17:56.630472Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidwprg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidwprg" level=info timestamp=2018-07-26T11:17:56.853952Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidwprg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidwprg" level=info timestamp=2018-07-26T11:18:29.270618Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:18:29.271711Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T10:54:30.531434Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr" level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:18:29.545529Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmidwprg kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:18:29.545649Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidwprg, existing: false\n" level=info timestamp=2018-07-26T11:18:29.545730Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:18:29.584789Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmidwprg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:18:29.585574Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidwprg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:18:44.228348Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmizwhs4, existing: true\n" level=info timestamp=2018-07-26T11:18:44.229108Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-26T11:18:44.229195Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:18:44.229528Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-26T11:18:44.261183Z pos=vm.go:424 component=virt-handler namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 reason="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-26T11:18:44.319147Z pos=vm.go:251 component=virt-handler reason="server error. 
command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizwhs4" level=info timestamp=2018-07-26T11:18:44.321277Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmizwhs4, existing: true\n" level=info timestamp=2018-07-26T11:18:44.321392Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-26T11:18:44.321464Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:18:44.321748Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 msg="Processing vmi update" Pod name: virt-launcher-testvmizwhs4-mpf5g Pod phase: Running level=info timestamp=2018-07-26T11:18:33.317867Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-26T11:18:33.318376Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-26T11:18:33.322640Z pos=libvirt.go:256 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-26T11:18:43.331418Z pos=libvirt.go:271 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-26T11:18:43.387557Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmizwhs4" level=info timestamp=2018-07-26T11:18:43.390133Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-26T11:18:43.391057Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" level=error timestamp=2018-07-26T11:18:44.251693Z pos=manager.go:160 component=virt-launcher namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 reason="virError(Code=0, Domain=0, Message='Missing error')" msg="Getting the domain failed." level=error timestamp=2018-07-26T11:18:44.252312Z pos=server.go:68 component=virt-launcher namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 reason="virError(Code=0, Domain=0, Message='Missing error')" msg="Failed to sync vmi" level=error timestamp=2018-07-26T11:18:44.385213Z pos=common.go:126 component=virt-launcher msg="updated MAC for interface: eth0 - 0a:58:0a:dd:7f:5e" level=info timestamp=2018-07-26T11:18:44.405326Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-26T11:18:44.405507Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-26T11:18:44.407667Z pos=dhcp.go:62 component=virt-launcher msg="Starting SingleClientDHCPServer" level=info timestamp=2018-07-26T11:18:44.598121Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:18:44.612267Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmizwhs4 kind= uid=9f200740-90c5-11e8-90ce-525500d15501 msg="Domain defined." 
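Reading the interleaved dumps above: virt-launcher for testvmizwhs4 reports "Marked as ready" before libvirt has defined a domain, the first Launcher.Sync RPC from virt-handler fails with virError(Code=0, Domain=0, Message='Missing error'), the handler re-enqueues the VMI, and the test harness records the resulting Warning event as the failure summarized next. A triage sketch for this pattern (pod, namespace, and VMI names are the ones from this run; the launcher container name "compute" and a kubectl new enough for --field-selector are assumptions):

# Pull the launcher's error lines for the failing VMI:
cluster/kubectl.sh logs -n kubevirt-test-default virt-launcher-testvmizwhs4-mpf5g -c compute | grep -E 'level=error|virError'
# The same virError string should also appear as a Warning event in the namespace:
cluster/kubectl.sh get events -n kubevirt-test-default --field-selector type=Warning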
• Failure [91.473 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with CDRom PVC [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Unexpected Warning event received.
        Expected
            : Warning
        not to equal
            : Warning

        /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
STEP: Starting a VirtualMachineInstance
STEP: Waiting until the VirtualMachineInstance will start
level=info timestamp=2018-07-26T11:18:30.208496Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmizwhs4-mpf5g"
level=info timestamp=2018-07-26T11:18:44.964429Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmizwhs4-mpf5g"
level=error timestamp=2018-07-26T11:18:45.051373Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')"
STEP: Checking that the VirtualMachineInstance console has expected output
• [SLOW TEST:156.172 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started and stopped multiple times
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with Disk PVC
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:121.776 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started and stopped multiple times
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with CDRom PVC
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:45.535 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With an emptyDisk defined
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113
      should create a writeable emptyDisk with the right capacity
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115
------------------------------
• [SLOW TEST:44.473 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With an emptyDisk defined and a specified serial number
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163
      should create a writeable emptyDisk with the specified serial number
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165
------------------------------
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all
images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running 2018/07/26 11:27:30 http: TLS handshake error from 10.129.0.1:54306: EOF level=info timestamp=2018-07-26T11:27:34.609208Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:27:40 http: TLS handshake error from 10.129.0.1:54314: EOF 2018/07/26 11:27:50 http: TLS handshake error from 10.129.0.1:54326: EOF 2018/07/26 11:28:00 http: TLS handshake error from 10.129.0.1:54338: EOF 2018/07/26 11:28:10 http: TLS handshake error from 10.129.0.1:54350: EOF 2018/07/26 11:28:20 http: TLS handshake error from 10.129.0.1:54362: EOF 2018/07/26 11:28:30 http: TLS handshake error from 10.129.0.1:54374: EOF level=info timestamp=2018-07-26T11:28:36.347208Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:28:40 http: TLS handshake error from 10.129.0.1:54386: EOF 2018/07/26 11:28:50 http: TLS handshake error from 10.129.0.1:54398: EOF 2018/07/26 11:29:00 http: TLS handshake error from 10.129.0.1:54410: EOF level=info timestamp=2018-07-26T11:29:06.485502Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:29:10 http: TLS handshake error from 10.129.0.1:54422: EOF 2018/07/26 11:29:20 http: TLS handshake error from 10.129.0.1:54434: EOF Pod name: virt-api-7d79764579-jq8hh Pod phase: Running 2018/07/26 11:27:34 http: TLS handshake error from 10.129.0.1:38136: EOF level=info timestamp=2018-07-26T11:27:36.380421Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:27:44 http: TLS handshake error from 10.129.0.1:38148: EOF 2018/07/26 11:27:54 http: TLS handshake error from 10.129.0.1:38160: EOF 2018/07/26 11:28:04 http: TLS handshake error from 10.129.0.1:38172: EOF level=info timestamp=2018-07-26T11:28:06.266556Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:28:14 http: TLS handshake error from 10.129.0.1:38184: EOF 2018/07/26 11:28:24 http: TLS handshake error from 10.129.0.1:38196: EOF 2018/07/26 11:28:34 http: TLS handshake error from 10.129.0.1:38208: EOF 2018/07/26 11:28:44 http: TLS handshake error from 10.129.0.1:38220: EOF 2018/07/26 11:28:54 http: TLS handshake error from 10.129.0.1:38232: EOF 2018/07/26 11:29:04 http: TLS handshake error from 10.129.0.1:38244: EOF 2018/07/26 11:29:14 http: TLS handshake error from 10.129.0.1:38256: EOF 2018/07/26 11:29:24 http: TLS handshake error from 10.129.0.1:38268: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=info timestamp=2018-07-26T11:27:06.003406Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 E0726 11:27:09.082074 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host level=info timestamp=2018-07-26T11:27:13.510210Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-26T11:27:13.515360Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING 
informer kubeVirtPodInformer" level=info timestamp=2018-07-26T11:27:13.523876Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-26T11:27:13.527836Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-26T11:27:13.528836Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-26T11:27:13.529602Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-26T11:27:13.530374Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-26T11:27:13.531493Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-26T11:27:13.543398Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-26T11:27:13.544524Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-26T11:27:13.554428Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-26T11:27:13.545852Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr" level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:27:15.390026Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-26T11:27:15.391195Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-26T11:27:15.499798Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-26T11:27:15.617285Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-26T11:27:15.649810Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-26T11:27:15.838273Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidfjnw, existing: true\n" level=info timestamp=2018-07-26T11:27:15.838390Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-26T11:27:15.838426Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:27:15.838533Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmidfjnw kind=VirtualMachineInstance uid=b10646ca-90c6-11e8-90ce-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T11:27:15.992209Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidfjnw kind=VirtualMachineInstance uid=b10646ca-90c6-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:27:15.997382Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmidfjnw, existing: true\n"
level=info timestamp=2018-07-26T11:27:15.997495Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-07-26T11:27:15.997562Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:27:15.997842Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmidfjnw kind= uid=b10646ca-90c6-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T11:27:15.998104Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmidfjnw kind= uid=b10646ca-90c6-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmidfjnw-9xqhn
Pod phase: Running
level=info timestamp=2018-07-26T11:27:10.982521Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T11:27:10.983012Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T11:27:10.985406Z pos=libvirt.go:256 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-26T11:27:21.109047Z pos=libvirt.go:271 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-26T11:27:21.137132Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidfjnw"
level=info timestamp=2018-07-26T11:27:21.142127Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-26T11:27:21.142728Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure [198.557 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should be successfully started [It]
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207

      Expected
          : 180000000000
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:214
------------------------------
STEP: Starting a VirtualMachineInstance
STEP: Waiting until the VirtualMachineInstance will start
level=info timestamp=2018-07-26T11:26:09.810228Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmidfjnw-9xqhn"
level=info timestamp=2018-07-26T11:26:25.094181Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmidfjnw-9xqhn"
level=info timestamp=2018-07-26T11:26:26.695844Z pos=utils.go:243 component=tests msg="VirtualMachineInstance defined."
level=info timestamp=2018-07-26T11:26:26.831400Z pos=utils.go:243 component=tests msg="VirtualMachineInstance started."
STEP: Checking that the VirtualMachineInstance console has expected output
level=info timestamp=2018-07-26T11:29:27.039956Z pos=utils.go:1275 component=tests namespace=kubevirt-test-default name=testvmidfjnw kind=VirtualMachineInstance uid= msg="Login: [{2 \r\n\r\n\r\nISOLINUX 6.04 6.04-pre1 Copyright (C) 1994-2015 H.
Peter Anvin et al\r\nboot: \u001b[?7h\r\n []}]" Pod name: disks-images-provider-8ftmp Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-gmnhx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running 2018/07/26 11:28:40 http: TLS handshake error from 10.129.0.1:54386: EOF 2018/07/26 11:28:50 http: TLS handshake error from 10.129.0.1:54398: EOF 2018/07/26 11:29:00 http: TLS handshake error from 10.129.0.1:54410: EOF level=info timestamp=2018-07-26T11:29:06.485502Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:29:10 http: TLS handshake error from 10.129.0.1:54422: EOF 2018/07/26 11:29:20 http: TLS handshake error from 10.129.0.1:54434: EOF 2018/07/26 11:29:30 http: TLS handshake error from 10.129.0.1:54446: EOF 2018/07/26 11:29:40 http: TLS handshake error from 10.129.0.1:54458: EOF 2018/07/26 11:29:50 http: TLS handshake error from 10.129.0.1:54470: EOF 2018/07/26 11:30:00 http: TLS handshake error from 10.129.0.1:54482: EOF 2018/07/26 11:30:10 http: TLS handshake error from 10.129.0.1:54496: EOF 2018/07/26 11:30:20 http: TLS handshake error from 10.129.0.1:54508: EOF 2018/07/26 11:30:30 http: TLS handshake error from 10.129.0.1:54520: EOF 2018/07/26 11:30:40 http: TLS handshake error from 10.129.0.1:54532: EOF 2018/07/26 11:30:50 http: TLS handshake error from 10.129.0.1:54544: EOF Pod name: virt-api-7d79764579-jq8hh Pod phase: Running 2018/07/26 11:29:04 http: TLS handshake error from 10.129.0.1:38244: EOF 2018/07/26 11:29:14 http: TLS handshake error from 10.129.0.1:38256: EOF 2018/07/26 11:29:24 http: TLS handshake error from 10.129.0.1:38268: EOF 2018/07/26 11:29:34 http: TLS handshake error from 10.129.0.1:38280: EOF level=info timestamp=2018-07-26T11:29:36.376826Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:29:44 http: TLS handshake error from 10.129.0.1:38292: EOF 2018/07/26 11:29:54 http: TLS handshake error from 10.129.0.1:38304: EOF 2018/07/26 11:30:04 http: TLS handshake error from 10.129.0.1:38318: EOF level=info timestamp=2018-07-26T11:30:06.266516Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:30:14 http: TLS handshake error from 10.129.0.1:38330: EOF 2018/07/26 11:30:24 http: TLS handshake error from 10.129.0.1:38342: EOF 2018/07/26 11:30:34 http: TLS handshake error from 10.129.0.1:38354: EOF level=info timestamp=2018-07-26T11:30:36.519633Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:30:44 http: TLS handshake error from 10.129.0.1:38366: EOF 2018/07/26 11:30:54 http: TLS handshake error from 10.129.0.1:38378: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=info timestamp=2018-07-26T11:27:13.510210Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-26T11:27:13.515360Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-26T11:27:13.523876Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info 
level=info timestamp=2018-07-26T11:27:13.527836Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-07-26T11:27:13.528836Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-07-26T11:27:13.529602Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-07-26T11:27:13.530374Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-26T11:27:13.531493Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-26T11:27:13.543398Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T11:27:13.544524Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T11:27:13.554428Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-26T11:27:13.545852Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-26T11:29:58.022286Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:29:58.023500Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:29:58.277555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztnvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztnvm"

Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:30:14.870081Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="No update processing required"
level=error timestamp=2018-07-26T11:30:14.895194Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztnvm\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-07-26T11:30:14.897381Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztnvm\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiztnvm"
level=info timestamp=2018-07-26T11:30:14.904839Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiztnvm, existing: true\n"
level=info timestamp=2018-07-26T11:30:14.906072Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:30:14.906825Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:30:14.907464Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:30:14.908344Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:30:14.927807Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:30:14.929458Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiztnvm, existing: true\n"
level=info timestamp=2018-07-26T11:30:14.930499Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:30:14.931578Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:30:14.932362Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:30:14.934326Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:30:14.952432Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmiztnvm-hlncr
Pod phase: Running
level=info timestamp=2018-07-26T11:30:14.020218Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:30:14.064860Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 5f799d5a-a7d2-4723-be75-1798bd623712"
level=info timestamp=2018-07-26T11:30:14.065956Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:30:14.094167Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:30:14.704441Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:30:14.727906Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:30:14.733210Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:30:14.745068Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:30:14.746742Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:30:14.748946Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:30:14.762762Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:30:14.774221Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:30:14.916931Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:30:14.944626Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:30:15.083614Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 5f799d5a-a7d2-4723-be75-1798bd623712: 187"

• Failure [93.151 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should not persist data [It]
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218

      Timed out after 92.064s.
      Expected error:
          <*errors.StatusError | 0xc4204522d0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
------------------------------
STEP: Starting the VirtualMachineInstance
STEP: Starting a VirtualMachineInstance
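The dumped *errors.StatusError with Reason "Timeout" and Code 504 is the apiserver aborting the request on its side, not the test exhausting its own polling budget. apimachinery ships helpers for telling these failure modes apart; a minimal sketch under that assumption (the function name is illustrative):

package storage_test

import (
	"fmt"

	k8serrors "k8s.io/apimachinery/pkg/api/errors"
)

// classifyAPIError separates the apiserver-side 504 dumped above from
// other failures so a caller knows whether a plain retry makes sense.
func classifyAPIError(err error) string {
	switch {
	case err == nil:
		return "ok"
	case k8serrors.IsTimeout(err):
		// Matches Status "Failure", Reason "Timeout", Code 504 as above.
		return "apiserver timeout, retry the request"
	case k8serrors.IsConflict(err):
		return "resourceVersion conflict, refetch and reapply"
	default:
		return fmt.Sprintf("unexpected: %v", err)
	}
}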
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:31:00 http: TLS handshake error from 10.129.0.1:54556: EOF
2018/07/26 11:31:10 http: TLS handshake error from 10.129.0.1:54568: EOF
2018/07/26 11:31:20 http: TLS handshake error from 10.129.0.1:54580: EOF
2018/07/26 11:31:30 http: TLS handshake error from 10.129.0.1:54592: EOF
level=info timestamp=2018-07-26T11:31:36.203606Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:31:40 http: TLS handshake error from 10.129.0.1:54604: EOF
2018/07/26 11:31:50 http: TLS handshake error from 10.129.0.1:54616: EOF
2018/07/26 11:32:00 http: TLS handshake error from 10.129.0.1:54628: EOF
level=info timestamp=2018-07-26T11:32:06.414352Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:32:10 http: TLS handshake error from 10.129.0.1:54640: EOF
2018/07/26 11:32:20 http: TLS handshake error from 10.129.0.1:54652: EOF
2018/07/26 11:32:30 http: TLS handshake error from 10.129.0.1:54664: EOF
2018/07/26 11:32:40 http: TLS handshake error from 10.129.0.1:54676: EOF
2018/07/26 11:32:50 http: TLS handshake error from 10.129.0.1:54688: EOF
2018/07/26 11:33:00 http: TLS handshake error from 10.129.0.1:54700: EOF

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:31:04 http: TLS handshake error from 10.129.0.1:38390: EOF
level=info timestamp=2018-07-26T11:31:06.468239Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:31:14 http: TLS handshake error from 10.129.0.1:38402: EOF
2018/07/26 11:31:24 http: TLS handshake error from 10.129.0.1:38414: EOF
2018/07/26 11:31:34 http: TLS handshake error from 10.129.0.1:38426: EOF
2018/07/26 11:31:44 http: TLS handshake error from 10.129.0.1:38438: EOF
2018/07/26 11:31:54 http: TLS handshake error from 10.129.0.1:38450: EOF
2018/07/26 11:32:04 http: TLS handshake error from 10.129.0.1:38462: EOF
2018/07/26 11:32:14 http: TLS handshake error from 10.129.0.1:38474: EOF
2018/07/26 11:32:24 http: TLS handshake error from 10.129.0.1:38486: EOF
2018/07/26 11:32:34 http: TLS handshake error from 10.129.0.1:38498: EOF
level=info timestamp=2018-07-26T11:32:36.475820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:32:44 http: TLS handshake error from 10.129.0.1:38510: EOF
2018/07/26 11:32:54 http: TLS handshake error from 10.129.0.1:38522: EOF
2018/07/26 11:33:04 http: TLS handshake error from 10.129.0.1:38534: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:27:13.523876Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer"
level=info timestamp=2018-07-26T11:27:13.527836Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-07-26T11:27:13.528836Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-07-26T11:27:13.529602Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-07-26T11:27:13.530374Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-26T11:27:13.531493Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-26T11:27:13.543398Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T11:27:13.544524Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T11:27:13.554428Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-26T11:27:13.545852Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-26T11:29:58.022286Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:29:58.023500Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:29:58.277555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztnvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztnvm"
level=info timestamp=2018-07-26T11:31:31.117416Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:31:31.118521Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:31:48.713507Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-26T11:31:48.792555Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-26T11:31:48.792936Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:31:48.810228Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: true\n"
level=info timestamp=2018-07-26T11:31:48.811114Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T11:31:48.811983Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:31:48.813328Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:31:48.814385Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T11:31:48.882516Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:31:48.892562Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: true\n"
level=info timestamp=2018-07-26T11:31:48.893227Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:31:48.893733Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:31:48.894169Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:31:48.894754Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:31:48.906231Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi6wsdw-s9cgt
Pod phase: Running
level=info timestamp=2018-07-26T11:31:47.100046Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-26T11:31:48.404621Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:31:48.410226Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID f843445c-53fc-4b5d-9f5a-1415725a0d3e"
level=info timestamp=2018-07-26T11:31:48.411336Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:31:48.524398Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:31:48.666972Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:31:48.700474Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:31:48.721255Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:31:48.735150Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:31:48.770731Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:31:48.773011Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:31:48.784539Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:31:48.801485Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:31:48.903102Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:31:49.479768Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f843445c-53fc-4b5d-9f5a-1415725a0d3e: 188"

• Failure [124.286 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With VirtualMachineInstance with two PVCs
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266
      should start vmi multiple times [It]
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278

      Timed out after 123.094s.
      Expected error:
          <*errors.StatusError | 0xc42056e360>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
------------------------------
STEP: Starting and stopping the VirtualMachineInstance number of times
STEP: Starting a VirtualMachineInstance
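The controller and handler logs throughout this run are dominated by "Operation cannot be fulfilled ... the object has been modified" lines: optimistic-concurrency conflicts on resourceVersion, which the KubeVirt components resolve by re-enqueuing the key. Client code typically wraps such writes in client-go's conflict retry; a sketch where getLatest and update are illustrative callbacks standing in for the real Get/Update calls, not KubeVirt APIs:

package storage_test

import (
	"k8s.io/client-go/util/retry"
)

// updateWithConflictRetry reapplies update() on a freshly fetched object
// until the write stops losing the resourceVersion race.
func updateWithConflictRetry(getLatest func() (interface{}, error), update func(interface{}) error) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		obj, err := getLatest() // always start from the newest resourceVersion
		if err != nil {
			return err
		}
		return update(obj) // a Conflict error here triggers another attempt
	})
}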
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
level=info timestamp=2018-07-26T11:31:36.203606Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:31:40 http: TLS handshake error from 10.129.0.1:54604: EOF
2018/07/26 11:31:50 http: TLS handshake error from 10.129.0.1:54616: EOF
2018/07/26 11:32:00 http: TLS handshake error from 10.129.0.1:54628: EOF
level=info timestamp=2018-07-26T11:32:06.414352Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:32:10 http: TLS handshake error from 10.129.0.1:54640: EOF
2018/07/26 11:32:20 http: TLS handshake error from 10.129.0.1:54652: EOF
2018/07/26 11:32:30 http: TLS handshake error from 10.129.0.1:54664: EOF
2018/07/26 11:32:40 http: TLS handshake error from 10.129.0.1:54676: EOF
2018/07/26 11:32:50 http: TLS handshake error from 10.129.0.1:54688: EOF
2018/07/26 11:33:00 http: TLS handshake error from 10.129.0.1:54700: EOF
level=info timestamp=2018-07-26T11:33:06.426839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:33:10 http: TLS handshake error from 10.129.0.1:54714: EOF
2018/07/26 11:33:20 http: TLS handshake error from 10.129.0.1:54726: EOF
2018/07/26 11:33:30 http: TLS handshake error from 10.129.0.1:54738: EOF

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:31:24 http: TLS handshake error from 10.129.0.1:38414: EOF
2018/07/26 11:31:34 http: TLS handshake error from 10.129.0.1:38426: EOF
2018/07/26 11:31:44 http: TLS handshake error from 10.129.0.1:38438: EOF
2018/07/26 11:31:54 http: TLS handshake error from 10.129.0.1:38450: EOF
2018/07/26 11:32:04 http: TLS handshake error from 10.129.0.1:38462: EOF
2018/07/26 11:32:14 http: TLS handshake error from 10.129.0.1:38474: EOF
2018/07/26 11:32:24 http: TLS handshake error from 10.129.0.1:38486: EOF
2018/07/26 11:32:34 http: TLS handshake error from 10.129.0.1:38498: EOF
level=info timestamp=2018-07-26T11:32:36.475820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:32:44 http: TLS handshake error from 10.129.0.1:38510: EOF
2018/07/26 11:32:54 http: TLS handshake error from 10.129.0.1:38522: EOF
2018/07/26 11:33:04 http: TLS handshake error from 10.129.0.1:38534: EOF
2018/07/26 11:33:14 http: TLS handshake error from 10.129.0.1:38548: EOF
2018/07/26 11:33:24 http: TLS handshake error from 10.129.0.1:38560: EOF
2018/07/26 11:33:34 http: TLS handshake error from 10.129.0.1:38572: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:27:13.530374Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-07-26T11:27:13.531493Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-07-26T11:27:13.543398Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-07-26T11:27:13.544524Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-07-26T11:27:13.554428Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-26T11:27:13.545852Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-26T11:29:58.022286Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:29:58.023500Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:29:58.277555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztnvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztnvm"
level=info timestamp=2018-07-26T11:31:31.117416Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:31:31.118521Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:33:35.403337Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidpkbb kind= uid=a90d2386-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:33:35.404189Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidpkbb kind= uid=a90d2386-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:33:35.696353Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
level=info timestamp=2018-07-26T11:33:35.762807Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw"
level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.395145Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395908Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.396123Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmidpkbb-mxqm4
Pod phase: Pending

• Failure [31.222 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting and stopping the same VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:90
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:91
      should success multiple times [It]
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:92

      Expected
          <*errors.StatusError | 0xc420157170>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:98
------------------------------
STEP: Starting the VirtualMachineInstance
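Here the launcher pod never left Pending within the 31s budget, so the create-then-wait sequence died on the very first apiserver request. Tests usually make that wait explicit with Gomega's Eventually, polling until a condition holds or the budget runs out. A sketch where getPhase is an illustrative callback standing in for a client Get plus a read of status.phase (the timeout and interval are arbitrary):

package registrydisk_test

import (
	"time"

	. "github.com/onsi/gomega"
)

// waitForVMIPhase polls getPhase every 2s for up to 120s; Gomega also
// requires the trailing error to stay nil on every poll.
func waitForVMIPhase(getPhase func() (string, error), want string) {
	Eventually(getPhase, 120*time.Second, 2*time.Second).Should(Equal(want))
}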
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:32:10 http: TLS handshake error from 10.129.0.1:54640: EOF
2018/07/26 11:32:20 http: TLS handshake error from 10.129.0.1:54652: EOF
2018/07/26 11:32:30 http: TLS handshake error from 10.129.0.1:54664: EOF
2018/07/26 11:32:40 http: TLS handshake error from 10.129.0.1:54676: EOF
2018/07/26 11:32:50 http: TLS handshake error from 10.129.0.1:54688: EOF
2018/07/26 11:33:00 http: TLS handshake error from 10.129.0.1:54700: EOF
level=info timestamp=2018-07-26T11:33:06.426839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:33:10 http: TLS handshake error from 10.129.0.1:54714: EOF
2018/07/26 11:33:20 http: TLS handshake error from 10.129.0.1:54726: EOF
2018/07/26 11:33:30 http: TLS handshake error from 10.129.0.1:54738: EOF
level=info timestamp=2018-07-26T11:33:36.283271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:33:40 http: TLS handshake error from 10.129.0.1:54750: EOF
2018/07/26 11:33:50 http: TLS handshake error from 10.129.0.1:54762: EOF
2018/07/26 11:34:00 http: TLS handshake error from 10.129.0.1:54774: EOF
level=info timestamp=2018-07-26T11:34:06.575321Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:31:54 http: TLS handshake error from 10.129.0.1:38450: EOF
2018/07/26 11:32:04 http: TLS handshake error from 10.129.0.1:38462: EOF
2018/07/26 11:32:14 http: TLS handshake error from 10.129.0.1:38474: EOF
2018/07/26 11:32:24 http: TLS handshake error from 10.129.0.1:38486: EOF
2018/07/26 11:32:34 http: TLS handshake error from 10.129.0.1:38498: EOF
level=info timestamp=2018-07-26T11:32:36.475820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:32:44 http: TLS handshake error from 10.129.0.1:38510: EOF
2018/07/26 11:32:54 http: TLS handshake error from 10.129.0.1:38522: EOF
2018/07/26 11:33:04 http: TLS handshake error from 10.129.0.1:38534: EOF
2018/07/26 11:33:14 http: TLS handshake error from 10.129.0.1:38548: EOF
2018/07/26 11:33:24 http: TLS handshake error from 10.129.0.1:38560: EOF
2018/07/26 11:33:34 http: TLS handshake error from 10.129.0.1:38572: EOF
2018/07/26 11:33:44 http: TLS handshake error from 10.129.0.1:38584: EOF
2018/07/26 11:33:54 http: TLS handshake error from 10.129.0.1:38596: EOF
2018/07/26 11:34:04 http: TLS handshake error from 10.129.0.1:38608: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:27:13.554428Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-07-26T11:27:13.545852Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-07-26T11:29:58.022286Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:29:58.023500Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiztnvm kind= uid=27793dae-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:29:58.277555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztnvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztnvm"
level=info timestamp=2018-07-26T11:31:31.117416Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:31:31.118521Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:33:35.403337Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidpkbb kind= uid=a90d2386-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:33:35.404189Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidpkbb kind= uid=a90d2386-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:33:35.696353Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
level=info timestamp=2018-07-26T11:33:35.762807Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
level=info timestamp=2018-07-26T11:33:36.232574Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmidpkbb, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: a90d2386-90c7-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
level=info timestamp=2018-07-26T11:34:06.636175Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5rqj9 kind= uid=bbab39e7-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:34:06.638905Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5rqj9 kind= uid=bbab39e7-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:34:07.156140Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5rqj9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5rqj9"

Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw"
level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.395145Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395908Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.396123Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi5rqj9-qvhhq
Pod phase: Pending

• Failure [31.311 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:111
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:112
      should not modify the spec on status update [It]
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:113

      Expected
          <*errors.StatusError | 0xc4206f4cf0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:119
------------------------------
STEP: Starting the VirtualMachineInstance
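The "Precondition failed: UID in precondition: ..., UID in object meta:" storage errors re-enqueued in the controller logs come from writes that still carry the UID of a VMI that was deleted (and, in these tests, recreated under the same name) in the meantime. The same UID precondition can be used deliberately, to protect a delete against exactly that race; a hedged sketch using only apimachinery types:

package registrydisk_test

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
)

// uidPreconditionedDelete builds DeleteOptions that only match the object
// instance we actually observed; if the name now belongs to a recreated
// VMI with a new UID, the apiserver rejects the call with the same
// "Precondition failed: UID in precondition" error seen above.
func uidPreconditionedDelete(uid types.UID) *metav1.DeleteOptions {
	return &metav1.DeleteOptions{
		Preconditions: &metav1.Preconditions{UID: &uid},
	}
}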
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:32:50 http: TLS handshake error from 10.129.0.1:54688: EOF
2018/07/26 11:33:00 http: TLS handshake error from 10.129.0.1:54700: EOF
level=info timestamp=2018-07-26T11:33:06.426839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:33:10 http: TLS handshake error from 10.129.0.1:54714: EOF
2018/07/26 11:33:20 http: TLS handshake error from 10.129.0.1:54726: EOF
2018/07/26 11:33:30 http: TLS handshake error from 10.129.0.1:54738: EOF
level=info timestamp=2018-07-26T11:33:36.283271Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:33:40 http: TLS handshake error from 10.129.0.1:54750: EOF
2018/07/26 11:33:50 http: TLS handshake error from 10.129.0.1:54762: EOF
2018/07/26 11:34:00 http: TLS handshake error from 10.129.0.1:54774: EOF
level=info timestamp=2018-07-26T11:34:06.575321Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:34:10 http: TLS handshake error from 10.129.0.1:54786: EOF
2018/07/26 11:34:20 http: TLS handshake error from 10.129.0.1:54798: EOF
2018/07/26 11:34:30 http: TLS handshake error from 10.129.0.1:54810: EOF
level=info timestamp=2018-07-26T11:34:36.417425Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:32:24 http: TLS handshake error from 10.129.0.1:38486: EOF
2018/07/26 11:32:34 http: TLS handshake error from 10.129.0.1:38498: EOF
level=info timestamp=2018-07-26T11:32:36.475820Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:32:44 http: TLS handshake error from 10.129.0.1:38510: EOF
2018/07/26 11:32:54 http: TLS handshake error from 10.129.0.1:38522: EOF
2018/07/26 11:33:04 http: TLS handshake error from 10.129.0.1:38534: EOF
2018/07/26 11:33:14 http: TLS handshake error from 10.129.0.1:38548: EOF
2018/07/26 11:33:24 http: TLS handshake error from 10.129.0.1:38560: EOF
2018/07/26 11:33:34 http: TLS handshake error from 10.129.0.1:38572: EOF
2018/07/26 11:33:44 http: TLS handshake error from 10.129.0.1:38584: EOF
2018/07/26 11:33:54 http: TLS handshake error from 10.129.0.1:38596: EOF
2018/07/26 11:34:04 http: TLS handshake error from 10.129.0.1:38608: EOF
2018/07/26 11:34:14 http: TLS handshake error from 10.129.0.1:38620: EOF
2018/07/26 11:34:24 http: TLS handshake error from 10.129.0.1:38632: EOF
2018/07/26 11:34:34 http: TLS handshake error from 10.129.0.1:38644: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:29:58.277555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiztnvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiztnvm"
level=info timestamp=2018-07-26T11:31:31.117416Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:31:31.118521Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6wsdw kind= uid=5ef7d778-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:33:35.403337Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidpkbb kind= uid=a90d2386-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:33:35.404189Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidpkbb kind= uid=a90d2386-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:33:35.696353Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
level=info timestamp=2018-07-26T11:33:35.762807Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
level=info timestamp=2018-07-26T11:33:36.232574Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidpkbb\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmidpkbb, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: a90d2386-90c7-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidpkbb"
level=info timestamp=2018-07-26T11:34:06.636175Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5rqj9 kind= uid=bbab39e7-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
service=http namespace=kubevirt-test-default name=testvmi5rqj9 kind= uid=bbab39e7-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:34:07.156140Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5rqj9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5rqj9" level=info timestamp=2018-07-26T11:34:07.460619Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5rqj9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi5rqj9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: bbab39e7-90c7-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5rqj9" level=info timestamp=2018-07-26T11:34:37.659272Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7cskr kind= uid=ce2626f1-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:34:37.659887Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7cskr kind= uid=ce2626f1-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:34:37.896166Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7cskr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7cskr" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr" level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw" level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n" level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
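[editor's note] The "Expected ... to be nil" block above is standard Gomega matcher output: the test asserts that the error returned from the VMI create call is nil, and the apiserver instead answered 504 before the VMI was ever admitted. A minimal, hypothetical reconstruction of that assertion pattern follows (the real code is at the registry_disk_test.go lines cited above; client and helper names are assumptions based on the KubeVirt test framework of this era):

// Hypothetical sketch of the failing assertion pattern, not the actual test.
package tests_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	v1 "kubevirt.io/kubevirt/pkg/api/v1"
	"kubevirt.io/kubevirt/pkg/kubecli"
)

var _ = Describe("RegistryDisk (sketch)", func() {
	It("starts a VMI and expects no apiserver error", func() {
		virtClient, err := kubecli.GetKubevirtClient()
		Expect(err).ToNot(HaveOccurred())

		vmi := v1.NewMinimalVMI("testvmi") // assumed helper from the API package
		// This POST is what times out in this run: the apiserver returns
		// 504 ("Timeout: request did not complete within allowed duration"),
		// so err is a *errors.StatusError and the BeNil matcher prints the
		// "Expected <*errors.StatusError ...> to be nil" failure seen above.
		_, err = virtClient.VirtualMachineInstance("kubevirt-test-default").Create(vmi)
		Expect(err).To(BeNil())
	})
})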
Pod name: virt-launcher-testvmi7cskr-nb64k
Pod phase: Pending

• Failure [30.979 seconds]
RegistryDisk
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41
  Starting multiple VMIs
  /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:129
    with ephemeral registry disk
    /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:130
      should success [It]
      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:131

      Expected
          <*errors.StatusError | 0xc4201570e0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:55
------------------------------
STEP: Starting a VirtualMachineInstance

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:34:40 http: TLS handshake error from 10.129.0.1:54822: EOF
2018/07/26 11:34:50 http: TLS handshake error from 10.129.0.1:54834: EOF
2018/07/26 11:35:00 http: TLS handshake error from 10.129.0.1:54846: EOF
level=info timestamp=2018-07-26T11:35:06.237173Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:35:10 http: TLS handshake error from 10.129.0.1:54858: EOF
2018/07/26 11:35:20 http: TLS handshake error from 10.129.0.1:54870: EOF
2018/07/26 11:35:30 http: TLS handshake error from 10.129.0.1:54882: EOF
level=info timestamp=2018-07-26T11:35:36.691875Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:34:44 http: TLS handshake error from 10.129.0.1:38656: EOF
2018/07/26 11:34:54 http: TLS handshake error from 10.129.0.1:38668: EOF
2018/07/26 11:35:04 http: TLS handshake error from 10.129.0.1:38680: EOF
2018/07/26 11:35:14 http: TLS handshake error from 10.129.0.1:38692: EOF
2018/07/26 11:35:24 http: TLS handshake error from 10.129.0.1:38704: EOF
2018/07/26 11:35:34 http: TLS handshake error from 10.129.0.1:38716: EOF
level=info timestamp=2018-07-26T11:35:36.618737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:34:38.446512Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7cskr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi7cskr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ce2626f1-90c7-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7cskr"
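[editor's note] The recurring "the object has been modified; please apply your changes to the latest version and try again" entries are ordinary optimistic-concurrency conflicts (HTTP 409), which virt-controller resolves by requeuing the key, as the msg="reenqueuing ..." suffix shows. The generic client-go equivalent of that handling is sketched below; this is illustrative only and not virt-controller's actual code path:

// Illustrative conflict-retry pattern using client-go; runnable as-is.
package main

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

func main() {
	attempt := 0
	// RetryOnConflict re-invokes the closure whenever it returns a
	// Conflict (409) StatusError, backing off between attempts.
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		attempt++
		// In a real controller: re-GET the latest object here, re-apply
		// the mutation, then Update(); returning the Update error lets
		// RetryOnConflict decide whether to try again.
		fmt.Println("attempt", attempt)
		return nil
	})
	if err != nil {
		fmt.Println("update failed after retries:", err)
	}
}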
• Failure [60.503 seconds]
Version
/root/go/src/kubevirt.io/kubevirt/tests/version_test.go:35
  Check that version parameters where loaded by ldflags in build time
  /root/go/src/kubevirt.io/kubevirt/tests/version_test.go:46
    Should return a good version information struct [It]
    /root/go/src/kubevirt.io/kubevirt/tests/version_test.go:47

    Expected error:
        <*errors.StatusError | 0xc420157c20>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "the server was unable to return a response in the time allotted, but may still be processing the request",
                Reason: "Timeout",
                Details: {
                    Name: "",
                    Group: "",
                    Kind: "",
                    UID: "",
                    Causes: [
                        {
                            Type: "UnexpectedServerResponse",
                            Message: "{\"metadata\":{},\"status\":\"Failure\",\"message\":\"Timeout: request did not complete within 1m0s\",\"reason\":\"Timeout\",\"details\":{},\"code\":504}",
                            Field: "",
                        },
                    ],
                    RetryAfterSeconds: 0,
                },
                Code: 504,
            },
        }
        the server was unable to return a response in the time allotted, but may still be processing the request
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/version_test.go:49
------------------------------

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:35:40 http: TLS handshake error from 10.129.0.1:54894: EOF
2018/07/26 11:35:50 http: TLS handshake error from 10.129.0.1:54906: EOF
2018/07/26 11:36:00 http: TLS handshake error from 10.129.0.1:54918: EOF
level=info timestamp=2018-07-26T11:36:06.509198Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:35:44 http: TLS handshake error from 10.129.0.1:38728: EOF
2018/07/26 11:35:54 http: TLS handshake error from 10.129.0.1:38740: EOF
2018/07/26 11:36:04 http: TLS handshake error from 10.129.0.1:38752: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:36:09.049855Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:09.050447Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
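[editor's note] The Version failure above says nothing about the ldflags themselves: the version request never completed, the apiserver gave up after its internal 1m0s deadline and returned 504. A minimal, assumed sketch of the same class of round-trip, using client-go's discovery client rather than KubeVirt's own version subresource (which is what the real test queries):

// Assumed sketch: fetch the server version from the cluster in kubeconfig.
package main

import (
	"fmt"
	"log"

	"k8s.io/client-go/discovery"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	dc, err := discovery.NewDiscoveryClientForConfig(config)
	if err != nil {
		log.Fatal(err)
	}
	// If the apiserver (or an aggregated endpoint behind it) cannot answer
	// within its deadline, this call returns a 504 StatusError like the one
	// dumped above instead of a version.Info.
	info, err := dc.ServerVersion()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("server version:", info.GitVersion)
}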
Pod name: virt-launcher-testvmixpkgr-dssmd
Pod phase: Pending

• Failure [30.703 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with 3 CPU cores
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56
      should report 3 cpu cores under guest OS [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62

      Expected error:
          <*errors.StatusError | 0xc4201566c0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:74
------------------------------
STEP: Starting a VirtualMachineInstance

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:36:10 http: TLS handshake error from 10.129.0.1:54932: EOF
2018/07/26 11:36:20 http: TLS handshake error from 10.129.0.1:54944: EOF
2018/07/26 11:36:30 http: TLS handshake error from 10.129.0.1:54956: EOF

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:36:14 http: TLS handshake error from 10.129.0.1:38766: EOF
2018/07/26 11:36:24 http: TLS handshake error from 10.129.0.1:38778: EOF
2018/07/26 11:36:34 http: TLS handshake error from 10.129.0.1:38790: EOF
level=info timestamp=2018-07-26T11:36:36.449759Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:36:09.671125Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixpkgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmixpkgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 049955c1-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixpkgr"
level=info timestamp=2018-07-26T11:36:39.861074Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:39.861985Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
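[editor's note] The "with 3 CPU cores" case builds a VMI whose domain spec requests a three-core guest topology and then checks the core count inside the guest; in this run it never gets past the create call. A sketch of the spec it exercises, assuming the 2018-era in-tree API package path (field names are from the KubeVirt v1 API):

// Sketch of the CPU topology request behind the failing case.
package main

import (
	"fmt"

	v1 "kubevirt.io/kubevirt/pkg/api/v1"
)

func main() {
	vmi := v1.NewMinimalVMI("testvmi-cpu")
	vmi.Spec.Domain.CPU = &v1.CPU{Cores: 3}
	// The test would start this VMI and inspect /proc/cpuinfo in the guest;
	// here the Create call times out before scheduling ever happens.
	fmt.Printf("requested cores: %d\n", vmi.Spec.Domain.CPU.Cores)
}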
Pod name: virt-launcher-testvmij6fr2-ghrc4
Pod phase: Pending

• Failure [30.773 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-2Mi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Expected error:
            <*errors.StatusError | 0xc42056f200>: {
                ErrStatus: {
                    TypeMeta: {Kind: "", APIVersion: ""},
                    ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                    Status: "Failure",
                    Message: "Timeout: request did not complete within allowed duration",
                    Reason: "Timeout",
                    Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                    Code: 504,
                },
            }
            Timeout: request did not complete within allowed duration
        not to have occurred

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:184
------------------------------
STEP: Starting a VM

S [SKIPPING] [0.239 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-1Gi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        No node with hugepages hugepages-1Gi capacity

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:160
------------------------------

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:36:40 http: TLS handshake error from 10.129.0.1:54968: EOF
2018/07/26 11:36:50 http: TLS handshake error from 10.129.0.1:54980: EOF
2018/07/26 11:37:00 http: TLS handshake error from 10.129.0.1:54992: EOF
level=info timestamp=2018-07-26T11:37:06.329407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:10 http: TLS handshake error from 10.129.0.1:55004: EOF

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:36:44 http: TLS handshake error from 10.129.0.1:38802: EOF
2018/07/26 11:36:54 http: TLS handshake error from 10.129.0.1:38814: EOF
2018/07/26 11:37:04 http: TLS handshake error from 10.129.0.1:38826: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:37:10.737646Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:10.738219Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw" level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n" level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:33:05.395145Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n" level=info timestamp=2018-07-26T11:33:05.395736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:33:05.395908Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:33:05.396123Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmi7g8hl-l7rlm Pod phase: Pending • Failure [30.678 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108 with usupported page size /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:194 should failed to schedule the pod [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:195 Expected error: <*errors.StatusError | 0xc420453950>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:214 ------------------------------ STEP: Starting a VM Pod name: disks-images-provider-8ftmp Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-gmnhx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running 2018/07/26 11:35:40 http: TLS handshake error from 10.129.0.1:54894: EOF 2018/07/26 11:35:50 http: TLS handshake error from 10.129.0.1:54906: EOF 2018/07/26 11:36:00 http: TLS handshake error from 10.129.0.1:54918: EOF level=info timestamp=2018-07-26T11:36:06.509198Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:36:10 http: TLS handshake error from 10.129.0.1:54932: EOF 2018/07/26 11:36:20 http: TLS handshake error from 10.129.0.1:54944: EOF 2018/07/26 11:36:30 http: TLS handshake error from 10.129.0.1:54956: EOF 2018/07/26 11:36:40 http: TLS handshake error from 10.129.0.1:54968: EOF 2018/07/26 11:36:50 http: TLS handshake error from 10.129.0.1:54980: EOF 2018/07/26 11:37:00 http: TLS handshake error from 10.129.0.1:54992: EOF level=info timestamp=2018-07-26T11:37:06.329407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:37:10 http: TLS handshake error from 10.129.0.1:55004: EOF 2018/07/26 11:37:20 http: TLS handshake error from 10.129.0.1:55016: EOF 2018/07/26 11:37:30 http: TLS handshake error from 10.129.0.1:55028: EOF 2018/07/26 11:37:40 http: TLS handshake error from 10.129.0.1:55040: EOF Pod name: virt-api-7d79764579-jq8hh Pod phase: Running level=info timestamp=2018-07-26T11:35:36.618737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:35:44 http: TLS handshake error from 10.129.0.1:38728: EOF 2018/07/26 11:35:54 http: TLS handshake error from 10.129.0.1:38740: EOF 2018/07/26 11:36:04 http: TLS handshake error from 10.129.0.1:38752: EOF 2018/07/26 11:36:14 http: TLS handshake error from 10.129.0.1:38766: EOF 2018/07/26 11:36:24 http: TLS handshake error from 10.129.0.1:38778: EOF 2018/07/26 11:36:34 http: TLS handshake error from 10.129.0.1:38790: EOF level=info timestamp=2018-07-26T11:36:36.449759Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 
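The failure above, and the three that follow, all have the same shape: the test's create call returns a *errors.StatusError with Code 504 because the request to the apiserver timed out while the cluster was degraded, so the spec aborts before the behaviour under test is ever exercised. As a rough sketch of the Ginkgo/Gomega pattern that prints "Expected error: ... not to have occurred" (createVMI is a hypothetical stand-in, not the actual KubeVirt test helper):

package vmiconfig_test

import (
	"testing"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

func TestConfigurations(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Configurations")
}

// createVMI stands in for the REST POST the real test makes through the
// KubeVirt client; in the run above it returned a *errors.StatusError
// with Code 504 after the apiserver timed out.
func createVMI() error {
	return nil // hypothetical stub
}

var _ = Describe("VirtualMachineInstance definition", func() {
	It("is accepted by the apiserver", func() {
		err := createVMI()
		// This is the assertion behind "Expected error: ... not to have
		// occurred": any non-nil err fails the spec, and Gomega dumps the
		// error's Go representation, as seen in the output above.
		Expect(err).ToNot(HaveOccurred())
	})
})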
Pod name: disks-images-provider-8ftmp Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl Pod phase: Running
2018/07/26 11:35:40 http: TLS handshake error from 10.129.0.1:54894: EOF
2018/07/26 11:35:50 http: TLS handshake error from 10.129.0.1:54906: EOF
2018/07/26 11:36:00 http: TLS handshake error from 10.129.0.1:54918: EOF
level=info timestamp=2018-07-26T11:36:06.509198Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:36:10 http: TLS handshake error from 10.129.0.1:54932: EOF
2018/07/26 11:36:20 http: TLS handshake error from 10.129.0.1:54944: EOF
2018/07/26 11:36:30 http: TLS handshake error from 10.129.0.1:54956: EOF
2018/07/26 11:36:40 http: TLS handshake error from 10.129.0.1:54968: EOF
2018/07/26 11:36:50 http: TLS handshake error from 10.129.0.1:54980: EOF
2018/07/26 11:37:00 http: TLS handshake error from 10.129.0.1:54992: EOF
level=info timestamp=2018-07-26T11:37:06.329407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:10 http: TLS handshake error from 10.129.0.1:55004: EOF
2018/07/26 11:37:20 http: TLS handshake error from 10.129.0.1:55016: EOF
2018/07/26 11:37:30 http: TLS handshake error from 10.129.0.1:55028: EOF
2018/07/26 11:37:40 http: TLS handshake error from 10.129.0.1:55040: EOF
Pod name: virt-api-7d79764579-jq8hh Pod phase: Running
level=info timestamp=2018-07-26T11:35:36.618737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:35:44 http: TLS handshake error from 10.129.0.1:38728: EOF
2018/07/26 11:35:54 http: TLS handshake error from 10.129.0.1:38740: EOF
2018/07/26 11:36:04 http: TLS handshake error from 10.129.0.1:38752: EOF
2018/07/26 11:36:14 http: TLS handshake error from 10.129.0.1:38766: EOF
2018/07/26 11:36:24 http: TLS handshake error from 10.129.0.1:38778: EOF
2018/07/26 11:36:34 http: TLS handshake error from 10.129.0.1:38790: EOF
level=info timestamp=2018-07-26T11:36:36.449759Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:36:44 http: TLS handshake error from 10.129.0.1:38802: EOF
2018/07/26 11:36:54 http: TLS handshake error from 10.129.0.1:38814: EOF
2018/07/26 11:37:04 http: TLS handshake error from 10.129.0.1:38826: EOF
2018/07/26 11:37:14 http: TLS handshake error from 10.129.0.1:38838: EOF
2018/07/26 11:37:24 http: TLS handshake error from 10.129.0.1:38850: EOF
2018/07/26 11:37:34 http: TLS handshake error from 10.129.0.1:38862: EOF
level=info timestamp=2018-07-26T11:37:36.297085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running
level=info timestamp=2018-07-26T11:34:07.156140Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5rqj9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5rqj9"
level=info timestamp=2018-07-26T11:34:07.460619Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5rqj9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi5rqj9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: bbab39e7-90c7-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5rqj9"
level=info timestamp=2018-07-26T11:34:37.659272Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7cskr kind= uid=ce2626f1-90c7-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:34:37.659887Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7cskr kind= uid=ce2626f1-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:34:37.896166Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7cskr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7cskr"
level=info timestamp=2018-07-26T11:34:38.446512Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7cskr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi7cskr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ce2626f1-90c7-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7cskr"
level=info timestamp=2018-07-26T11:36:09.049855Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:09.050447Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:36:09.671125Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixpkgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmixpkgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 049955c1-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixpkgr"
level=info timestamp=2018-07-26T11:36:39.861074Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:39.861985Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:10.737646Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:10.738219Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:41.793639Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:41.794488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-n2w27 Pod phase: Running
level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qxrvv Pod phase: Running
level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw"
level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.395145Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395908Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.396123Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi9x4bz-dqmhw Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.902 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model defined [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:284
      should report defined CPU model
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:285

      Expected error:
          <*errors.StatusError | 0xc420157560>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1576
------------------------------
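The "reenqueuing VirtualMachineInstance" lines in the virt-controller log above are ordinary optimistic-concurrency traffic, not failures: an update raced with another writer ("the object has been modified") or with a deletion (the StorageError whose "UID in object meta" is empty), and the controller puts the key back on its workqueue. Outside a controller loop the same 409 Conflict is typically handled with client-go's retry helper; a minimal sketch, with getLatest and update as hypothetical stand-ins:

package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/client-go/util/retry"
)

// getLatest and update are hypothetical stand-ins for re-reading the object
// at its current ResourceVersion and for the write that may conflict.
func getLatest() (string, error) { return "obj@rv42", nil }
func update(obj string) error    { return nil }

func main() {
	// retry.RetryOnConflict re-reads and re-applies the change whenever the
	// write returns 409 Conflict ("the object has been modified ...").
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		obj, err := getLatest()
		if err != nil {
			return err
		}
		return update(obj)
	})
	if apierrors.IsConflict(err) {
		// Retries exhausted; a controller would instead re-enqueue the key,
		// which is exactly what the "reenqueuing ..." log lines record.
		fmt.Println("still conflicting; re-enqueue and try later")
	}
}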
Pod name: disks-images-provider-8ftmp Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl Pod phase: Running
level=info timestamp=2018-07-26T11:36:06.509198Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:36:10 http: TLS handshake error from 10.129.0.1:54932: EOF
2018/07/26 11:36:20 http: TLS handshake error from 10.129.0.1:54944: EOF
2018/07/26 11:36:30 http: TLS handshake error from 10.129.0.1:54956: EOF
2018/07/26 11:36:40 http: TLS handshake error from 10.129.0.1:54968: EOF
2018/07/26 11:36:50 http: TLS handshake error from 10.129.0.1:54980: EOF
2018/07/26 11:37:00 http: TLS handshake error from 10.129.0.1:54992: EOF
level=info timestamp=2018-07-26T11:37:06.329407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:10 http: TLS handshake error from 10.129.0.1:55004: EOF
2018/07/26 11:37:20 http: TLS handshake error from 10.129.0.1:55016: EOF
2018/07/26 11:37:30 http: TLS handshake error from 10.129.0.1:55028: EOF
2018/07/26 11:37:40 http: TLS handshake error from 10.129.0.1:55040: EOF
2018/07/26 11:37:50 http: TLS handshake error from 10.129.0.1:55052: EOF
2018/07/26 11:38:00 http: TLS handshake error from 10.129.0.1:55064: EOF
2018/07/26 11:38:10 http: TLS handshake error from 10.129.0.1:55076: EOF
Pod name: virt-api-7d79764579-jq8hh Pod phase: Running
2018/07/26 11:36:14 http: TLS handshake error from 10.129.0.1:38766: EOF
2018/07/26 11:36:24 http: TLS handshake error from 10.129.0.1:38778: EOF
2018/07/26 11:36:34 http: TLS handshake error from 10.129.0.1:38790: EOF
level=info timestamp=2018-07-26T11:36:36.449759Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:36:44 http: TLS handshake error from 10.129.0.1:38802: EOF
2018/07/26 11:36:54 http: TLS handshake error from 10.129.0.1:38814: EOF
2018/07/26 11:37:04 http: TLS handshake error from 10.129.0.1:38826: EOF
2018/07/26 11:37:14 http: TLS handshake error from 10.129.0.1:38838: EOF
2018/07/26 11:37:24 http: TLS handshake error from 10.129.0.1:38850: EOF
2018/07/26 11:37:34 http: TLS handshake error from 10.129.0.1:38862: EOF
level=info timestamp=2018-07-26T11:37:36.297085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:44 http: TLS handshake error from 10.129.0.1:38874: EOF
2018/07/26 11:37:54 http: TLS handshake error from 10.129.0.1:38886: EOF
2018/07/26 11:38:04 http: TLS handshake error from 10.129.0.1:38898: EOF
level=info timestamp=2018-07-26T11:38:06.289204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running
level=info timestamp=2018-07-26T11:34:37.659887Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7cskr kind= uid=ce2626f1-90c7-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:34:37.896166Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7cskr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7cskr"
level=info timestamp=2018-07-26T11:34:38.446512Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7cskr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi7cskr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ce2626f1-90c7-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7cskr"
level=info timestamp=2018-07-26T11:36:09.049855Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:09.050447Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:36:09.671125Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixpkgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmixpkgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 049955c1-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixpkgr"
level=info timestamp=2018-07-26T11:36:39.861074Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:39.861985Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:10.737646Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:10.738219Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:41.793639Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:41.794488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:42.241114Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9x4bz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9x4bz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3be1dec9-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9x4bz"
level=info timestamp=2018-07-26T11:38:12.449169Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:38:12.451321Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-n2w27 Pod phase: Running
level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qxrvv Pod phase: Running
level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw"
level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmibv65t-p4lhk Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.884 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model equals to passthrough [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:312
      should report exactly the same model as node CPU
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:313

      Expected error:
          <*errors.StatusError | 0xc420f359e0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1576
------------------------------
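The error value dumped in each of these failures is an apimachinery StatusError wrapping a metav1.Status, so its Reason and Code can be inspected programmatically rather than by matching the message string. A small sketch reconstructing the value from the output above and classifying it (assuming current k8s.io/apimachinery packages; not code from this test run):

package main

import (
	"fmt"
	"net/http"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// Rebuild the error shape printed in the failure output above.
	err := &apierrors.StatusError{ErrStatus: metav1.Status{
		Status:  metav1.StatusFailure,
		Message: "Timeout: request did not complete within allowed duration",
		Reason:  metav1.StatusReasonTimeout,
		Code:    http.StatusGatewayTimeout, // 504
	}}
	// IsTimeout reads the embedded Reason, so this prints "true": the
	// apiserver gave up on the request, a retryable server-side condition.
	fmt.Println(apierrors.IsTimeout(err))
}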
Pod name: disks-images-provider-8ftmp Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl Pod phase: Running
2018/07/26 11:36:40 http: TLS handshake error from 10.129.0.1:54968: EOF
2018/07/26 11:36:50 http: TLS handshake error from 10.129.0.1:54980: EOF
2018/07/26 11:37:00 http: TLS handshake error from 10.129.0.1:54992: EOF
level=info timestamp=2018-07-26T11:37:06.329407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:10 http: TLS handshake error from 10.129.0.1:55004: EOF
2018/07/26 11:37:20 http: TLS handshake error from 10.129.0.1:55016: EOF
2018/07/26 11:37:30 http: TLS handshake error from 10.129.0.1:55028: EOF
2018/07/26 11:37:40 http: TLS handshake error from 10.129.0.1:55040: EOF
2018/07/26 11:37:50 http: TLS handshake error from 10.129.0.1:55052: EOF
2018/07/26 11:38:00 http: TLS handshake error from 10.129.0.1:55064: EOF
2018/07/26 11:38:10 http: TLS handshake error from 10.129.0.1:55076: EOF
2018/07/26 11:38:20 http: TLS handshake error from 10.129.0.1:55088: EOF
2018/07/26 11:38:30 http: TLS handshake error from 10.129.0.1:55100: EOF
level=info timestamp=2018-07-26T11:38:36.455761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:40 http: TLS handshake error from 10.129.0.1:55112: EOF
Pod name: virt-api-7d79764579-jq8hh Pod phase: Running
level=info timestamp=2018-07-26T11:36:36.449759Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:36:44 http: TLS handshake error from 10.129.0.1:38802: EOF
2018/07/26 11:36:54 http: TLS handshake error from 10.129.0.1:38814: EOF
2018/07/26 11:37:04 http: TLS handshake error from 10.129.0.1:38826: EOF
2018/07/26 11:37:14 http: TLS handshake error from 10.129.0.1:38838: EOF
2018/07/26 11:37:24 http: TLS handshake error from 10.129.0.1:38850: EOF
2018/07/26 11:37:34 http: TLS handshake error from 10.129.0.1:38862: EOF
level=info timestamp=2018-07-26T11:37:36.297085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:44 http: TLS handshake error from 10.129.0.1:38874: EOF
2018/07/26 11:37:54 http: TLS handshake error from 10.129.0.1:38886: EOF
2018/07/26 11:38:04 http: TLS handshake error from 10.129.0.1:38898: EOF
level=info timestamp=2018-07-26T11:38:06.289204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:14 http: TLS handshake error from 10.129.0.1:38910: EOF
2018/07/26 11:38:24 http: TLS handshake error from 10.129.0.1:38922: EOF
2018/07/26 11:38:34 http: TLS handshake error from 10.129.0.1:38934: EOF
Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running
level=info timestamp=2018-07-26T11:36:09.049855Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:09.050447Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixpkgr kind= uid=049955c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:36:09.671125Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixpkgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmixpkgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 049955c1-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixpkgr"
level=info timestamp=2018-07-26T11:36:39.861074Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:36:39.861985Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij6fr2 kind= uid=16fd7f15-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:10.737646Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:10.738219Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:41.793639Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:41.794488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:42.241114Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9x4bz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9x4bz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3be1dec9-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9x4bz"
level=info timestamp=2018-07-26T11:38:12.449169Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:38:12.451321Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:38:43.307015Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:38:43.307912Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:38:43.522160Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm"
Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-n2w27 Pod phase: Running
level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qxrvv Pod phase: Running
level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw"
level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.395145Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395908Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.396123Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmijdrdm-dtkhk Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [30.755 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238
    when CPU model not defined [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:336
      should report CPU model from libvirt capabilities
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:337

      Expected error:
          <*errors.StatusError | 0xc420f34240>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1576
------------------------------
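The "http: TLS handshake error ... EOF" lines that dominate both virt-api logs are emitted by Go's net/http server whenever a client opens a TCP connection to the TLS port and closes it without sending a ClientHello, which is what TCP-level health checks and port probes do on a fixed interval (note the ten-second cadence above). The sketch below reproduces the log line locally; it is illustrative only, not KubeVirt code:

package main

import (
	"net"
	"net/http"
	"net/http/httptest"
	"time"
)

func main() {
	// Start a TLS server; its embedded http.Server logs handshake
	// failures to stderr by default.
	srv := httptest.NewUnstartedServer(http.NotFoundHandler())
	srv.StartTLS()
	defer srv.Close()

	// Connect over plain TCP and close without sending a ClientHello,
	// as a TCP readiness probe would.
	conn, err := net.Dial("tcp", srv.Listener.Addr().String())
	if err == nil {
		conn.Close()
	}

	// Give the server a moment to print:
	//   http: TLS handshake error from 127.0.0.1:NNNNN: EOF
	time.Sleep(200 * time.Millisecond)
}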
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:37:10 http: TLS handshake error from 10.129.0.1:55004: EOF
2018/07/26 11:37:20 http: TLS handshake error from 10.129.0.1:55016: EOF
2018/07/26 11:37:30 http: TLS handshake error from 10.129.0.1:55028: EOF
2018/07/26 11:37:40 http: TLS handshake error from 10.129.0.1:55040: EOF
2018/07/26 11:37:50 http: TLS handshake error from 10.129.0.1:55052: EOF
2018/07/26 11:38:00 http: TLS handshake error from 10.129.0.1:55064: EOF
2018/07/26 11:38:10 http: TLS handshake error from 10.129.0.1:55076: EOF
2018/07/26 11:38:20 http: TLS handshake error from 10.129.0.1:55088: EOF
2018/07/26 11:38:30 http: TLS handshake error from 10.129.0.1:55100: EOF
level=info timestamp=2018-07-26T11:38:36.455761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:40 http: TLS handshake error from 10.129.0.1:55112: EOF
2018/07/26 11:38:50 http: TLS handshake error from 10.129.0.1:55124: EOF
2018/07/26 11:39:00 http: TLS handshake error from 10.129.0.1:55136: EOF
level=info timestamp=2018-07-26T11:39:06.509616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:10 http: TLS handshake error from 10.129.0.1:55150: EOF
Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:37:14 http: TLS handshake error from 10.129.0.1:38838: EOF
2018/07/26 11:37:24 http: TLS handshake error from 10.129.0.1:38850: EOF
2018/07/26 11:37:34 http: TLS handshake error from 10.129.0.1:38862: EOF
level=info timestamp=2018-07-26T11:37:36.297085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:44 http: TLS handshake error from 10.129.0.1:38874: EOF
2018/07/26 11:37:54 http: TLS handshake error from 10.129.0.1:38886: EOF
2018/07/26 11:38:04 http: TLS handshake error from 10.129.0.1:38898: EOF
level=info timestamp=2018-07-26T11:38:06.289204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:14 http: TLS handshake error from 10.129.0.1:38910: EOF
2018/07/26 11:38:24 http: TLS handshake error from 10.129.0.1:38922: EOF
2018/07/26 11:38:34 http: TLS handshake error from 10.129.0.1:38934: EOF
2018/07/26 11:38:44 http: TLS handshake error from 10.129.0.1:38946: EOF
2018/07/26 11:38:54 http: TLS handshake error from 10.129.0.1:38958: EOF
2018/07/26 11:39:04 http: TLS handshake error from 10.129.0.1:38970: EOF
2018/07/26 11:39:14 http: TLS handshake error from 10.129.0.1:38984: EOF
Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:37:10.737646Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:10.738219Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7g8hl kind= uid=2965de0c-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:41.793639Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:37:41.794488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:42.241114Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9x4bz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9x4bz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3be1dec9-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9x4bz"
level=info timestamp=2018-07-26T11:38:12.449169Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:38:12.451321Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:38:43.307015Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:38:43.307912Z
pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:38:43.522160Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm" level=info timestamp=2018-07-26T11:38:43.857772Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijdrdm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 60958b6b-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm" level=info timestamp=2018-07-26T11:39:14.130085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:39:14.130625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:39:14.403393Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q" level=info timestamp=2018-07-26T11:39:14.428469Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion." 
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr" level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:33:05.374256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:33:05.374485Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:33:05.374827Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:33:05.376245Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi6wsdw" level=info timestamp=2018-07-26T11:33:05.381151Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:33:05.386204Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:33:05.389254Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:33:05.391447Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392419Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.392629Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.394315Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:33:05.395145Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6wsdw, existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:33:05.395908Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:33:05.396123Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6wsdw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmivbx7q-g6tnd
Pod phase: Pending

• Failure [30.866 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  New VirtualMachineInstance with all supported drives
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:357
    should have all the device nodes [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:380

    Expected error:
        <*errors.StatusError | 0xc420156ea0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:382
------------------------------
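The virt-controller "reenqueuing VirtualMachineInstance" lines in these dumps are routine optimistic-concurrency conflicts, not the cause of the 504s: the controller wrote against a stale ResourceVersion and requeued the object. A hedged sketch of the standard client-go pattern behind that behavior; the update closure here is a hypothetical stand-in for whatever write the controller performs:

    package main

    import (
    	"fmt"

    	"k8s.io/client-go/util/retry"
    )

    func main() {
    	attempts := 0
    	// RetryOnConflict re-runs the closure whenever it returns a 409
    	// Conflict ("the object has been modified; please apply your
    	// changes to the latest version and try again"), backing off
    	// between tries.
    	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
    		attempts++
    		// A real controller would refetch the VMI at its latest
    		// ResourceVersion here and reapply the change.
    		return nil
    	})
    	fmt.Println(attempts, err)
    }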
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:37:50 http: TLS handshake error from 10.129.0.1:55052: EOF
2018/07/26 11:38:00 http: TLS handshake error from 10.129.0.1:55064: EOF
2018/07/26 11:38:10 http: TLS handshake error from 10.129.0.1:55076: EOF
2018/07/26 11:38:20 http: TLS handshake error from 10.129.0.1:55088: EOF
2018/07/26 11:38:30 http: TLS handshake error from 10.129.0.1:55100: EOF
level=info timestamp=2018-07-26T11:38:36.455761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:40 http: TLS handshake error from 10.129.0.1:55112: EOF
2018/07/26 11:38:50 http: TLS handshake error from 10.129.0.1:55124: EOF
2018/07/26 11:39:00 http: TLS handshake error from 10.129.0.1:55136: EOF
level=info timestamp=2018-07-26T11:39:06.509616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:10 http: TLS handshake error from 10.129.0.1:55150: EOF
2018/07/26 11:39:20 http: TLS handshake error from 10.129.0.1:55162: EOF
2018/07/26 11:39:30 http: TLS handshake error from 10.129.0.1:55174: EOF
level=info timestamp=2018-07-26T11:39:36.410139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:40 http: TLS handshake error from 10.129.0.1:55186: EOF
Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
level=info timestamp=2018-07-26T11:37:36.297085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:37:44 http: TLS handshake error from 10.129.0.1:38874: EOF
2018/07/26 11:37:54 http: TLS handshake error from 10.129.0.1:38886: EOF
2018/07/26 11:38:04 http: TLS handshake error from 10.129.0.1:38898: EOF
level=info timestamp=2018-07-26T11:38:06.289204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:14 http: TLS handshake error from 10.129.0.1:38910: EOF
2018/07/26 11:38:24 http: TLS handshake error from 10.129.0.1:38922: EOF
2018/07/26 11:38:34 http: TLS handshake error from 10.129.0.1:38934: EOF
2018/07/26 11:38:44 http: TLS handshake error from 10.129.0.1:38946: EOF
2018/07/26 11:38:54 http: TLS handshake error from 10.129.0.1:38958: EOF
2018/07/26 11:39:04 http: TLS handshake error from 10.129.0.1:38970: EOF
2018/07/26 11:39:14 http: TLS handshake error from 10.129.0.1:38984: EOF
2018/07/26 11:39:24 http: TLS handshake error from 10.129.0.1:38996: EOF
2018/07/26 11:39:34 http: TLS handshake error from 10.129.0.1:39008: EOF
2018/07/26 11:39:44 http: TLS handshake error from 10.129.0.1:39020: EOF
Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:37:41.794488Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9x4bz kind= uid=3be1dec9-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:37:42.241114Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9x4bz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9x4bz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3be1dec9-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9x4bz"
level=info timestamp=2018-07-26T11:38:12.449169Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:38:12.451321Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv65t kind= uid=4e2e770b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info
timestamp=2018-07-26T11:38:43.307015Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:38:43.307912Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:38:43.522160Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm" level=info timestamp=2018-07-26T11:38:43.857772Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijdrdm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 60958b6b-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm" level=info timestamp=2018-07-26T11:39:14.130085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:39:14.130625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:39:14.403393Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q" level=info timestamp=2018-07-26T11:39:14.428469Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q" level=info timestamp=2018-07-26T11:39:44.731625Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:39:44.732486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:39:45.102001Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg524\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg524" Pod name: virt-controller-7d57d96b65-fkcm2 Pod 
phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr" level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:39:33.816846Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=Domain uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-26T11:39:33.818442Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:39:33.818627Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmivbx7q, existing: true\n" level=info timestamp=2018-07-26T11:39:33.818724Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-26T11:39:33.818816Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:39:33.818898Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:39:33.819093Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T11:39:33.823111Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-26T11:39:33.951889Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:39:33.952711Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmivbx7q, existing: true\n" level=info timestamp=2018-07-26T11:39:33.953169Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-26T11:39:33.953617Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:39:33.954024Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:39:33.954927Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-26T11:39:33.967256Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmilg524-92d57 Pod phase: Pending Pod name: virt-launcher-testvmivbx7q-g6tnd Pod phase: Running level=info timestamp=2018-07-26T11:39:31.960797Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:39:32.981805Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:39:32.998764Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 45ea60bd-5c76-4157-9152-3e102e614b62" level=info timestamp=2018-07-26T11:39:33.010861Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:39:33.025440Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:39:33.696699Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:39:33.765839Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:39:33.780929Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-26T11:39:33.785633Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:33.789828Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.793628Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.815603Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.823695Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.961477Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:34.017156Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 45ea60bd-5c76-4157-9152-3e102e614b62: 190"

• Failure in Spec Setup (BeforeEach) [30.727 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose ClusterIP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:68
      Should expose a Cluster IP service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:71

      Expected error:
          <*errors.StatusError | 0xc420f35440>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
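The "http: TLS handshake error ... EOF" lines recurring every ten seconds in both virt-api pods look like probe traffic rather than failures: something opens a TCP connection to the TLS port and closes it without handshaking, which Go's net/http server logs in exactly this form. A small sketch reproducing the symptom; the address is hypothetical:

    package main

    import "net"

    func probe(addr string) error {
    	// Dial the TLS port but never speak TLS: the server's accept
    	// loop reads EOF mid-handshake and logs
    	// "http: TLS handshake error from <ip>:<port>: EOF".
    	conn, err := net.Dial("tcp", addr)
    	if err != nil {
    		return err
    	}
    	return conn.Close()
    }

    func main() {
    	_ = probe("127.0.0.1:8443") // e.g. a local virt-api-style endpoint
    }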
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:38:30 http: TLS handshake error from 10.129.0.1:55100: EOF
level=info timestamp=2018-07-26T11:38:36.455761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:40 http: TLS handshake error from 10.129.0.1:55112: EOF
2018/07/26 11:38:50 http: TLS handshake error from 10.129.0.1:55124: EOF
2018/07/26 11:39:00 http: TLS handshake error from 10.129.0.1:55136: EOF
level=info timestamp=2018-07-26T11:39:06.509616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:10 http: TLS handshake error from 10.129.0.1:55150: EOF
2018/07/26 11:39:20 http: TLS handshake error from 10.129.0.1:55162: EOF
2018/07/26 11:39:30 http: TLS handshake error from 10.129.0.1:55174: EOF
level=info timestamp=2018-07-26T11:39:36.410139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:40 http: TLS handshake error from 10.129.0.1:55186: EOF
2018/07/26 11:39:50 http: TLS handshake error from 10.129.0.1:55198: EOF
2018/07/26 11:40:00 http: TLS handshake error from 10.129.0.1:55210: EOF
level=info timestamp=2018-07-26T11:40:06.439148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:10 http: TLS handshake error from 10.129.0.1:55222: EOF
Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:38:04 http: TLS handshake error from 10.129.0.1:38898: EOF
level=info timestamp=2018-07-26T11:38:06.289204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:38:14 http: TLS handshake error from 10.129.0.1:38910: EOF
2018/07/26 11:38:24 http: TLS handshake error from 10.129.0.1:38922: EOF
2018/07/26 11:38:34 http: TLS handshake error from 10.129.0.1:38934: EOF
2018/07/26 11:38:44 http: TLS handshake error from 10.129.0.1:38946: EOF
2018/07/26 11:38:54 http: TLS handshake error from 10.129.0.1:38958: EOF
2018/07/26 11:39:04 http: TLS handshake error from 10.129.0.1:38970: EOF
2018/07/26 11:39:14 http: TLS handshake error from 10.129.0.1:38984: EOF
2018/07/26 11:39:24 http: TLS handshake error from 10.129.0.1:38996: EOF
2018/07/26 11:39:34 http: TLS handshake error from 10.129.0.1:39008: EOF
2018/07/26 11:39:44 http: TLS handshake error from 10.129.0.1:39020: EOF
2018/07/26 11:39:54 http: TLS handshake error from 10.129.0.1:39032: EOF
2018/07/26 11:40:04 http: TLS handshake error from 10.129.0.1:39044: EOF
2018/07/26 11:40:14 http: TLS handshake error from 10.129.0.1:39056: EOF
Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:38:43.307015Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:38:43.307912Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdrdm kind= uid=60958b6b-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:38:43.522160Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm"
level=info timestamp=2018-07-26T11:38:43.857772Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijdrdm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 60958b6b-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm"
level=info timestamp=2018-07-26T11:39:14.130085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:39:14.130625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:39:14.403393Z pos=vmi.go:157
component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q" level=info timestamp=2018-07-26T11:39:14.428469Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q" level=info timestamp=2018-07-26T11:39:44.731625Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:39:44.732486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:39:45.102001Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg524\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg524" level=info timestamp=2018-07-26T11:40:15.435412Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:40:15.436049Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:40:15.703596Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb" level=info timestamp=2018-07-26T11:40:15.746555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr" level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n" level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:40:01.770557Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=Domain uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-26T11:40:01.794252Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-26T11:40:01.801148Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:40:01.801778Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilg524, existing: true\n" level=info timestamp=2018-07-26T11:40:01.801865Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-26T11:40:01.801915Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:40:01.801970Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:40:01.802092Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T11:40:01.857950Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:40:01.861293Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilg524, existing: true\n" level=info timestamp=2018-07-26T11:40:01.866815Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-26T11:40:01.868326Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:40:01.869553Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:40:01.870391Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-26T11:40:01.880059Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmilg524-92d57 Pod phase: Running level=info timestamp=2018-07-26T11:40:00.535505Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:40:01.355095Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:40:01.362824Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 3ae660da-0c69-4461-a4f8-ab71f7060132" level=info timestamp=2018-07-26T11:40:01.363381Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:40:01.366567Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:40:01.727530Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:40:01.755836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:40:01.761866Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-26T11:40:01.767215Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:40:01.771817Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:40:01.772044Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-26T11:40:01.789934Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:40:01.795131Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:40:01.877618Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:40:02.370297Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3ae660da-0c69-4461-a4f8-ab71f7060132: 184" Pod name: virt-launcher-testvmipxrsb-pw9vl Pod phase: Pending Pod name: virt-launcher-testvmivbx7q-g6tnd Pod phase: Running level=info timestamp=2018-07-26T11:39:31.960797Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:39:32.981805Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:39:32.998764Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 45ea60bd-5c76-4157-9152-3e102e614b62" level=info timestamp=2018-07-26T11:39:33.010861Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:39:33.025440Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:39:33.696699Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:39:33.765839Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:39:33.780929Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-26T11:39:33.785633Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:33.789828Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.793628Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.815603Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.823695Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.961477Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:34.017156Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 45ea60bd-5c76-4157-9152-3e102e614b62: 190"

• Failure in Spec Setup (BeforeEach) [30.988 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose ClusterIP service with string target-port [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:98
      Should expose a ClusterIP service and connect to the vm on port 80
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:101

      Expected error:
          <*errors.StatusError | 0xc420156630>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
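Since every BeforeEach here dies on the same transient 504, a retry-with-deadline around the create call would separate a momentarily slow apiserver from a genuinely broken one. A hedged sketch using only the stock apimachinery wait helpers; createVMI is a hypothetical stand-in for the suite's actual create call:

    package main

    import (
    	"fmt"
    	"time"

    	apierrors "k8s.io/apimachinery/pkg/api/errors"
    	"k8s.io/apimachinery/pkg/util/wait"
    )

    func createWithRetry(createVMI func() error) error {
    	// Poll until the create succeeds, retrying only on apiserver
    	// timeouts and failing fast on any other error.
    	return wait.PollImmediate(5*time.Second, 2*time.Minute, func() (bool, error) {
    		err := createVMI()
    		if err == nil {
    			return true, nil
    		}
    		if apierrors.IsTimeout(err) || apierrors.IsServerTimeout(err) {
    			return false, nil // transient; try again
    		}
    		return false, err
    	})
    }

    func main() {
    	fmt.Println(createWithRetry(func() error { return nil }))
    }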
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:38:50 http: TLS handshake error from 10.129.0.1:55124: EOF
2018/07/26 11:39:00 http: TLS handshake error from 10.129.0.1:55136: EOF
level=info timestamp=2018-07-26T11:39:06.509616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:10 http: TLS handshake error from 10.129.0.1:55150: EOF
2018/07/26 11:39:20 http: TLS handshake error from 10.129.0.1:55162: EOF
2018/07/26 11:39:30 http: TLS handshake error from 10.129.0.1:55174: EOF
level=info timestamp=2018-07-26T11:39:36.410139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:40 http: TLS handshake error from 10.129.0.1:55186: EOF
2018/07/26 11:39:50 http: TLS handshake error from 10.129.0.1:55198: EOF
2018/07/26 11:40:00 http: TLS handshake error from 10.129.0.1:55210: EOF
level=info timestamp=2018-07-26T11:40:06.439148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:10 http: TLS handshake error from 10.129.0.1:55222: EOF
2018/07/26 11:40:20 http: TLS handshake error from 10.129.0.1:55234: EOF
2018/07/26 11:40:30 http: TLS handshake error from 10.129.0.1:55246: EOF
2018/07/26 11:40:40 http: TLS handshake error from 10.129.0.1:55258: EOF
Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:38:34 http: TLS handshake error from 10.129.0.1:38934: EOF
2018/07/26 11:38:44 http: TLS handshake error from 10.129.0.1:38946: EOF
2018/07/26 11:38:54 http: TLS handshake error from 10.129.0.1:38958: EOF
2018/07/26 11:39:04 http: TLS handshake error from 10.129.0.1:38970: EOF
2018/07/26 11:39:14 http: TLS handshake error from 10.129.0.1:38984: EOF
2018/07/26 11:39:24 http: TLS handshake error from 10.129.0.1:38996: EOF
2018/07/26 11:39:34 http: TLS handshake error from 10.129.0.1:39008: EOF
2018/07/26 11:39:44 http: TLS handshake error from 10.129.0.1:39020: EOF
2018/07/26 11:39:54 http: TLS handshake error from 10.129.0.1:39032: EOF
2018/07/26 11:40:04 http: TLS handshake error from 10.129.0.1:39044: EOF
2018/07/26 11:40:14 http: TLS handshake error from 10.129.0.1:39056: EOF
2018/07/26 11:40:24 http: TLS handshake error from 10.129.0.1:39068: EOF
2018/07/26 11:40:34 http: TLS handshake error from 10.129.0.1:39080: EOF
level=info timestamp=2018-07-26T11:40:36.574737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:44 http: TLS handshake error from 10.129.0.1:39092: EOF
Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:38:43.522160Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm"
level=info timestamp=2018-07-26T11:38:43.857772Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijdrdm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijdrdm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 60958b6b-90c8-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijdrdm"
level=info timestamp=2018-07-26T11:39:14.130085Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:39:14.130625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:39:14.403393Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q"
level=info timestamp=2018-07-26T11:39:14.428469Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and
try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q" level=info timestamp=2018-07-26T11:39:44.731625Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:39:44.732486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:39:45.102001Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg524\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg524" level=info timestamp=2018-07-26T11:40:15.435412Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:40:15.436049Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:40:15.703596Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb" level=info timestamp=2018-07-26T11:40:15.746555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb" level=info timestamp=2018-07-26T11:40:46.417423Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:40:46.418947Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:15:04.044267Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:15:04.045046Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:15:04.045323Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing deletion." 
level=info timestamp=2018-07-26T11:15:04.047472Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmikrbfhjwglr"
level=info timestamp=2018-07-26T11:15:04.058512Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:15:04.059660Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:15:04.060092Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.060493Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.061615Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.062034Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.062892Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:15:04.063359Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikrbfhjwglr, existing: false\n"
level=info timestamp=2018-07-26T11:15:04.063624Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:15:04.064122Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:15:04.064668Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmikrbfhjwglr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:40:33.179893Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=Domain uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-26T11:40:33.323748Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:40:33.324026Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipxrsb, existing: true\n"
level=info timestamp=2018-07-26T11:40:33.324084Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T11:40:33.324153Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:40:33.324219Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:40:33.324429Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T11:40:33.426508Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-26T11:40:33.720369Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:40:33.737918Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipxrsb, existing: true\n"
level=info timestamp=2018-07-26T11:40:33.753080Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:40:33.753244Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:40:33.753299Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:40:33.753489Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:40:33.786129Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi5pfwp-2ns9n
Pod phase: Pending

Pod name: virt-launcher-testvmilg524-92d57
Pod phase: Running
level=info timestamp=2018-07-26T11:40:00.535505Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.355095Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:40:01.362824Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 3ae660da-0c69-4461-a4f8-ab71f7060132"
level=info timestamp=2018-07-26T11:40:01.363381Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:40:01.366567Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.727530Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.755836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:01.761866Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:40:01.767215Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:01.771817Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.772044Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.789934Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:01.795131Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.877618Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:02.370297Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3ae660da-0c69-4461-a4f8-ab71f7060132: 184"

Pod name: virt-launcher-testvmipxrsb-pw9vl
Pod phase: Running
level=info timestamp=2018-07-26T11:40:32.270247Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:40:32.793395Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 9a9e14a0-7754-42d1-8435-dc14431f9628"
level=info timestamp=2018-07-26T11:40:32.793431Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:40:32.793976Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:40:32.803194Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.088647Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:40:33.138445Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:33.181785Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.184940Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:40:33.214229Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:40:33.234843Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:33.250124Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:33.547460Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.780525Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:33.805240Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 9a9e14a0-7754-42d1-8435-dc14431f9628: 184"

Pod name: virt-launcher-testvmivbx7q-g6tnd
Pod phase: Running
level=info timestamp=2018-07-26T11:39:31.960797Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:39:32.981805Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:39:32.998764Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 45ea60bd-5c76-4157-9152-3e102e614b62"
level=info timestamp=2018-07-26T11:39:33.010861Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:39:33.025440Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.696699Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.765839Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.780929Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:39:33.785633Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:33.789828Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.793628Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.815603Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.823695Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.961477Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:34.017156Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 45ea60bd-5c76-4157-9152-3e102e614b62: 190"

• Failure in Spec Setup (BeforeEach) [30.915 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VM
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61
    Expose NodePort service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:124
      Should expose a NodePort service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:129

      Expected error:
          <*errors.StatusError | 0xc420452750>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------

Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:39:20 http: TLS handshake error from 10.129.0.1:55162: EOF
2018/07/26 11:39:30 http: TLS handshake error from 10.129.0.1:55174: EOF
level=info timestamp=2018-07-26T11:39:36.410139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:39:40 http: TLS handshake error from 10.129.0.1:55186: EOF
2018/07/26 11:39:50 http: TLS handshake error from 10.129.0.1:55198: EOF
2018/07/26 11:40:00 http: TLS handshake error from 10.129.0.1:55210: EOF
level=info timestamp=2018-07-26T11:40:06.439148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:10 http: TLS handshake error from 10.129.0.1:55222: EOF
2018/07/26 11:40:20 http: TLS handshake error from 10.129.0.1:55234: EOF
2018/07/26 11:40:30 http: TLS handshake error from 10.129.0.1:55246: EOF
2018/07/26 11:40:40 http: TLS handshake error from 10.129.0.1:55258: EOF
2018/07/26 11:40:50 http: TLS handshake error from 10.129.0.1:55270: EOF
2018/07/26 11:41:00 http: TLS handshake error from 10.129.0.1:55282: EOF
level=info timestamp=2018-07-26T11:41:06.281244Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:41:10 http: TLS handshake error from 10.129.0.1:55294: EOF

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:39:04 http: TLS handshake error from 10.129.0.1:38970: EOF
2018/07/26 11:39:14 http: TLS handshake error from 10.129.0.1:38984: EOF
2018/07/26 11:39:24 http: TLS handshake error from 10.129.0.1:38996: EOF
2018/07/26 11:39:34 http: TLS handshake error from 10.129.0.1:39008: EOF
2018/07/26 11:39:44 http: TLS handshake error from 10.129.0.1:39020: EOF
2018/07/26 11:39:54 http: TLS handshake error from 10.129.0.1:39032: EOF
2018/07/26 11:40:04 http: TLS handshake error from 10.129.0.1:39044: EOF
2018/07/26 11:40:14 http: TLS handshake error from 10.129.0.1:39056: EOF
2018/07/26 11:40:24 http: TLS handshake error from 10.129.0.1:39068: EOF
2018/07/26 11:40:34 http: TLS handshake error from 10.129.0.1:39080: EOF
level=info timestamp=2018-07-26T11:40:36.574737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:44 http: TLS handshake error from 10.129.0.1:39092: EOF
2018/07/26 11:40:54 http: TLS handshake error from 10.129.0.1:39104: EOF
2018/07/26 11:41:04 http: TLS handshake error from 10.129.0.1:39116: EOF
2018/07/26 11:41:14 http: TLS handshake error from 10.129.0.1:39128: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:39:14.130625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:39:14.403393Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q"
level=info timestamp=2018-07-26T11:39:14.428469Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q"
level=info timestamp=2018-07-26T11:39:44.731625Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:39:44.732486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:39:45.102001Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg524\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg524"
level=info timestamp=2018-07-26T11:40:15.435412Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:40:15.436049Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:40:15.703596Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb"
level=info timestamp=2018-07-26T11:40:15.746555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb"
level=info timestamp=2018-07-26T11:40:46.417423Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:40:46.418947Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:41:17.394453Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:41:17.395417Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:41:17.731989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicsvmn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicsvmn"

Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:41:02.830249Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="No update processing required"
level=error timestamp=2018-07-26T11:41:02.846379Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5pfwp\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-07-26T11:41:02.846575Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5pfwp\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5pfwp"
level=info timestamp=2018-07-26T11:41:02.848125Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi5pfwp, existing: true\n"
level=info timestamp=2018-07-26T11:41:02.848457Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:41:02.848583Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:02.849098Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:02.849808Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:41:02.857393Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:41:02.869683Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi5pfwp, existing: true\n"
level=info timestamp=2018-07-26T11:41:02.869854Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:41:02.869947Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:02.870044Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:02.870446Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:41:02.881507Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:40:33.179893Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=Domain uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-26T11:40:33.323748Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:40:33.324026Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipxrsb, existing: true\n"
level=info timestamp=2018-07-26T11:40:33.324084Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T11:40:33.324153Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:40:33.324219Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:40:33.324429Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T11:40:33.426508Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-26T11:40:33.720369Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:40:33.737918Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipxrsb, existing: true\n"
level=info timestamp=2018-07-26T11:40:33.753080Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:40:33.753244Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:40:33.753299Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:40:33.753489Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:40:33.786129Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi5pfwp-2ns9n
Pod phase: Running
level=info timestamp=2018-07-26T11:41:01.900295Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:41:01.907000Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 3f2154f4-d6b6-470b-a6ba-78c21602f202"
level=info timestamp=2018-07-26T11:41:01.907447Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:41:01.933250Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.696873Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:41:02.730319Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:02.739736Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:41:02.741825Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.742382Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.742694Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:41:02.754473Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:02.770694Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.855912Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.877490Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.912262Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3f2154f4-d6b6-470b-a6ba-78c21602f202: 188"

Pod name: virt-launcher-testvmicsvmn-lt6ql
Pod phase: Pending

Pod name: virt-launcher-testvmilg524-92d57
Pod phase: Running
level=info timestamp=2018-07-26T11:40:00.535505Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.355095Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:40:01.362824Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 3ae660da-0c69-4461-a4f8-ab71f7060132"
level=info timestamp=2018-07-26T11:40:01.363381Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:40:01.366567Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.727530Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.755836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:01.761866Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:40:01.767215Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:01.771817Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.772044Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.789934Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:01.795131Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.877618Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:02.370297Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3ae660da-0c69-4461-a4f8-ab71f7060132: 184"

Pod name: virt-launcher-testvmipxrsb-pw9vl
Pod phase: Running
level=info timestamp=2018-07-26T11:40:32.270247Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:40:32.793395Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 9a9e14a0-7754-42d1-8435-dc14431f9628"
level=info timestamp=2018-07-26T11:40:32.793431Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:40:32.793976Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:40:32.803194Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.088647Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:40:33.138445Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:33.181785Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.184940Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:40:33.214229Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:40:33.234843Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:33.250124Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:33.547460Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.780525Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:33.805240Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 9a9e14a0-7754-42d1-8435-dc14431f9628: 184"

Pod name: virt-launcher-testvmivbx7q-g6tnd
Pod phase: Running
level=info timestamp=2018-07-26T11:39:31.960797Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:39:32.981805Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:39:32.998764Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 45ea60bd-5c76-4157-9152-3e102e614b62"
level=info timestamp=2018-07-26T11:39:33.010861Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:39:33.025440Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.696699Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.765839Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.780929Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:39:33.785633Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:33.789828Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.793628Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.815603Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.823695Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.961477Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:34.017156Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 45ea60bd-5c76-4157-9152-3e102e614b62: 190"

• Failure in Spec Setup (BeforeEach) [31.092 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose UDP service on a VMI
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166
    Expose ClusterIP UDP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:173
      Should expose a ClusterIP service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:177

      Expected error:
          <*errors.StatusError | 0xc420157b90>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------

Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:39:50 http: TLS handshake error from 10.129.0.1:55198: EOF
2018/07/26 11:40:00 http: TLS handshake error from 10.129.0.1:55210: EOF
level=info timestamp=2018-07-26T11:40:06.439148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:10 http: TLS handshake error from 10.129.0.1:55222: EOF
2018/07/26 11:40:20 http: TLS handshake error from 10.129.0.1:55234: EOF
2018/07/26 11:40:30 http: TLS handshake error from 10.129.0.1:55246: EOF
2018/07/26 11:40:40 http: TLS handshake error from 10.129.0.1:55258: EOF
2018/07/26 11:40:50 http: TLS handshake error from 10.129.0.1:55270: EOF
2018/07/26 11:41:00 http: TLS handshake error from 10.129.0.1:55282: EOF
level=info timestamp=2018-07-26T11:41:06.281244Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:41:10 http: TLS handshake error from 10.129.0.1:55294: EOF
2018/07/26 11:41:20 http: TLS handshake error from 10.129.0.1:55306: EOF
2018/07/26 11:41:30 http: TLS handshake error from 10.129.0.1:55318: EOF
level=info timestamp=2018-07-26T11:41:36.379919Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:41:40 http: TLS handshake error from 10.129.0.1:55330: EOF

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:39:34 http: TLS handshake error from 10.129.0.1:39008: EOF
2018/07/26 11:39:44 http: TLS handshake error from 10.129.0.1:39020: EOF
2018/07/26 11:39:54 http: TLS handshake error from 10.129.0.1:39032: EOF
2018/07/26 11:40:04 http: TLS handshake error from 10.129.0.1:39044: EOF
2018/07/26 11:40:14 http: TLS handshake error from 10.129.0.1:39056: EOF
2018/07/26 11:40:24 http: TLS handshake error from 10.129.0.1:39068: EOF
2018/07/26 11:40:34 http: TLS handshake error from 10.129.0.1:39080: EOF
level=info timestamp=2018-07-26T11:40:36.574737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:44 http: TLS handshake error from 10.129.0.1:39092: EOF
2018/07/26 11:40:54 http: TLS handshake error from 10.129.0.1:39104: EOF
2018/07/26 11:41:04 http: TLS handshake error from 10.129.0.1:39116: EOF
2018/07/26 11:41:14 http: TLS handshake error from 10.129.0.1:39128: EOF
2018/07/26 11:41:24 http: TLS handshake error from 10.129.0.1:39140: EOF
2018/07/26 11:41:34 http: TLS handshake error from 10.129.0.1:39152: EOF
2018/07/26 11:41:44 http: TLS handshake error from 10.129.0.1:39164: EOF

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:39:14.428469Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q"
level=info timestamp=2018-07-26T11:39:44.731625Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:39:44.732486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:39:45.102001Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg524\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg524"
level=info timestamp=2018-07-26T11:40:15.435412Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:40:15.436049Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:40:15.703596Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb"
level=info timestamp=2018-07-26T11:40:15.746555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb"
level=info timestamp=2018-07-26T11:40:46.417423Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:40:46.418947Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:41:17.394453Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:41:17.395417Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:41:17.731989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicsvmn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicsvmn"
level=info timestamp=2018-07-26T11:41:48.440498Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:41:48.441622Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:41:02.830249Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="No update processing required"
level=error timestamp=2018-07-26T11:41:02.846379Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5pfwp\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-07-26T11:41:02.846575Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5pfwp\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5pfwp"
level=info timestamp=2018-07-26T11:41:02.848125Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi5pfwp, existing: true\n"
level=info timestamp=2018-07-26T11:41:02.848457Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:41:02.848583Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:02.849098Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:02.849808Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:41:02.857393Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:41:02.869683Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi5pfwp, existing: true\n"
level=info timestamp=2018-07-26T11:41:02.869854Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:41:02.869947Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:02.870044Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:02.870446Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:41:02.881507Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:41:34.235019Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind=Domain uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-26T11:41:34.276873Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-26T11:41:34.285054Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:41:34.285210Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicsvmn, existing: true\n"
level=info timestamp=2018-07-26T11:41:34.285258Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T11:41:34.285286Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:34.285328Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:34.285411Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T11:41:34.398870Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:41:34.401545Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicsvmn, existing: true\n"
level=info timestamp=2018-07-26T11:41:34.402074Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:41:34.402485Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:34.402994Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:34.407051Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:41:34.413454Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi5pfwp-2ns9n
Pod phase: Running
level=info timestamp=2018-07-26T11:41:01.900295Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:41:01.907000Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 3f2154f4-d6b6-470b-a6ba-78c21602f202"
level=info timestamp=2018-07-26T11:41:01.907447Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:41:01.933250Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.696873Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:41:02.730319Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:02.739736Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:41:02.741825Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.742382Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.742694Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:41:02.754473Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:02.770694Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.855912Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.877490Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.912262Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3f2154f4-d6b6-470b-a6ba-78c21602f202: 188"

Pod name: virt-launcher-testvmibz2z5-kcvnn
Pod phase: Pending

Pod name: virt-launcher-testvmicsvmn-lt6ql
Pod phase: Running
level=info timestamp=2018-07-26T11:41:32.987445Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:41:33.981937Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:41:33.987729Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID ddcf5427-726c-4b1b-a2da-7e024708597b"
level=info timestamp=2018-07-26T11:41:33.988179Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:41:33.990406Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:34.205604Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:41:34.233100Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:34.235351Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:34.246106Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:41:34.261981Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:41:34.264451Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:34.271456Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:34.277818Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:34.412014Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:35.004003Z pos=monitor.go:222 component=virt-launcher msg="Found PID for ddcf5427-726c-4b1b-a2da-7e024708597b: 188"

Pod name: virt-launcher-testvmilg524-92d57
Pod phase: Running
level=info timestamp=2018-07-26T11:40:00.535505Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.355095Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:40:01.362824Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 3ae660da-0c69-4461-a4f8-ab71f7060132"
level=info timestamp=2018-07-26T11:40:01.363381Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:40:01.366567Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.727530Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.755836Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:01.761866Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:40:01.767215Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:01.771817Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.772044Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:40:01.789934Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:01.795131Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:01.877618Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:02.370297Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3ae660da-0c69-4461-a4f8-ab71f7060132: 184"

Pod name: virt-launcher-testvmipxrsb-pw9vl
Pod phase: Running
level=info timestamp=2018-07-26T11:40:32.270247Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:40:32.793395Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 9a9e14a0-7754-42d1-8435-dc14431f9628"
level=info timestamp=2018-07-26T11:40:32.793431Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:40:32.793976Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:40:32.803194Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.088647Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:40:33.138445Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:33.181785Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.184940Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:40:33.214229Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:40:33.234843Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:33.250124Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:40:33.547460Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:40:33.780525Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:40:33.805240Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 9a9e14a0-7754-42d1-8435-dc14431f9628: 184"
Pod name: virt-launcher-testvmivbx7q-g6tnd
Pod phase: Running
level=info timestamp=2018-07-26T11:39:31.960797Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:39:32.981805Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:39:32.998764Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 45ea60bd-5c76-4157-9152-3e102e614b62"
level=info timestamp=2018-07-26T11:39:33.010861Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:39:33.025440Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.696699Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.765839Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.780929Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:39:33.785633Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:33.789828Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.793628Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:39:33.815603Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:39:33.823695Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:39:33.961477Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:39:34.017156Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 45ea60bd-5c76-4157-9152-3e102e614b62: 190"
• Failure in Spec Setup (BeforeEach) [31.515 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose UDP service on a VMI
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166
    Expose NodePort UDP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:205
      Should expose a NodePort service on a VMI and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:210
      Expected error:
          <*errors.StatusError | 0xc420453b00>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
------------------------------
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
2018/07/26 11:40:20 http: TLS handshake error from 10.129.0.1:55234: EOF
2018/07/26 11:40:30 http: TLS handshake error from 10.129.0.1:55246: EOF
2018/07/26 11:40:40 http: TLS handshake error from 10.129.0.1:55258: EOF
2018/07/26 11:40:50 http: TLS handshake error from 10.129.0.1:55270: EOF
2018/07/26 11:41:00 http: TLS handshake error from 10.129.0.1:55282: EOF
level=info timestamp=2018-07-26T11:41:06.281244Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:41:10 http: TLS handshake error from 10.129.0.1:55294: EOF
2018/07/26 11:41:20 http: TLS handshake error from 10.129.0.1:55306: EOF
2018/07/26 11:41:30 http: TLS handshake error from 10.129.0.1:55318: EOF
level=info timestamp=2018-07-26T11:41:36.379919Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:41:40 http: TLS handshake error from 10.129.0.1:55330: EOF
2018/07/26 11:41:50 http: TLS handshake error from 10.129.0.1:55342: EOF
2018/07/26 11:42:00 http: TLS handshake error from 10.129.0.1:55354: EOF
2018/07/26 11:42:10 http: TLS handshake error from 10.129.0.1:55366: EOF
level=info timestamp=2018-07-26T11:42:12.878915Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 11:40:14 http: TLS handshake error from 10.129.0.1:39056: EOF
2018/07/26 11:40:24 http: TLS handshake error from 10.129.0.1:39068: EOF
2018/07/26 11:40:34 http: TLS handshake error from 10.129.0.1:39080: EOF
level=info timestamp=2018-07-26T11:40:36.574737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:44 http: TLS handshake error from 10.129.0.1:39092: EOF
2018/07/26 11:40:54 http: TLS handshake error from 10.129.0.1:39104: EOF
2018/07/26 11:41:04 http: TLS handshake error from 10.129.0.1:39116: EOF
2018/07/26 11:41:14 http: TLS handshake error from 10.129.0.1:39128: EOF
2018/07/26 11:41:24 http: TLS handshake error from 10.129.0.1:39140: EOF
2018/07/26 11:41:34 http: TLS handshake error from 10.129.0.1:39152: EOF
2018/07/26 11:41:44 http: TLS handshake error from 10.129.0.1:39164: EOF
2018/07/26 11:41:54 http: TLS handshake error from 10.129.0.1:39176: EOF
2018/07/26 11:42:04 http: TLS handshake error from 10.129.0.1:39188: EOF
level=info timestamp=2018-07-26T11:42:06.453188Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:42:14 http: TLS handshake error from 10.129.0.1:39200: EOF
Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:39:14.428469Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivbx7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivbx7q"
level=info timestamp=2018-07-26T11:39:44.731625Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:39:44.732486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg524 kind= uid=852bcef8-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:39:45.102001Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg524\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg524"
level=info timestamp=2018-07-26T11:40:15.435412Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:40:15.436049Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipxrsb kind= uid=977c9505-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:40:15.703596Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb"
level=info timestamp=2018-07-26T11:40:15.746555Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipxrsb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipxrsb"
level=info timestamp=2018-07-26T11:40:46.417423Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:40:46.418947Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:41:17.394453Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:41:17.395417Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:41:17.731989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicsvmn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicsvmn"
level=info timestamp=2018-07-26T11:41:48.440498Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:41:48.441622Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:42:06.614415Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=Domain uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-26T11:42:06.703481Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-26T11:42:06.711951Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:42:06.713831Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: true\n"
level=info timestamp=2018-07-26T11:42:06.714325Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T11:42:06.714732Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:42:06.749197Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:42:06.749351Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T11:42:06.929461Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:42:06.929938Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: true\n"
level=info timestamp=2018-07-26T11:42:06.930192Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:42:06.930435Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:42:06.930908Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:42:06.931239Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:42:06.936474Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:41:34.235019Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind=Domain uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-07-26T11:41:34.276873Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-07-26T11:41:34.285054Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:41:34.285210Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicsvmn, existing: true\n"
level=info timestamp=2018-07-26T11:41:34.285258Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T11:41:34.285286Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:34.285328Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:34.285411Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T11:41:34.398870Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:41:34.401545Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicsvmn, existing: true\n"
level=info timestamp=2018-07-26T11:41:34.402074Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T11:41:34.402485Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:41:34.402994Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-07-26T11:41:34.407051Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-07-26T11:41:34.413454Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi5pfwp-2ns9n
Pod phase: Running
level=info timestamp=2018-07-26T11:41:01.900295Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:41:01.907000Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 3f2154f4-d6b6-470b-a6ba-78c21602f202"
level=info timestamp=2018-07-26T11:41:01.907447Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:41:01.933250Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.696873Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:41:02.730319Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:02.739736Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:41:02.741825Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.742382Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.742694Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:41:02.754473Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:02.770694Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:02.855912Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.877490Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5pfwp kind= uid=a9f5c861-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:41:02.912262Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3f2154f4-d6b6-470b-a6ba-78c21602f202: 188"
Pod name: virt-launcher-testvmibz2z5-kcvnn
Pod phase: Running
level=info timestamp=2018-07-26T11:42:05.244845Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-26T11:42:05.731821Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:42:05.750562Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:42:06.230221Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 78a2f1d1-ed9b-4ee9-bc57-172d9589ee7f"
level=info timestamp=2018-07-26T11:42:06.231115Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:42:06.588351Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:42:06.607522Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:42:06.614303Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:42:06.631200Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:42:06.648858Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:42:06.650708Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:42:06.667255Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:42:06.705332Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:42:06.935742Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:42:07.237664Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 78a2f1d1-ed9b-4ee9-bc57-172d9589ee7f: 193"
Pod name: virt-launcher-testvmicsvmn-lt6ql
Pod phase: Running
level=info timestamp=2018-07-26T11:41:32.987445Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:41:33.981937Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:41:33.987729Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID ddcf5427-726c-4b1b-a2da-7e024708597b"
level=info timestamp=2018-07-26T11:41:33.988179Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:41:33.990406Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:34.205604Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:41:34.233100Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:41:34.235351Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:41:34.246106Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:41:34.261981Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicsvmn kind= uid=bc63a9d3-90c8-11e8-90ce-525500d15501 msg="Domain started."
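The virt-handler entries earlier in this dump repeat one reconcile pass per VMI: log whether the VMI and its libvirt domain exist, compare the VMI phase against the domain state, then either skip work ("No update processing required") or run an update pass. A minimal runnable Go sketch of that decision shape, with simplified hypothetical types rather than virt-handler's actual ones:

package main

import "fmt"

// Hypothetical, simplified stand-ins for the states logged above; this is a
// sketch of the control-loop shape, not KubeVirt's actual virt-handler code.
type vmiState struct {
	name     string
	phase    string // "Scheduled" or "Running" in the entries above
	existing bool
}

type domainState struct {
	status   string // "Running" in the entries above
	existing bool
}

func reconcile(v vmiState, d domainState) {
	fmt.Printf("Processing vmi %s, existing: %v\n", v.name, v.existing)
	fmt.Printf("vmi is in phase: %s\n", v.phase)
	fmt.Printf("Domain: existing: %v\n", d.existing)
	fmt.Printf("Domain status: %s\n", d.status)
	// While the VMI is still marked Scheduled but the domain already runs,
	// there is nothing to push down to libvirt yet.
	if v.phase == "Scheduled" && d.status == "Running" {
		fmt.Println("No update processing required")
		return
	}
	// Once the VMI reports Running, an update pass syncs the domain spec.
	fmt.Println("Processing vmi update")
	fmt.Println("Synchronization loop succeeded.")
}

func main() {
	d := domainState{status: "Running", existing: true}
	reconcile(vmiState{name: "testvmibz2z5", phase: "Scheduled", existing: true}, d)
	reconcile(vmiState{name: "testvmibz2z5", phase: "Running", existing: true}, d)
}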
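Each "• Failure in Spec Setup (BeforeEach)" block in this run has the same shape: the BeforeEach creates a test object, the apiserver answers with a 504 StatusError ("Timeout: request did not complete within allowed duration"), and the Gomega assertion fails with "not to have occurred". A sketch of that Ginkgo/Gomega pattern, where createVMI is a hypothetical stand-in for the suite's client call, not the actual expose_test.go body:

package expose_sketch

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

// createVMI stands in for the apiserver request that received the 504.
var createVMI = func(name string) error { return nil }

var _ = Describe("Expose", func() {
	BeforeEach(func() {
		err := createVMI("testvmi")
		// When the apiserver answers 504, err is a *errors.StatusError with
		// Reason "Timeout", and this assertion fails with the block above.
		Expect(err).ToNot(HaveOccurred())
	})

	It("Should expose a NodePort service on a VMI and connect to it", func() {})
})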
• Failure in Spec Setup (BeforeEach) [31.548 seconds]
Expose
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53
  Expose service on a VMI replica set
  /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:253
    Expose ClusterIP service [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:286
      Should create a ClusterIP service on VMRS and connect to it
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:290
      Expected error:
          <*errors.StatusError | 0xc4201566c0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred
      /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:264
------------------------------
STEP: Creating a VMRS object with 2 replicas
STEP: Start the replica set
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
level=info timestamp=2018-07-26T11:42:20.556744Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:20.614109Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:23.305831Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 11:42:30 http: TLS handshake error from 10.129.0.1:55390: EOF
level=info timestamp=2018-07-26T11:42:33.396626Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:36.397227Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T11:42:38.077897Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:38.105940Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:38.116432Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 11:42:40 http: TLS handshake error from 10.129.0.1:55404: EOF
level=info timestamp=2018-07-26T11:42:42.263143Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:42.329911Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:42.341595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T11:42:43.470135Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 11:42:50 http: TLS handshake error from 10.129.0.1:55416: EOF
Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
level=info timestamp=2018-07-26T11:40:36.574737Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:40:44 http: TLS handshake error from 10.129.0.1:39092: EOF
2018/07/26 11:40:54 http: TLS handshake error from 10.129.0.1:39104: EOF
2018/07/26 11:41:04 http: TLS handshake error from 10.129.0.1:39116: EOF
2018/07/26 11:41:14 http: TLS handshake error from 10.129.0.1:39128: EOF
2018/07/26 11:41:24 http: TLS handshake error from 10.129.0.1:39140: EOF
2018/07/26 11:41:34 http: TLS handshake error from 10.129.0.1:39152: EOF
2018/07/26 11:41:44 http: TLS handshake error from 10.129.0.1:39164: EOF
2018/07/26 11:41:54 http: TLS handshake error from 10.129.0.1:39176: EOF
2018/07/26 11:42:04 http: TLS handshake error from 10.129.0.1:39188: EOF
level=info timestamp=2018-07-26T11:42:06.453188Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 11:42:14 http: TLS handshake error from 10.129.0.1:39200: EOF
2018/07/26 11:42:24 http: TLS handshake error from 10.129.0.1:39212: EOF
2018/07/26 11:42:34 http: TLS handshake error from 10.129.0.1:39224: EOF
2018/07/26 11:42:44 http: TLS handshake error from 10.129.0.1:39238: EOF
Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:41:48.440498Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:41:48.441622Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibz2z5 kind= uid=ceeb4629-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=error timestamp=2018-07-26T11:42:50.006018Z pos=replicaset.go:225 component=virt-controller service=http namespace=kubevirt-test-default name=replicaset2ljlj kind= uid=e1b4a8ac-90c8-11e8-90ce-525500d15501 msg="Scaling the replicaset failed."
level=info timestamp=2018-07-26T11:42:50.066012Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=info timestamp=2018-07-26T11:42:50.066288Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=error timestamp=2018-07-26T11:42:50.066534Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpckw64n"
level=info timestamp=2018-07-26T11:42:50.066736Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:42:50.093569Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Looking for VirtualMachineInstance Ref"
level=error timestamp=2018-07-26T11:42:50.093809Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpccs246"
level=info timestamp=2018-07-26T11:42:50.094029Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:42:50.094289Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:42:50.482518Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwvqpccs246\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwvqpccs246"
level=info timestamp=2018-07-26T11:42:51.584583Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Started processing VM"
level=info timestamp=2018-07-26T11:42:51.584989Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Creating or the VirtualMachineInstance: false"
level=info timestamp=2018-07-26T11:42:51.585099Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil"
Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
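The virt-controller entries above also show the normal optimistic-concurrency path: an update hits a 409 Conflict ("the object has been modified; please apply your changes to the latest version and try again"), and the controller reenqueues the VMI and retries against the latest resourceVersion. Client code handles the same error with client-go's retry helper; a runnable Go sketch that simulates the conflict seen in the log:

package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/client-go/util/retry"
)

func main() {
	gr := schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"}
	attempts := 0
	// RetryOnConflict re-runs the closure while it keeps returning the
	// 409 Conflict that virt-controller logs as "reenqueuing" above.
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		attempts++
		if attempts < 3 {
			// Simulated conflict with the same message as the controller log.
			return apierrors.NewConflict(gr, "testvmicsvmn", fmt.Errorf(
				"the object has been modified; please apply your changes to the latest version and try again"))
		}
		return nil // after re-reading the latest version, the update succeeds
	})
	fmt.Printf("attempts=%d err=%v\n", attempts, err)
}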
level=info timestamp=2018-07-26T11:39:33.785633Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:39:33.789828Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:39:33.793628Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-26T11:39:33.815603Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:39:33.823695Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:39:33.961477Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivbx7q kind= uid=72e65439-90c8-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:39:34.017156Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 45ea60bd-5c76-4157-9152-3e102e614b62: 190" Pod name: virt-launcher-testvmiwvqpccs246-4qmd2 Pod phase: Pending Pod name: virt-launcher-testvmiwvqpckw64n-dgfpp Pod phase: Pending • Failure in Spec Setup (BeforeEach) [31.673 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on an VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:318 Expose ClusterIP service [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:362 Connect to ClusterIP services that was set when VM was offline /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:363 Expected error: <*errors.StatusError | 0xc420157440>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:331 ------------------------------ STEP: Creating an VM object STEP: Creating the VM S [SKIPPING] in Spec Setup (BeforeEach) [0.007 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to start a vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1368 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.007 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to stop a running vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1368 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.014 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have correct UUID /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1368 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.008 seconds] Windows VirtualMachineInstance 
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have pod IP /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1368 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.018 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to start a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1368 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.027 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to stop a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1368 ------------------------------ Pod name: disks-images-provider-8ftmp Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-gmnhx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running level=info timestamp=2018-07-26T11:42:42.341595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:42:43.470135Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:42:50 http: TLS handshake error from 10.129.0.1:55416: EOF level=info timestamp=2018-07-26T11:42:54.091503Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:42:59.536255Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T11:42:59.560050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/26 11:43:00 http: TLS handshake error from 10.129.0.1:55428: EOF level=info timestamp=2018-07-26T11:43:04.310352Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:43:10 http: TLS handshake error from 10.129.0.1:55440: EOF level=info timestamp=2018-07-26T11:43:12.597907Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:12.707593Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 
statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:12.707926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:14.527282Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:43:20 http: TLS handshake error from 10.129.0.1:55452: EOF level=info timestamp=2018-07-26T11:43:24.801207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jq8hh Pod phase: Running 2018/07/26 11:41:24 http: TLS handshake error from 10.129.0.1:39140: EOF 2018/07/26 11:41:34 http: TLS handshake error from 10.129.0.1:39152: EOF 2018/07/26 11:41:44 http: TLS handshake error from 10.129.0.1:39164: EOF 2018/07/26 11:41:54 http: TLS handshake error from 10.129.0.1:39176: EOF 2018/07/26 11:42:04 http: TLS handshake error from 10.129.0.1:39188: EOF level=info timestamp=2018-07-26T11:42:06.453188Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:42:14 http: TLS handshake error from 10.129.0.1:39200: EOF 2018/07/26 11:42:24 http: TLS handshake error from 10.129.0.1:39212: EOF 2018/07/26 11:42:34 http: TLS handshake error from 10.129.0.1:39224: EOF 2018/07/26 11:42:44 http: TLS handshake error from 10.129.0.1:39238: EOF 2018/07/26 11:42:54 http: TLS handshake error from 10.129.0.1:39250: EOF 2018/07/26 11:43:04 http: TLS handshake error from 10.129.0.1:39262: EOF level=info timestamp=2018-07-26T11:43:06.307985Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:14 http: TLS handshake error from 10.129.0.1:39274: EOF 2018/07/26 11:43:24 http: TLS handshake error from 10.129.0.1:39286: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=error timestamp=2018-07-26T11:42:50.006018Z pos=replicaset.go:225 component=virt-controller service=http namespace=kubevirt-test-default name=replicaset2ljlj kind= uid=e1b4a8ac-90c8-11e8-90ce-525500d15501 msg="Scaling the replicaset failed." 
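------------------------------
Note: the recurring "http: TLS handshake error from 10.129.0.1:<port>: EOF" lines in both virt-api pods are Go's net/http server reporting a client that opened a TCP connection to the TLS port and closed it before sending a ClientHello. Their steady ~10s cadence suggests TCP health probes rather than real clients, though the log does not identify the caller. The pattern is easy to reproduce (the address below is a placeholder):

    package main

    import (
        "net"
        "time"
    )

    // Dial a TLS-serving port and close without handshaking; the server's
    // error log then shows "http: TLS handshake error from <addr>: EOF".
    func main() {
        conn, err := net.DialTimeout("tcp", "virt-api.example:8443", 2*time.Second)
        if err != nil {
            return // endpoint unreachable, nothing to demonstrate
        }
        conn.Close()
    }
------------------------------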
level=info timestamp=2018-07-26T11:42:50.066012Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=info timestamp=2018-07-26T11:42:50.066288Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=error timestamp=2018-07-26T11:42:50.066534Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpckw64n" level=info timestamp=2018-07-26T11:42:50.066736Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:42:50.093569Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-26T11:42:50.093809Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:50.094029Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:42:50.094289Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:42:50.482518Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwvqpccs246\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:51.584583Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T11:42:51.584989Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-26T11:42:51.585099Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-26T11:43:25.232409Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:43:25.233866Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as 
initialized" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:42:53.021595Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:53.021702Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:53.089325Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n" level=info timestamp=2018-07-26T11:42:53.089420Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:42:53.089446Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:42:53.089580Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:42:53.089675Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:42:53.090181Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmibz2z5" level=info timestamp=2018-07-26T11:42:53.092624Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:53.093895Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:42:53.094074Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:42:53.094127Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n" level=info timestamp=2018-07-26T11:42:53.094146Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:53.094212Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:53.094353Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:42:54.394935Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:54.395385Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:42:54.400197Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:42:54.400360Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:42:54.400410Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipxrsb, existing: false\n" level=info timestamp=2018-07-26T11:42:54.400430Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:54.400502Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:54.401812Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:54.402187Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:42:54.402330Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:42:54.414263Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:54.414392Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilg524, existing: false\n" level=info timestamp=2018-07-26T11:42:54.414438Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:54.414543Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:54.414789Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
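------------------------------
Note: every "• Failure in Spec Setup (BeforeEach)" block in this run (one above, three more below) reports the same root error: the API server answered the test's create request with HTTP 504, "Timeout: request did not complete within allowed duration", so the VMI was never admitted and the matching virt-launcher pods stay Pending. The "Expected error: ... not to have occurred" wording is Gomega's rendering of a negated HaveOccurred assertion; the real assertions live at the cited lines (expose_test.go:331, vmi_networking_test.go:131), which this log does not show, but the shape is roughly:

    package main

    import (
        "fmt"

        . "github.com/onsi/gomega"
    )

    // Standalone Gomega with a fail handler that prints instead of
    // panicking, purely to show where the failure text comes from (the
    // exact wording varies across Gomega versions).
    func main() {
        RegisterFailHandler(func(message string, _ ...int) { fmt.Println(message) })
        err := fmt.Errorf("Timeout: request did not complete within allowed duration")
        Expect(err).ToNot(HaveOccurred())
    }

In the tests themselves the assertion follows a creation call like virtClient.VirtualMachineInstance(namespace).Create(vmi), converting the 504 into the spec failure.
------------------------------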
Pod name: virt-launcher-testvmi6snbf-nnqfx Pod phase: Pending Pod name: virt-launcher-testvmivbx7q-g6tnd Pod phase: Running Unable to retrieve container logs for docker://8e11ada2b9bf742a70adb5664eef79562a52fad963c1d66ac5779f435970bb2d• Failure in Spec Setup (BeforeEach) [32.659 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.StatusError | 0xc420f347e0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131 ------------------------------ Pod name: disks-images-provider-8ftmp Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-gmnhx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running level=info timestamp=2018-07-26T11:43:12.597907Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:12.707593Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:12.707926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:14.527282Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:43:20 http: TLS handshake error from 10.129.0.1:55452: EOF level=info timestamp=2018-07-26T11:43:24.801207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:43:30 http: TLS handshake error from 10.129.0.1:55464: EOF level=info timestamp=2018-07-26T11:43:35.098505Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:43:40 http: TLS handshake error from 10.129.0.1:55476: EOF level=info timestamp=2018-07-26T11:43:42.955917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:43.021040Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 
contentLength=136 level=info timestamp=2018-07-26T11:43:43.025055Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:45.403368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:43:50 http: TLS handshake error from 10.129.0.1:55488: EOF level=info timestamp=2018-07-26T11:43:55.675882Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jq8hh Pod phase: Running 2018/07/26 11:42:04 http: TLS handshake error from 10.129.0.1:39188: EOF level=info timestamp=2018-07-26T11:42:06.453188Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:42:14 http: TLS handshake error from 10.129.0.1:39200: EOF 2018/07/26 11:42:24 http: TLS handshake error from 10.129.0.1:39212: EOF 2018/07/26 11:42:34 http: TLS handshake error from 10.129.0.1:39224: EOF 2018/07/26 11:42:44 http: TLS handshake error from 10.129.0.1:39238: EOF 2018/07/26 11:42:54 http: TLS handshake error from 10.129.0.1:39250: EOF 2018/07/26 11:43:04 http: TLS handshake error from 10.129.0.1:39262: EOF level=info timestamp=2018-07-26T11:43:06.307985Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:14 http: TLS handshake error from 10.129.0.1:39274: EOF 2018/07/26 11:43:24 http: TLS handshake error from 10.129.0.1:39286: EOF 2018/07/26 11:43:34 http: TLS handshake error from 10.129.0.1:39298: EOF level=info timestamp=2018-07-26T11:43:36.482940Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:44 http: TLS handshake error from 10.129.0.1:39310: EOF 2018/07/26 11:43:54 http: TLS handshake error from 10.129.0.1:39322: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=error timestamp=2018-07-26T11:42:50.066534Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpckw64n" level=info timestamp=2018-07-26T11:42:50.066736Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpckw64n kind= uid=f3a098c5-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:42:50.093569Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-26T11:42:50.093809Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:50.094029Z pos=preset.go:139 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:42:50.094289Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:42:50.482518Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwvqpccs246\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:51.584583Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T11:42:51.584989Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-26T11:42:51.585099Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-26T11:43:25.232409Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:43:25.233866Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:43:56.131798Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8nvmq kind= uid=1b09f2da-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:43:56.132402Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8nvmq kind= uid=1b09f2da-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:43:56.330334Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8nvmq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8nvmq" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:42:53.021595Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:53.021702Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
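------------------------------
Note: the controller errors "Cant find the matching VM for VirtualMachineInstance: testvmiwvqpc..." above come from the VirtualMachine controller trying to resolve a VirtualMachine owner for each VMI. The testvmiwvqpc* instances appear to belong to a replica set (see the "Scaling the replicaset failed" entry earlier), so there is no VirtualMachine behind them and the lookup is expected to come up empty; the message is noisy rather than fatal. Owner resolution of this kind usually goes through metav1's controller-ref helper, sketched here with a placeholder object:

    package main

    import (
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
        // A standalone or replicaset-owned VMI either has no controller
        // owner reference or one whose Kind is not VirtualMachine.
        vmi := metav1.ObjectMeta{Name: "testvmiwvqpckw64n"} // placeholder meta, no owner refs
        if ref := metav1.GetControllerOf(&vmi); ref == nil || ref.Kind != "VirtualMachine" {
            fmt.Println("no matching VM for VirtualMachineInstance:", vmi.Name)
        }
    }
------------------------------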
level=info timestamp=2018-07-26T11:42:53.089325Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n" level=info timestamp=2018-07-26T11:42:53.089420Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:42:53.089446Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:42:53.089580Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:42:53.089675Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:42:53.090181Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmibz2z5" level=info timestamp=2018-07-26T11:42:53.092624Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:53.093895Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:42:53.094074Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:42:53.094127Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n" level=info timestamp=2018-07-26T11:42:53.094146Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:53.094212Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:53.094353Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:42:54.400430Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:54.400502Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:54.401812Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:54.402187Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:42:54.402330Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:42:54.414263Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:42:54.414392Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilg524, existing: false\n" level=info timestamp=2018-07-26T11:42:54.414438Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:54.414543Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:54.414789Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:43:45.394548Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:43:45.396829Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmivbx7q, existing: false\n" level=info timestamp=2018-07-26T11:43:45.396956Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:43:45.397189Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:43:45.398393Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi8nvmq-ggrl5 Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.767 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with pod network connectivity explicitly set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.StatusError | 0xc4206f4cf0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131 ------------------------------ Pod name: disks-images-provider-8ftmp Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-gmnhx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running level=info timestamp=2018-07-26T11:43:43.021040Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:43.025055Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:43:45.403368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" 
proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:43:50 http: TLS handshake error from 10.129.0.1:55488: EOF level=info timestamp=2018-07-26T11:43:55.675882Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:44:00 http: TLS handshake error from 10.129.0.1:55500: EOF level=info timestamp=2018-07-26T11:44:05.906712Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:06.257923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:44:10 http: TLS handshake error from 10.129.0.1:55512: EOF level=info timestamp=2018-07-26T11:44:13.123255Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:13.206560Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:13.222139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:16.116945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:44:20 http: TLS handshake error from 10.129.0.1:55524: EOF level=info timestamp=2018-07-26T11:44:26.361575Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jq8hh Pod phase: Running 2018/07/26 11:42:24 http: TLS handshake error from 10.129.0.1:39212: EOF 2018/07/26 11:42:34 http: TLS handshake error from 10.129.0.1:39224: EOF 2018/07/26 11:42:44 http: TLS handshake error from 10.129.0.1:39238: EOF 2018/07/26 11:42:54 http: TLS handshake error from 10.129.0.1:39250: EOF 2018/07/26 11:43:04 http: TLS handshake error from 10.129.0.1:39262: EOF level=info timestamp=2018-07-26T11:43:06.307985Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:14 http: TLS handshake error from 10.129.0.1:39274: EOF 2018/07/26 11:43:24 http: TLS handshake error from 10.129.0.1:39286: EOF 2018/07/26 11:43:34 http: TLS handshake error from 10.129.0.1:39298: EOF level=info timestamp=2018-07-26T11:43:36.482940Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:44 http: TLS handshake error from 10.129.0.1:39310: EOF 2018/07/26 11:43:54 http: TLS handshake error from 10.129.0.1:39322: EOF 2018/07/26 11:44:04 http: TLS handshake error from 10.129.0.1:39334: EOF 2018/07/26 11:44:14 http: TLS handshake error from 10.129.0.1:39346: EOF 2018/07/26 11:44:24 http: TLS handshake error from 
10.129.0.1:39358: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=error timestamp=2018-07-26T11:42:50.093809Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:50.094029Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:42:50.094289Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:42:50.482518Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwvqpccs246\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:51.584583Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T11:42:51.584989Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-26T11:42:51.585099Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-26T11:43:25.232409Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:43:25.233866Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:43:56.131798Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8nvmq kind= uid=1b09f2da-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:43:56.132402Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8nvmq kind= uid=1b09f2da-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:43:56.330334Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8nvmq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8nvmq" level=info timestamp=2018-07-26T11:43:56.759410Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8nvmq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8nvmq, 
ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1b09f2da-90c9-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8nvmq" level=info timestamp=2018-07-26T11:44:26.906525Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksr7 kind= uid=2d5fd9e9-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:44:26.907318Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksr7 kind= uid=2d5fd9e9-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:42:53.021595Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:53.021702Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:53.089325Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n" level=info timestamp=2018-07-26T11:42:53.089420Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:42:53.089446Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:42:53.089580Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:42:53.089675Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:42:53.090181Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmibz2z5" level=info timestamp=2018-07-26T11:42:53.092624Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:53.093895Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:42:53.094074Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:42:53.094127Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n" level=info timestamp=2018-07-26T11:42:53.094146Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:53.094212Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
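------------------------------
Note: the "StorageError: invalid object, Code: 4, ... Precondition failed: UID in precondition: 1b09f2da-..., UID in object meta: " entry above looks like the usual delete/update race: a write for testvmi8nvmq carried the old object's UID as a precondition, but by then the stored object was gone (the UID in object meta is empty), so storage rejected the write and the controller reenqueued. The same UID precondition is available to clients on delete; a sketch against a stand-in interface, since wiring a real client is beside the point:

    package main

    import (
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/types"
    )

    // deleter stands in for any namespaced client's Delete method
    // (client-go of this era used this shape).
    type deleter interface {
        Delete(name string, opts *metav1.DeleteOptions) error
    }

    // deleteIfUIDMatches refuses to delete a same-named replacement object:
    // on a UID mismatch the server answers with the "Precondition failed:
    // UID in precondition ..." error quoted above.
    func deleteIfUIDMatches(c deleter, name string, uid types.UID) error {
        return c.Delete(name, &metav1.DeleteOptions{
            Preconditions: &metav1.Preconditions{UID: &uid},
        })
    }

    func main() {
        fmt.Println("see deleteIfUIDMatches; a real client would satisfy deleter")
    }
------------------------------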
level=info timestamp=2018-07-26T11:42:53.094353Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:42:54.400430Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:54.400502Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:54.401812Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:54.402187Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:42:54.402330Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:42:54.414263Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:42:54.414392Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilg524, existing: false\n" level=info timestamp=2018-07-26T11:42:54.414438Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:42:54.414543Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:42:54.414789Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:43:45.394548Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:43:45.396829Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmivbx7q, existing: false\n" level=info timestamp=2018-07-26T11:43:45.396956Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:43:45.397189Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:43:45.398393Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
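------------------------------
Note: the virt-handler blocks repeat one reconciliation shape per VMI: compare "Processing vmi X, existing: <bool>" (the VMI object in the cluster) against "Domain: existing: <bool>" (the libvirt domain on the node), then tear down whatever is left once the VMI is gone. A condensed sketch of that decision as it reads from the messages alone (an inference; KubeVirt's actual sync loop in virt-handler is more involved):

    package main

    import "fmt"

    // sync mirrors the message sequence of the virt-handler logs above for
    // the "VMI deleted" cases; it is an inference from the log, not the
    // real implementation.
    func sync(vmiExists, domainExists bool) {
        switch {
        case !vmiExists && domainExists:
            // e.g. testvmibz2z5: domain still present (Shutoff/Destroyed)
            fmt.Println("Shutting down domain for deleted VirtualMachineInstance object.")
            fmt.Println("Processing deletion.")
        case !vmiExists && !domainExists:
            // e.g. testvmilg524 after the DELETED domain event
            fmt.Println("Processing local ephemeral data cleanup for shutdown domain.")
        }
        fmt.Println("Synchronization loop succeeded.")
    }

    func main() {
        sync(false, true)
        sync(false, false)
    }
------------------------------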
Pod name: virt-launcher-testvminksr7-rzzxd Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.816 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with custom MAC address /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.StatusError | 0xc420157c20>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131 ------------------------------ Pod name: disks-images-provider-8ftmp Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-gmnhx Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-56ccl Pod phase: Running level=info timestamp=2018-07-26T11:44:13.123255Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:13.206560Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:13.222139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:16.116945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:44:20 http: TLS handshake error from 10.129.0.1:55524: EOF level=info timestamp=2018-07-26T11:44:26.361575Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:44:30 http: TLS handshake error from 10.129.0.1:55536: EOF level=info timestamp=2018-07-26T11:44:36.638715Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:44:40 http: TLS handshake error from 10.129.0.1:55548: EOF level=info timestamp=2018-07-26T11:44:43.327526Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:43.411360Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:43.419500Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:44:46.900070Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:44:50 http: TLS handshake error from 10.129.0.1:55560: EOF level=info timestamp=2018-07-26T11:44:57.150416Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jq8hh Pod phase: Running 2018/07/26 11:43:04 http: TLS handshake error from 10.129.0.1:39262: EOF level=info timestamp=2018-07-26T11:43:06.307985Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:14 http: TLS handshake error from 10.129.0.1:39274: EOF 2018/07/26 11:43:24 http: TLS handshake error from 10.129.0.1:39286: EOF 2018/07/26 11:43:34 http: TLS handshake error from 10.129.0.1:39298: EOF level=info timestamp=2018-07-26T11:43:36.482940Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:44 http: TLS handshake error from 10.129.0.1:39310: EOF 2018/07/26 11:43:54 http: TLS handshake error from 10.129.0.1:39322: EOF 2018/07/26 11:44:04 http: TLS handshake error from 10.129.0.1:39334: EOF 2018/07/26 11:44:14 http: TLS handshake error from 10.129.0.1:39346: EOF 2018/07/26 11:44:24 http: TLS handshake error from 10.129.0.1:39358: EOF 2018/07/26 11:44:34 http: TLS handshake error from 10.129.0.1:39370: EOF level=info timestamp=2018-07-26T11:44:36.378810Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:44:44 http: TLS handshake error from 10.129.0.1:39382: EOF 2018/07/26 11:44:54 http: TLS handshake error from 10.129.0.1:39394: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=error timestamp=2018-07-26T11:42:50.093809Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:50.094029Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:42:50.094289Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvqpccs246 kind= uid=f3a276c1-90c8-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:42:50.482518Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwvqpccs246\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwvqpccs246" level=info timestamp=2018-07-26T11:42:51.584583Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= 
uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-26T11:42:51.584989Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirq47g kind= uid=f48aa12f-90c8-11e8-90ce-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-26T11:42:51.585099Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-26T11:43:25.232409Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:43:25.233866Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6snbf kind= uid=089d8600-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:43:56.131798Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8nvmq kind= uid=1b09f2da-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:43:56.132402Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8nvmq kind= uid=1b09f2da-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:43:56.330334Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8nvmq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8nvmq" level=info timestamp=2018-07-26T11:43:56.759410Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8nvmq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8nvmq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1b09f2da-90c9-11e8-90ce-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8nvmq" level=info timestamp=2018-07-26T11:44:26.906525Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksr7 kind= uid=2d5fd9e9-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:44:26.907318Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksr7 kind= uid=2d5fd9e9-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:42:53.021595Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-n2w27
Pod phase: Running
level=info timestamp=2018-07-26T11:42:53.021595Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:42:53.021702Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi5pfwp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:42:53.089325Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n"
level=info timestamp=2018-07-26T11:42:53.089420Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T11:42:53.089446Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T11:42:53.089580Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T11:42:53.089675Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T11:42:53.090181Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmibz2z5"
level=info timestamp=2018-07-26T11:42:53.092624Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:42:53.093895Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:42:53.094074Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:42:53.094127Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmibz2z5, existing: false\n"
level=info timestamp=2018-07-26T11:42:53.094146Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:42:53.094212Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:42:53.094353Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmibz2z5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-qxrvv
Pod phase: Running
level=info timestamp=2018-07-26T11:42:54.400430Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:42:54.400502Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:42:54.401812Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipxrsb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:42:54.402187Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T11:42:54.402330Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:42:54.414263Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:42:54.414392Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilg524, existing: false\n"
level=info timestamp=2018-07-26T11:42:54.414438Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:42:54.414543Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:42:54.414789Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilg524 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T11:43:45.394548Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T11:43:45.396829Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmivbx7q, existing: false\n"
level=info timestamp=2018-07-26T11:43:45.396956Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:43:45.397189Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:43:45.398393Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivbx7q kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
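The virt-handler entries above log two booleans before acting: whether the VirtualMachineInstance still exists ("Processing vmi X, existing: ...") and whether a libvirt domain exists ("Domain: existing: ..."). The branching those pairs reveal can be summarised as below — a descriptive sketch inferred from these log lines, not the handler's actual source:

```go
package main

import "fmt"

// decide mirrors the sync branches visible in the virt-handler log: a
// missing VMI with a live domain gets the domain shut down and deleted,
// a missing VMI without a domain only needs ephemeral-data cleanup, etc.
func decide(vmiExists, domainExists bool) string {
	switch {
	case !vmiExists && domainExists:
		return "shut down domain for deleted VirtualMachineInstance, then process deletion"
	case !vmiExists && !domainExists:
		return "process local ephemeral data cleanup for shutdown domain"
	case vmiExists && !domainExists:
		return "define and start the domain"
	default:
		return "process vmi update (or: no update processing required)"
	}
}

func main() {
	fmt.Println(decide(false, true)) // the testvmibz2z5 case above
}
```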
• Failure in Spec Setup (BeforeEach) [30.560 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the internet
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.StatusError | 0xc420ed8cf0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131
------------------------------
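The 504 above (a StatusError with Reason "Timeout") came back from the apiserver while the spec's BeforeEach was still setting up, so the test never ran. In client code such responses are detectable with `apierrors.IsTimeout` from apimachinery; a minimal retry sketch, where `doRequest` is a hypothetical stand-in for whatever call timed out:

```go
package main

import (
	"fmt"
	"time"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
)

// retryOnAPITimeout re-issues a request that failed with a 504
// StatusError (Reason "Timeout"), as in the failure above. Any other
// error, or success, is returned immediately.
func retryOnAPITimeout(doRequest func() error, attempts int, backoff time.Duration) error {
	var err error
	for i := 0; i < attempts; i++ {
		if err = doRequest(); err == nil || !apierrors.IsTimeout(err) {
			return err
		}
		fmt.Printf("apiserver timeout on attempt %d, retrying in %s\n", i+1, backoff)
		time.Sleep(backoff)
	}
	return err
}

func main() {
	// Stub request; a real caller would issue the create/get that 504'd.
	_ = retryOnAPITimeout(func() error { return nil }, 3, 5*time.Second)
}
```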
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:45:29.381194Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:45:30 http: TLS handshake error from 10.129.0.1:55608: EOF Pod name: virt-api-7d79764579-jq8hh Pod phase: Running level=info timestamp=2018-07-26T11:43:36.482940Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:43:44 http: TLS handshake error from 10.129.0.1:39310: EOF 2018/07/26 11:43:54 http: TLS handshake error from 10.129.0.1:39322: EOF 2018/07/26 11:44:04 http: TLS handshake error from 10.129.0.1:39334: EOF 2018/07/26 11:44:14 http: TLS handshake error from 10.129.0.1:39346: EOF 2018/07/26 11:44:24 http: TLS handshake error from 10.129.0.1:39358: EOF 2018/07/26 11:44:34 http: TLS handshake error from 10.129.0.1:39370: EOF level=info timestamp=2018-07-26T11:44:36.378810Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:44:44 http: TLS handshake error from 10.129.0.1:39382: EOF 2018/07/26 11:44:54 http: TLS handshake error from 10.129.0.1:39394: EOF 2018/07/26 11:45:04 http: TLS handshake error from 10.129.0.1:39406: EOF level=info timestamp=2018-07-26T11:45:06.663106Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:45:14 http: TLS handshake error from 10.129.0.1:39418: EOF 2018/07/26 11:45:24 http: TLS handshake error from 10.129.0.1:39430: EOF 2018/07/26 11:45:34 http: TLS handshake error from 10.129.0.1:39442: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=info timestamp=2018-07-26T11:44:26.907318Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminksr7 kind= uid=2d5fd9e9-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:44:58.131303Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihhfdz kind= uid=3fc29495-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:44:58.132047Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihhfdz kind= uid=3fc29495-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:44:58.302514Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:44:58.302819Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:44:58.307482Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihhfdz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmihhfdz" level=info timestamp=2018-07-26T11:44:58.351916Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:44:58.352444Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:44:58.392983Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi64qdn kind= uid=5208993e-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:44:58.393485Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi64qdn kind= uid=5208993e-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:44:58.432634Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:44:58.433047Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:44:58.669446Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin8dd2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin8dd2" level=info timestamp=2018-07-26T11:44:58.688728Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipzd64\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipzd64" level=info timestamp=2018-07-26T11:44:59.541711Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi64qdn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi64qdn" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:45:17.464922Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmirj2mr kind=Domain uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-26T11:45:17.469699Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-26T11:45:17.475484Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:45:17.476078Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirj2mr, existing: true\n" level=info timestamp=2018-07-26T11:45:17.476558Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-26T11:45:17.477413Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:45:17.477939Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:45:17.478367Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="No update processing required" level=info timestamp=2018-07-26T11:45:17.523835Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:45:17.524567Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirj2mr, existing: true\n" level=info timestamp=2018-07-26T11:45:17.524862Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-26T11:45:17.525144Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:45:17.525398Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:45:17.525763Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-26T11:45:17.532801Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:45:27.068862Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi64qdn kind= uid=5208993e-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:45:27.835480Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:45:27.893216Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:45:28.660426Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmin8dd2, existing: true\n" level=info timestamp=2018-07-26T11:45:28.660506Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-26T11:45:28.660537Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:45:28.660564Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:45:28.849482Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipzd64, existing: true\n" level=info timestamp=2018-07-26T11:45:29.011090Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-26T11:45:29.484791Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:45:29.484892Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-26T11:45:29.484989Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-26T11:45:29.458722Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-26T11:45:30.027007Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:45:31.397902Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi64qdn-tn2ws Pod phase: Running level=info timestamp=2018-07-26T11:45:22.522809Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:45:23.506604Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:45:23.524528Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 5d7baad8-f87f-4833-b257-f9dbd0a11638" level=info timestamp=2018-07-26T11:45:23.524714Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:45:23.701647Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:23.954922Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:45:24.037853Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:24.053863Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi64qdn kind= uid=5208993e-90c9-11e8-90ce-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-26T11:45:24.054071Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:24.054172Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-26T11:45:24.055917Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi64qdn kind= uid=5208993e-90c9-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:45:24.100297Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:24.168899Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:24.534449Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 5d7baad8-f87f-4833-b257-f9dbd0a11638: 211" level=info timestamp=2018-07-26T11:45:27.012966Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi64qdn kind= uid=5208993e-90c9-11e8-90ce-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmihhfdz-kntzb Pod phase: Running level=info timestamp=2018-07-26T11:45:21.349677Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmihhfdz kind= uid=3fc29495-90c9-11e8-90ce-525500d15501 msg="Domain defined." level=info timestamp=2018-07-26T11:45:21.817523Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:45:21.848211Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 20cffcec-1cd4-4e31-acd4-a670ad7eb45b" level=info timestamp=2018-07-26T11:45:21.849422Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:45:21.861011Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:22.154443Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:45:22.172464Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:22.178462Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:22.180316Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-26T11:45:22.189207Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmihhfdz kind= uid=3fc29495-90c9-11e8-90ce-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-26T11:45:22.196303Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihhfdz kind= uid=3fc29495-90c9-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:45:22.197623Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:22.201301Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:22.392563Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihhfdz kind= uid=3fc29495-90c9-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:45:22.857320Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 20cffcec-1cd4-4e31-acd4-a670ad7eb45b: 206" Pod name: virt-launcher-testvmin8dd2-csb97 Pod phase: Running level=info timestamp=2018-07-26T11:45:23.370361Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:45:24.626143Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID a5709647-70f1-4a0f-bd98-9754bbcab918" level=info timestamp=2018-07-26T11:45:24.626349Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:45:24.629967Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:45:24.760401Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:25.288827Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:45:25.360148Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:25.379810Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-26T11:45:25.381485Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:45:25.644781Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a5709647-70f1-4a0f-bd98-9754bbcab918: 219" level=info timestamp=2018-07-26T11:45:25.716062Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:25.716223Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-26T11:45:25.743581Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:26.044091Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:29.821053Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmin8dd2 kind= uid=51ffdfb7-90c9-11e8-90ce-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmipzd64-qqmnn Pod phase: Running level=info timestamp=2018-07-26T11:45:22.865726Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:45:24.115042Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:45:24.137150Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 35a0acad-83ee-4127-afc0-bbc8dc32013b" level=info timestamp=2018-07-26T11:45:24.137329Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:45:24.297044Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:24.829159Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:45:24.884457Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:24.932324Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-26T11:45:24.934312Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-26T11:45:25.165538Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 35a0acad-83ee-4127-afc0-bbc8dc32013b: 218" level=info timestamp=2018-07-26T11:45:25.340865Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:25.341067Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-26T11:45:25.377986Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:25.580023Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:30.790211Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipzd64 kind= uid=51ef8590-90c9-11e8-90ce-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmirj2mr-8dkw8 Pod phase: Running level=info timestamp=2018-07-26T11:45:16.280305Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-26T11:45:16.773041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-26T11:45:16.789926Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-26T11:45:17.304429Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 7f8e1a5a-9bd7-4f1c-a8d0-f24105e677d0" level=info timestamp=2018-07-26T11:45:17.304732Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-26T11:45:17.410745Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-26T11:45:17.434171Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-26T11:45:17.438726Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Domain started." 
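For readers decoding the virt-launcher lines above: "Libvirt event N with reason 0" uses libvirt's virDomainEventType enum (0 = DEFINED, 2 = STARTED, 4 = RESUMED, ...), and "domain status: A:B" is a virDomainState/reason pair, so 1:1 is RUNNING and 3:11 is PAUSED while starting up. A small lookup sketch with names taken from libvirt's public enums:

```go
package main

import "fmt"

// Names follow libvirt's virDomainEventType enum.
var eventType = map[int]string{
	0: "DEFINED", 1: "UNDEFINED", 2: "STARTED", 3: "SUSPENDED",
	4: "RESUMED", 5: "STOPPED", 6: "SHUTDOWN", 7: "PMSUSPENDED", 8: "CRASHED",
}

// Names follow libvirt's virDomainState enum.
var domainState = map[int]string{
	0: "NOSTATE", 1: "RUNNING", 2: "BLOCKED", 3: "PAUSED",
	4: "SHUTDOWN", 5: "SHUTOFF", 6: "CRASHED", 7: "PMSUSPENDED",
}

func main() {
	// "Libvirt event 4 with reason 0" and "domain status: 1:1" above:
	fmt.Println(eventType[4])   // RESUMED
	fmt.Println(domainState[1]) // RUNNING
	// "domain status: 3:11" is PAUSED; reason 11 is STARTING_UP in
	// libvirt's paused-reason enum (the brief pause before boot).
	fmt.Println(domainState[3]) // PAUSED
}
```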
Pod name: virt-launcher-testvmirj2mr-8dkw8
Pod phase: Running
level=info timestamp=2018-07-26T11:45:16.280305Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-07-26T11:45:16.773041Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-07-26T11:45:16.789926Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:45:17.304429Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 7f8e1a5a-9bd7-4f1c-a8d0-f24105e677d0"
level=info timestamp=2018-07-26T11:45:17.304732Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-07-26T11:45:17.410745Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-07-26T11:45:17.434171Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:45:17.438726Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Domain started."
level=info timestamp=2018-07-26T11:45:17.441796Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:45:17.445845Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:45:17.446522Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T11:45:17.462864Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T11:45:17.470965Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T11:45:17.531932Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmirj2mr kind= uid=520d59ab-90c9-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T11:45:18.310161Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 7f8e1a5a-9bd7-4f1c-a8d0-f24105e677d0: 191"
• Failure in Spec Setup (BeforeEach) [42.317 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on the same node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.errorString | 0xc4203a4a10>: {
            s: "Timeout trying to connect to the virtual machine instance",
        }
        Timeout trying to connect to the virtual machine instance
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1086
------------------------------
level=info timestamp=2018-07-26T11:44:58.994316Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmipzd64-qqmnn"
level=info timestamp=2018-07-26T11:45:22.638358Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmipzd64-qqmnn"
level=info timestamp=2018-07-26T11:45:26.840861Z pos=utils.go:243 component=tests msg="VirtualMachineInstance defined."
level=info timestamp=2018-07-26T11:45:28.300436Z pos=utils.go:243 component=tests msg="VirtualMachineInstance started."
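Note the sequence above: the launcher pod was created, the VMI was defined and started, yet the spec setup still failed with "Timeout trying to connect to the virtual machine instance" (tests/utils.go:1086) — the guest came up but never became reachable within the harness's deadline. A minimal sketch of that kind of reachability wait, assuming a hypothetical address; it illustrates the polling pattern, not the actual helper in tests/utils.go:

```go
package main

import (
	"fmt"
	"net"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

// waitForVMI polls a TCP endpoint until the guest answers or the overall
// timeout expires, mirroring the shape of the wait that failed above.
func waitForVMI(addr string, timeout time.Duration) error {
	return wait.PollImmediate(2*time.Second, timeout, func() (bool, error) {
		conn, err := net.DialTimeout("tcp", addr, time.Second)
		if err != nil {
			return false, nil // not reachable yet; keep polling
		}
		conn.Close()
		return true, nil
	})
}

func main() {
	// Hypothetical VMI address; a real test would use the propagated IP.
	if err := waitForVMI("10.129.0.50:22", 90*time.Second); err != nil {
		fmt.Println("Timeout trying to connect to the virtual machine instance")
	}
}
```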
• [SLOW TEST:97.733 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on a different node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
•••••
------------------------------
• [SLOW TEST:33.581 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom interface model
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:364
    should expose the right device type to the guest
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:365
------------------------------
•
------------------------------
• [SLOW TEST:33.158 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:398
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:399
------------------------------
• [SLOW TEST:37.157 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address in non-conventional format
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:410
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:411
------------------------------
• [SLOW TEST:32.178 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address and slirp interface
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:423
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:424
------------------------------
• [SLOW TEST:47.117 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with disabled automatic attachment of interfaces
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:436
    should not configure any external interfaces
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:437
------------------------------
•
------------------------------
• [SLOW TEST:19.319 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    should start it
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:80
------------------------------
• [SLOW TEST:19.137 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    should attach virt-launcher to it
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:86
------------------------------
••••
------------------------------
• [SLOW TEST:33.028 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with boot order
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174
      should be able to boot from selected disk
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        Alpine as first boot
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:24.185 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with boot order
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174
      should be able to boot from selected disk
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        Cirros as first boot
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:15.643 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with user-data
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205
      without k8s secret
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206
        should retry starting the VirtualMachineInstance
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:207
------------------------------
• [SLOW TEST:19.028 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with user-data
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205
      without k8s secret
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206
        should log warning and proceed once the secret is there
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:237
------------------------------
• [SLOW TEST:46.997 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-launcher crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:285
      should be stopped and have Failed phase
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:286
------------------------------
• [SLOW TEST:29.020 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler crashes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:308
      should recover and continue management
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:309
------------------------------
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:55:50 http: TLS handshake error from 10.129.0.1:56408: EOF level=info timestamp=2018-07-26T11:55:52.240064Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:55:58.404983Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T11:55:58.409380Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/26 11:56:00 http: TLS handshake error from 10.129.0.1:56420: EOF level=info timestamp=2018-07-26T11:56:02.448565Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:56:07.308220Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:56:08.180432Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T11:56:08.216162Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 11:56:10 http: TLS handshake error from 10.129.0.1:56432: EOF level=info timestamp=2018-07-26T11:56:12.684624Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jq8hh Pod phase: Running level=info timestamp=2018-07-26T11:54:37.682877Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:54:44 http: TLS handshake error from 10.129.0.1:40156: EOF 2018/07/26 11:54:54 http: TLS handshake error from 10.129.0.1:40170: EOF 2018/07/26 11:55:04 http: TLS handshake error from 10.129.0.1:40182: EOF level=info timestamp=2018-07-26T11:55:07.724329Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:55:14 http: TLS handshake error from 10.129.0.1:40194: EOF 2018/07/26 11:55:24 http: TLS handshake error from 10.129.0.1:40206: EOF 2018/07/26 11:55:34 http: TLS handshake error from 10.129.0.1:40218: EOF level=info timestamp=2018-07-26T11:55:36.686233Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T11:55:37.865607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:55:44 http: TLS handshake error from 10.129.0.1:40230: EOF 2018/07/26 11:55:54 http: TLS handshake error from 10.129.0.1:40242: EOF 2018/07/26 11:56:04 http: TLS 
handshake error from 10.129.0.1:40254: EOF level=info timestamp=2018-07-26T11:56:07.577179Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 11:56:14 http: TLS handshake error from 10.129.0.1:40266: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=info timestamp=2018-07-26T11:52:00.224634Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminfk5j kind= uid=4d76f3f4-90ca-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:52:00.557025Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminfk5j\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminfk5j" level=info timestamp=2018-07-26T11:52:00.656126Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminfk5j\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminfk5j" level=info timestamp=2018-07-26T11:52:24.294262Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminghkb kind= uid=5bd21cbb-90ca-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:52:24.294604Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminghkb kind= uid=5bd21cbb-90ca-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:52:24.523516Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminghkb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminghkb" level=info timestamp=2018-07-26T11:52:40.036016Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidqcdb kind= uid=65205fde-90ca-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:52:40.036333Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidqcdb kind= uid=65205fde-90ca-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:52:40.380088Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidqcdb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidqcdb" level=info timestamp=2018-07-26T11:52:58.925012Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvktk kind= uid=707617fc-90ca-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:52:58.930420Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwvktk kind= uid=707617fc-90ca-11e8-90ce-525500d15501 msg="Marking 
VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:52:59.127775Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwvktk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwvktk" level=info timestamp=2018-07-26T11:53:46.024590Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:53:46.025342Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:53:46.244155Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminzvr4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminzvr4" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-n2w27 Pod phase: Running level=info timestamp=2018-07-26T11:51:59.174889Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-26T11:51:59.175157Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=Domain uid=39ba1bbe-90ca-11e8-90ce-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-07-26T11:51:59.175218Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9dplq, existing: false\n" level=info timestamp=2018-07-26T11:51:59.175239Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-26T11:51:59.175256Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T11:51:59.175340Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T11:51:59.175372Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T11:51:59.176349Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmi9dplq" level=info timestamp=2018-07-26T11:51:59.181499Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:51:59.818321Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:51:59.860149Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:51:59.860337Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9dplq, existing: false\n" level=info timestamp=2018-07-26T11:51:59.860374Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:51:59.860484Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:51:59.861727Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9dplq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-qxrvv Pod phase: Running level=info timestamp=2018-07-26T11:54:14.809077Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Removing domain and ephemeral data for finalized vmi." level=info timestamp=2018-07-26T11:54:14.809113Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Processing deletion." level=info timestamp=2018-07-26T11:54:14.810445Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Signaled deletion for testvminzvr4" level=info timestamp=2018-07-26T11:54:14.821189Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T11:54:14.821424Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T11:54:14.821558Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T11:54:14.821616Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminzvr4, existing: true\n" level=info timestamp=2018-07-26T11:54:14.821642Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-26T11:54:14.829779Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T11:54:14.829863Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T11:54:14.830141Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind= uid=8c88a3e7-90ca-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T11:54:14.987734Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminzvr4, existing: false\n"
level=info timestamp=2018-07-26T11:54:14.987927Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T11:54:14.988188Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T11:54:14.988397Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminzvr4 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
• Failure [120.807 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler is responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:339
      should indicate that a node is ready for vmis [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:340

      Timed out after 120.000s.
      Expected
          : false
      to equal
          : true

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:360
------------------------------
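The 120-second timeout above has the shape of a Gomega `Eventually` poll on a boolean that never flipped to true. A minimal sketch of that assertion style; `nodeIsReadyForVMIs` is a hypothetical predicate standing in for the readiness check at vmi_lifecycle_test.go:360, not the test's actual code:

```go
package tests

import (
	"time"

	. "github.com/onsi/gomega"
)

// nodeIsReadyForVMIs is a hypothetical stand-in for the check that the
// node carries the schedulable label and a fresh heartbeat.
func nodeIsReadyForVMIs() bool {
	return false // in the failing run this never became true
}

func assertNodeReady() {
	// Polls the predicate for up to 120s; on failure Gomega reports
	// "Timed out after 120.000s." with the false/true mismatch seen above.
	Eventually(nodeIsReadyForVMIs, 120*time.Second, 2*time.Second).Should(Equal(true))
}
```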
STEP: adding a heartbeat annotation and a schedulable label to the node
STEP: setting the schedulable label back to true
• [SLOW TEST:81.399 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler is not responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:370
      the node controller should react
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:409
------------------------------
• [SLOW TEST:18.681 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with node tainted
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:462
      the vmi with tolerations should be scheduled
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:484
------------------------------
•
------------------------------
• [SLOW TEST:45.044 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:534
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-default
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:22.059 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:534
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-alternative
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.298 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:591
      should enable emulation in virt-launcher [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:603

      Software emulation is not enabled on this cluster

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:599
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.249 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:591
      should be reflected in domain XML [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:640

      Software emulation is not enabled on this cluster

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:599
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.269 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:591
      should request a TUN device but not KVM [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:684

      Software emulation is not enabled on this cluster

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:599
------------------------------
••••
------------------------------
• [SLOW TEST:18.877 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance's Pod
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:836
    should result in the VirtualMachineInstance moving to a finalized state
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837
------------------------------
• [SLOW TEST:36.010 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:868
    with an active pod.
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
      should result in pod being terminated
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:870
------------------------------
• [SLOW TEST:43.872 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:868
    with ACPI and 0 grace period seconds
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:894
      should result in vmi status failed
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:895
------------------------------
• [SLOW TEST:45.892 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:868
    with ACPI and some grace period seconds
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:919
      should result in vmi status succeeded
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:920
------------------------------
• [SLOW TEST:34.093 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:868
    with grace period greater than 0
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:944
      should run graceful shutdown
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:945
------------------------------
• [SLOW TEST:30.562 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:996
    should be in Failed phase
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:997
------------------------------
Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
level=info timestamp=2018-07-26T12:02:10.820529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:02:12.891186Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:02:12.920903Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 12:02:20 http: TLS handshake error from 10.129.0.1:56886: EOF
level=info timestamp=2018-07-26T12:02:21.046007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 12:02:30 http: TLS handshake error from 10.129.0.1:56898: EOF
level=info timestamp=2018-07-26T12:02:31.339779Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-26T12:02:40.047377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 12:02:40 http: TLS handshake error from 10.129.0.1:56910: EOF level=info timestamp=2018-07-26T12:02:41.735595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T12:02:43.132241Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-26T12:02:43.151905Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/26 12:02:50 http: TLS handshake error from 10.129.0.1:56922: EOF level=info timestamp=2018-07-26T12:02:51.930287Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-jq8hh Pod phase: Running 2018/07/26 12:00:44 http: TLS handshake error from 10.129.0.1:40596: EOF 2018/07/26 12:00:54 http: TLS handshake error from 10.129.0.1:40610: EOF 2018/07/26 12:01:04 http: TLS handshake error from 10.129.0.1:40624: EOF level=info timestamp=2018-07-26T12:01:07.832434Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 12:01:14 http: TLS handshake error from 10.129.0.1:40636: EOF 2018/07/26 12:01:24 http: TLS handshake error from 10.129.0.1:40648: EOF 2018/07/26 12:01:34 http: TLS handshake error from 10.129.0.1:40660: EOF level=info timestamp=2018-07-26T12:01:37.910128Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/26 12:01:44 http: TLS handshake error from 10.129.0.1:40672: EOF 2018/07/26 12:01:54 http: TLS handshake error from 10.129.0.1:40684: EOF 2018/07/26 12:02:04 http: TLS handshake error from 10.129.0.1:40696: EOF 2018/07/26 12:02:14 http: TLS handshake error from 10.129.0.1:40708: EOF 2018/07/26 12:02:24 http: TLS handshake error from 10.129.0.1:40720: EOF 2018/07/26 12:02:34 http: TLS handshake error from 10.129.0.1:40732: EOF 2018/07/26 12:02:44 http: TLS handshake error from 10.129.0.1:40744: EOF Pod name: virt-controller-7d57d96b65-48ghm Pod phase: Running level=info timestamp=2018-07-26T11:59:26.431369Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2bmbr kind= uid=576dbbd1-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T11:59:26.431985Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2bmbr kind= uid=576dbbd1-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T11:59:26.585286Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2bmbr\": the object has been 
modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2bmbr" level=info timestamp=2018-07-26T12:00:02.473349Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdfrg kind= uid=6ce6a8a0-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T12:00:02.473952Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdfrg kind= uid=6ce6a8a0-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T12:00:46.270107Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi42f5g kind= uid=8704ae3c-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T12:00:46.270908Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi42f5g kind= uid=8704ae3c-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T12:01:32.196765Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6tk5 kind= uid=a264067b-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T12:01:32.197700Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6tk5 kind= uid=a264067b-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T12:02:06.417961Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi24jcm kind= uid=b6cabf3b-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T12:02:06.418762Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi24jcm kind= uid=b6cabf3b-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-26T12:02:06.528848Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi24jcm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi24jcm" level=info timestamp=2018-07-26T12:02:06.573829Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi24jcm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi24jcm" level=info timestamp=2018-07-26T12:02:36.877218Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-26T12:02:36.877847Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-fkcm2 Pod phase: Running level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller 
service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-ls6fd Pod phase: Running level=info timestamp=2018-07-26T12:02:05.301438Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-26T12:02:05.301519Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-26T12:02:05.308196Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-26T12:02:05.308722Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmim6tk5" level=info timestamp=2018-07-26T12:02:05.316216Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T12:02:05.317477Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-26T12:02:05.317847Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-26T12:02:05.317925Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmim6tk5, existing: false\n" level=info timestamp=2018-07-26T12:02:05.317946Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T12:02:05.318037Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T12:02:05.320075Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-26T12:02:05.320264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmim6tk5, existing: false\n" level=info timestamp=2018-07-26T12:02:05.320287Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-26T12:02:05.320369Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-26T12:02:05.320448Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-wfqb2 Pod phase: Running level=info timestamp=2018-07-26T12:02:36.678712Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi24jcm kind= uid=b6cabf3b-90cb-11e8-90ce-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-26T12:02:36.763444Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi24jcm, existing: false\n"
level=info timestamp=2018-07-26T12:02:36.763630Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T12:02:36.763888Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi24jcm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T12:02:36.764130Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi24jcm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T12:02:52.449550Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmip79xk, existing: true\n"
level=info timestamp=2018-07-26T12:02:52.450463Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T12:02:52.450596Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T12:02:52.451037Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-07-26T12:02:52.511943Z pos=vm.go:424 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 reason="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-07-26T12:02:52.553774Z pos=vm.go:251 component=virt-handler reason="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmip79xk"
level=info timestamp=2018-07-26T12:02:52.555773Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmip79xk, existing: true\n"
level=info timestamp=2018-07-26T12:02:52.557498Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-07-26T12:02:52.558490Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T12:02:52.559442Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Processing vmi update"

Pod name: virt-launcher-testvmip79xk-n4zvx
Pod phase: Running
level=info timestamp=2018-07-26T12:02:41.663292Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-07-26T12:02:41.667103Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-07-26T12:02:41.670004Z pos=libvirt.go:256 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-07-26T12:02:51.679002Z pos=libvirt.go:271 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-07-26T12:02:51.734470Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmip79xk"
level=info timestamp=2018-07-26T12:02:51.736213Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-07-26T12:02:51.736737Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
level=error timestamp=2018-07-26T12:02:52.481829Z pos=manager.go:160 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 reason="virError(Code=0, Domain=0, Message='Missing error')" msg="Getting the domain failed."
level=error timestamp=2018-07-26T12:02:52.482219Z pos=server.go:68 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 reason="virError(Code=0, Domain=0, Message='Missing error')" msg="Failed to sync vmi"
level=error timestamp=2018-07-26T12:02:52.621925Z pos=common.go:126 component=virt-launcher msg="updated MAC for interface: eth0 - 0a:58:0a:0d:e2:4a"
level=info timestamp=2018-07-26T12:02:52.636639Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf"
level=info timestamp=2018-07-26T12:02:52.636831Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local"
level=info timestamp=2018-07-26T12:02:52.647091Z pos=dhcp.go:62 component=virt-launcher msg="Starting SingleClientDHCPServer"
level=info timestamp=2018-07-26T12:02:52.956359Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Domain defined."
level=info timestamp=2018-07-26T12:02:52.957230Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"

Pod name: disks-images-provider-8ftmp
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-gmnhx
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-56ccl
Pod phase: Running
level=info timestamp=2018-07-26T12:03:40.722952Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:03:42.930761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:03:43.976483Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:03:43.982293Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 12:03:50 http: TLS handshake error from 10.129.0.1:56994: EOF
level=info timestamp=2018-07-26T12:03:52.830134Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:03:52.879819Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:03:52.908846Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:03:53.052282Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-26T12:03:59.636429Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-26T12:03:59.641828Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/26 12:04:00 http: TLS handshake error from 10.129.0.1:57008: EOF
level=info timestamp=2018-07-26T12:04:03.167111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/26 12:04:10 http: TLS handshake error from 10.129.0.1:57020: EOF
level=info timestamp=2018-07-26T12:04:11.012417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-jq8hh
Pod phase: Running
2018/07/26 12:02:04 http: TLS handshake error from 10.129.0.1:40696: EOF
2018/07/26 12:02:14 http: TLS handshake error from 10.129.0.1:40708: EOF
2018/07/26 12:02:24 http: TLS handshake error from 10.129.0.1:40720: EOF
2018/07/26 12:02:34 http: TLS handshake error from 10.129.0.1:40732: EOF
2018/07/26 12:02:44 http: TLS handshake error from 10.129.0.1:40744: EOF
2018/07/26 12:02:54 http: TLS handshake error from 10.129.0.1:40756: EOF
2018/07/26 12:03:04 http: TLS handshake error from 10.129.0.1:40768: EOF
2018/07/26 12:03:14 http: TLS handshake error from 10.129.0.1:40780: EOF
2018/07/26 12:03:24 http: TLS handshake error from 10.129.0.1:40792: EOF
2018/07/26 12:03:34 http: TLS handshake error from 10.129.0.1:40804: EOF
level=info timestamp=2018-07-26T12:03:37.558018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/26 12:03:44 http: TLS handshake error from 10.129.0.1:40816: EOF
2018/07/26 12:03:54 http: TLS handshake error from 10.129.0.1:40830: EOF
2018/07/26 12:04:04 http: TLS handshake error from 10.129.0.1:40842: EOF
level=info timestamp=2018-07-26T12:04:07.665041Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-48ghm
Pod phase: Running
level=info timestamp=2018-07-26T11:59:26.431369Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2bmbr kind= uid=576dbbd1-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T11:59:26.431985Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2bmbr kind= uid=576dbbd1-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T11:59:26.585286Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2bmbr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2bmbr"
level=info timestamp=2018-07-26T12:00:02.473349Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdfrg kind= uid=6ce6a8a0-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T12:00:02.473952Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijdfrg kind= uid=6ce6a8a0-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T12:00:46.270107Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi42f5g kind= uid=8704ae3c-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T12:00:46.270908Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi42f5g kind= uid=8704ae3c-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T12:01:32.196765Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6tk5 kind= uid=a264067b-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T12:01:32.197700Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim6tk5 kind= uid=a264067b-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T12:02:06.417961Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi24jcm kind= uid=b6cabf3b-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T12:02:06.418762Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi24jcm kind= uid=b6cabf3b-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-07-26T12:02:06.528848Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi24jcm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi24jcm"
level=info timestamp=2018-07-26T12:02:06.573829Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi24jcm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi24jcm"
level=info timestamp=2018-07-26T12:02:36.877218Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-07-26T12:02:36.877847Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-fkcm2
Pod phase: Running
level=info timestamp=2018-07-26T11:27:19.071367Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-ls6fd
Pod phase: Running
level=info timestamp=2018-07-26T12:02:05.301438Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-07-26T12:02:05.301519Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-07-26T12:02:05.308196Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-07-26T12:02:05.308722Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmim6tk5"
level=info timestamp=2018-07-26T12:02:05.316216Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T12:02:05.317477Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-07-26T12:02:05.317847Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-26T12:02:05.317925Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmim6tk5, existing: false\n"
level=info timestamp=2018-07-26T12:02:05.317946Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T12:02:05.318037Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T12:02:05.320075Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T12:02:05.320264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmim6tk5, existing: false\n"
level=info timestamp=2018-07-26T12:02:05.320287Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-26T12:02:05.320369Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-26T12:02:05.320448Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmim6tk5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-wfqb2
Pod phase: Running
level=info timestamp=2018-07-26T12:04:11.172020Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind=Domain uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Domain is in state Shutoff reason Crashed"
level=info timestamp=2018-07-26T12:04:11.173596Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmip79xk, existing: true\n"
level=info timestamp=2018-07-26T12:04:11.174255Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-07-26T12:04:11.174396Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T12:04:11.174496Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Crashed\n"
level=info timestamp=2018-07-26T12:04:11.174868Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="No update processing required"
level=info timestamp=2018-07-26T12:04:11.288331Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-07-26T12:04:11.293894Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmip79xk, existing: true\n"
level=info timestamp=2018-07-26T12:04:11.295049Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-07-26T12:04:11.295215Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-07-26T12:04:11.295269Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Crashed\n"
level=info timestamp=2018-07-26T12:04:11.295518Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Removing domain and ephemeral data for finalized vmi."
level=info timestamp=2018-07-26T12:04:11.295647Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Processing deletion."
level=info timestamp=2018-07-26T12:04:11.299193Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Signaled deletion for testvmip79xk"
level=info timestamp=2018-07-26T12:04:11.306499Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmip79xk-n4zvx
Pod phase: Running
level=info timestamp=2018-07-26T12:02:54.498136Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T12:02:54.498815Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-26T12:02:54.517000Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-26T12:02:54.519242Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T12:02:54.826031Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T12:02:54.970925Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-26T12:02:55.117411Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a80923d8-bde7-493b-b33f-3d8a21b8cd78: 183"
level=info timestamp=2018-07-26T12:04:11.099045Z pos=monitor.go:231 component=virt-launcher msg="Process a80923d8-bde7-493b-b33f-3d8a21b8cd78 and pid 183 is gone!"
level=info timestamp=2018-07-26T12:04:11.101230Z pos=client.go:136 component=virt-launcher msg="Libvirt event 5 with reason 5 received"
level=info timestamp=2018-07-26T12:04:11.104551Z pos=manager.go:306 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind=VirtualMachineInstance uid= msg="Domain not running or paused, nothing to do."
level=info timestamp=2018-07-26T12:04:11.104996Z pos=virt-launcher.go:234 component=virt-launcher msg="Waiting on final notifications to be sent to virt-handler."
level=info timestamp=2018-07-26T12:04:11.160932Z pos=client.go:119 component=virt-launcher msg="domain status: 5:3"
level=info timestamp=2018-07-26T12:04:11.175770Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-26T12:04:11.305420Z pos=manager.go:329 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Domain undefined."
level=info timestamp=2018-07-26T12:04:11.305953Z pos=server.go:140 component=virt-launcher namespace=kubevirt-test-default name=testvmip79xk kind= uid=c8efe248-90cb-11e8-90ce-525500d15501 msg="Signaled vmi deletion"

Pod name: vmi-killertnv6b
Pod phase: Succeeded

• Failure [95.727 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
Killed VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:996
should be left alone by virt-handler [It]
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1024

Unexpected Warning event received.
Expected
: Warning
not to equal
: Warning

/root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
------------------------------
STEP: Starting a VirtualMachineInstance
level=info timestamp=2018-07-26T12:02:37.338483Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmip79xk-n4zvx"
level=info timestamp=2018-07-26T12:02:52.577338Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmip79xk-n4zvx"
level=error timestamp=2018-07-26T12:02:52.699219Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')"
STEP: Killing the VirtualMachineInstance
level=info timestamp=2018-07-26T12:04:07.477943Z pos=utils.go:254 component=tests msg="Created virtual machine pod virt-launcher-testvmip79xk-n4zvx"
level=info timestamp=2018-07-26T12:04:07.478145Z pos=utils.go:254 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmip79xk-n4zvx"
level=error timestamp=2018-07-26T12:04:07.478827Z pos=utils.go:252 component=tests reason="unexpected warning event received" msg="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')"
level=info timestamp=2018-07-26T12:04:07.479271Z pos=utils.go:254 component=tests msg="VirtualMachineInstance defined."
level=info timestamp=2018-07-26T12:04:07.479477Z pos=utils.go:254 component=tests msg="VirtualMachineInstance started."
level=error timestamp=2018-07-26T12:04:11.481753Z pos=utils.go:252 component=tests reason="unexpected warning event received" msg="The VirtualMachineInstance crashed."
STEP: Checking that virt-handler does not try to sync stopped VirtualMachineInstance
level=info timestamp=2018-07-26T12:04:11.841427Z pos=utils.go:254 component=tests msg="Created virtual machine pod virt-launcher-testvmip79xk-n4zvx"
level=info timestamp=2018-07-26T12:04:11.841521Z pos=utils.go:254 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmip79xk-n4zvx"
level=error timestamp=2018-07-26T12:04:11.842137Z pos=utils.go:252 component=tests reason="unexpected warning event received" msg="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')"
• [SLOW TEST:42.845 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
with a cirros image
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67
should return that we are running cirros
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68
------------------------------
• [SLOW TEST:43.647 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
with a fedora image
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77
should return that we are running fedora
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78
------------------------------
• [SLOW TEST:37.818 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should be able to reconnect to console multiple times
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87
------------------------------
• [SLOW TEST:18.215 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should wait until the virtual machine is in running state and return a stream interface
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103
------------------------------
• [SLOW TEST:30.433 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should fail waiting for the virtual machine instance to be running
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111
------------------------------
• [SLOW TEST:30.251 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should fail waiting for the expecter
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134
------------------------------
Waiting for namespace kubevirt-test-default to be removed, this can take a while ...
Waiting for namespace kubevirt-test-alternative to be removed, this can take a while ...

Summarizing 29 Failures:

[Fail] Storage Starting a VirtualMachineInstance with Alpine PVC should be successfully started [It] with CDRom PVC
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
[Fail] Storage Starting a VirtualMachineInstance With ephemeral alpine PVC [It] should be successfully started
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:214
[Fail] Storage Starting a VirtualMachineInstance With ephemeral alpine PVC [It] should not persist data
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] Storage Starting a VirtualMachineInstance With VirtualMachineInstance with two PVCs [It] should start vmi multiple times
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] RegistryDisk Starting and stopping the same VirtualMachineInstance with ephemeral registry disk [It] should success multiple times
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:98
[Fail] RegistryDisk Starting a VirtualMachineInstance with ephemeral registry disk [It] should not modify the spec on status update
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:119
[Fail] RegistryDisk Starting multiple VMIs with ephemeral registry disk [It] should success
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:55
[Fail] Version Check that version parameters where loaded by ldflags in build time [It] Should return a good version information struct
/root/go/src/kubevirt.io/kubevirt/tests/version_test.go:49
[Fail] Configurations VirtualMachineInstance definition with 3 CPU cores [It] should report 3 cpu cores under guest OS
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:74
[Fail] Configurations VirtualMachineInstance definition with hugepages should consume hugepages [It] hugepages-2Mi
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:184
[Fail] Configurations VirtualMachineInstance definition with hugepages with usupported page size [It] should failed to schedule the pod
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:214
[Fail] Configurations with CPU spec [BeforeEach] when CPU model defined should report defined CPU model
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1576
[Fail] Configurations with CPU spec [BeforeEach] when CPU model equals to passthrough should report exactly the same model as node CPU
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1576
[Fail] Configurations with CPU spec [BeforeEach] when CPU model not defined should report CPU model from libvirt capabilities
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1576
[Fail] Configurations New VirtualMachineInstance with all supported drives [It] should have all the device nodes
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:382
[Fail] Expose Expose service on a VM [BeforeEach] Expose ClusterIP service Should expose a Cluster IP service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
[Fail] Expose Expose service on a VM [BeforeEach] Expose ClusterIP service with string target-port Should expose a ClusterIP service and connect to the vm on port 80
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
[Fail] Expose Expose service on a VM [BeforeEach] Expose NodePort service Should expose a NodePort service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
[Fail] Expose Expose UDP service on a VMI [BeforeEach] Expose ClusterIP UDP service Should expose a ClusterIP service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
[Fail] Expose Expose UDP service on a VMI [BeforeEach] Expose NodePort UDP service Should expose a NodePort service on a VMI and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:27
[Fail] Expose Expose service on a VMI replica set [BeforeEach] Expose ClusterIP service Should create a ClusterIP service on VMRS and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:264
[Fail] Expose Expose service on an VM [BeforeEach] Expose ClusterIP service Connect to ClusterIP services that was set when VM was offline
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:331
[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131
[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance with pod network connectivity explicitly set
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131
[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance with custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131
[Fail] Networking [BeforeEach] should be able to reach the internet
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:131
[Fail] Networking [BeforeEach] should be reachable via the propagated IP from a Pod on the same node from Pod
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1086
[Fail] VMIlifecycle Creating a VirtualMachineInstance when virt-handler is responsive [It] should indicate that a node is ready for vmis
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:360
[Fail] VMIlifecycle Killed VirtualMachineInstance [It] should be left alone by virt-handler
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:245

Ran 140 of 150 Specs in 4419.073 seconds
FAIL! -- 111 Passed | 29 Failed | 0 Pending | 10 Skipped
--- FAIL: TestTests (4419.08s)
FAIL
make: *** [functest] Error 1
+ make cluster-down
./cluster/down.sh