+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading ...
Downloading ...
2018/08/02 20:30:09 Waiting for host: 192.168.66.102:22
2018/08/02 20:30:12 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 20:30:20 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 20:30:28 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 20:30:36 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8'
+ '[' -n '8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8' ']'
+ docker rm -f 8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8
8d985261fee2
cdd5cb050f63
3b39c685d71c
1fe63ccc7eb5
7fb562e6d373
7357d7573809
4cd9d31e59d6
3dd2b4e034df
b8bd1bfbd6fe
2b04452ad9d2
5a8d8e270d07
5bcee7ba14b9
d317f6c94b21
249a96e5b5be
bf8eede72349
37a4fab28e2d
08ce4e71eea8
e4e0f326f8c6
0a856ce6e727
64a97837a605
4e67e156ec7c
6b2d2d048d59
8b938906ea49
6572244ef7bc
b44328453c0f
f0bde1a8d259
61984dd36d89
d0b161b029c9
6b12e033271c
7cd73847cb28
c98ef990eea8
2018/08/02 20:30:42 Waiting for host: 192.168.66.101:22
2018/08/02 20:30:45 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 20:30:53 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 20:31:01 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 20:31:09 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused. Sleeping 5s
2018/08/02 20:31:14 Connected to tcp://192.168.66.101:22
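The "Waiting for host" retries above come from the provisioner polling each node's SSH port until it answers. A minimal bash sketch of the same poll loop, using bash's /dev/tcp redirection (host, port, and the 5s interval are taken from the log; the real tool is likely a Go binary, so this is only an approximation):

host=192.168.66.101 port=22
# Retry until the TCP connect succeeds, mirroring the "Problem with dial ... Sleeping 5s" lines.
until timeout 1 bash -c "exec 3<>/dev/tcp/${host}/${port}" 2>/dev/null; do
    echo "$(date '+%Y/%m/%d %H:%M:%S') Problem with dial: ${host}:${port}. Sleeping 5s"
    sleep 5
done
echo "Connected to tcp://${host}:${port}"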
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=4.13 ms

--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 4.136/4.136/4.136/0.000 ms
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
Found node02. Adding it to the inventory.
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable

--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms
+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml
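The trace above extends the inventory with a [new_nodes] group and discovers nodes by probing successive addresses. A self-contained replay of that discovery loop (the naming scheme, group line, and ping check are read off the trace; the redirection targets of the echo lines are not visible in an xtrace, so they are left on stdout here):

nodes_found=false
for i in $(seq 2 100); do
    node=$(printf node%02d "$i")                # node02, node03, ...
    node_ip=$(printf 192.168.66.1%02d "$i")     # 192.168.66.102, ...
    set +e
    ping "$node_ip" -c 1
    rc=$?
    set -e
    if [ "$rc" -ne 0 ]; then break; fi          # first unreachable address ends the scan
    nodes_found=true
    echo "$node_ip $node"                       # hosts-file style entry
    echo "Found $node. Adding it to the inventory."
    echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip"
done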

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

PLAY [Ensure there are new_nodes] **********************************************

TASK [fail] ********************************************************************
skipping: [localhost]
TASK [fail] ********************************************************************
skipping: [localhost]

PLAY [Initialization Checkpoint Start] *****************************************

TASK [Set install initialization 'In Progress'] ********************************
ok: [node01]

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)
 [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config

PLAY [Ensure that all non-node hosts are accessible] ***************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

PLAY [Initialize basic host facts] *********************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02
TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] ***
ok: [node02]
ok: [node01]
TASK [openshift_sanitize_inventory : debug] ************************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_stats] ********************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] ***
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Standardize on latest variable names] *****
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Normalize openshift_release] **************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02
TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
skipping: [node01] => (item=openshift_hosted_etcd_storage_kind)
skipping: [node02] => (item=openshift_hosted_etcd_storage_kind)
TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : At least one master is schedulable] *******
skipping: [node01]
skipping: [node02]
TASK [Detecting Operating System from ostree_booted] ***************************
ok: [node02]
ok: [node01]
TASK [set openshift_deployment_type if unset] **********************************
skipping: [node01]
skipping: [node02]
TASK [check for node already bootstrapped] *************************************
ok: [node01]
ok: [node02]
TASK [initialize_facts set fact openshift_is_bootstrapped] *********************
ok: [node01]
ok: [node02]
TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] ***
ok: [node01]
ok: [node02]
TASK [Determine Atomic Host Docker Version] ************************************
skipping: [node01]
skipping: [node02]
TASK [assert atomic host docker version is 1.12 or later] **********************
skipping: [node01]
skipping: [node02]

PLAY [Retrieve existing master configs and validate] ***************************

TASK [openshift_control_plane : stat] ******************************************
ok: [node01]
TASK [openshift_control_plane : slurp] *****************************************
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] ***
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Initialize special first-master variables] *******************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Disable web console if required] *****************************************

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Setup yum repositories for all hosts] ************************************

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Install Red Hat Subscription manager] *******************
skipping: [node02]
TASK [rhel_subscribe : Is host already registered?] ****************************
skipping: [node02]
TASK [rhel_subscribe : Register host] ******************************************
skipping: [node02]
TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] ***********
skipping: [node02]
TASK [rhel_subscribe : Attach to OpenShift Pool] *******************************
skipping: [node02]
TASK [rhel_subscribe : Satellite preparation] **********************************
skipping: [node02]
TASK [openshift_repos : openshift_repos detect ostree] *************************
ok: [node02]
TASK [openshift_repos : Ensure libselinux-python is installed] *****************
ok: [node02]
TASK [openshift_repos : Remove openshift_additional.repo file] *****************
ok: [node02]
TASK [openshift_repos : Create any additional repos that are defined] **********
TASK [openshift_repos : include_tasks] *****************************************
skipping: [node02]
TASK [openshift_repos : include_tasks] *****************************************
included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02
TASK [openshift_repos : Configure origin gpg keys] *****************************
ok: [node02]
TASK [openshift_repos : Configure correct origin release repository] ***********
ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2)
TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] ***
changed: [node02] => {
    "msg": "First run of openshift_repos"
}
TASK [openshift_repos : Record that openshift_repos already ran] ***************
ok: [node02]
RUNNING HANDLER [openshift_repos : refresh cache] ******************************
changed: [node02]

PLAY [Install packages necessary for installer] ********************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Determine if chrony is installed] ****************************************
 [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message.
changed: [node02]
TASK [Install ntp package] *****************************************************
skipping: [node02]
TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]
TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=python-docker)

PLAY [Initialize cluster facts] ************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]
TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node01]
ok: [node02]
TASK [Gather Cluster facts] ****************************************************
changed: [node02]
changed: [node01]
TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]
TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]

PLAY [Initialize etcd host variables] ******************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0"
}
TASK [openshift_version : set_fact] ********************************************
ok: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Verify Requirements] *****************************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [Run variable sanity checks] **********************************************
ok: [node01]
TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]

PLAY [Initialization Checkpoint End] *******************************************

TASK [Set install initialization 'Complete'] ***********************************
ok: [node01]

PLAY [Validate node hostnames] *************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Query DNS for IP address of node02] **************************************
ok: [node02]
TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]
TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]

PLAY [Configure os_firewall] ***************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]
TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]
TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]
TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]
TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)
TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]
TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]
TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] ***
skipping: [node02]
TASK [os_firewall : Restart polkitd] *******************************************
skipping: [node02]
TASK [os_firewall : Wait for polkit action to have been created] ***************
skipping: [node02]
TASK [os_firewall : Ensure firewalld service is not enabled] *******************
ok: [node02]
TASK [os_firewall : Wait 10 seconds after disabling firewalld] *****************
skipping: [node02]
TASK [os_firewall : Install iptables packages] *********************************
ok: [node02] => (item=iptables)
ok: [node02] => (item=iptables-services)
TASK [os_firewall : Start and enable iptables service] *************************
ok: [node02 -> node02] => (item=node02)
TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] ***
skipping: [node02]

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [container_runtime : Setup the docker-storage for overlay] ****************
skipping: [node02]
TASK [container_runtime : Create file system on extra volume device] ***********
TASK [container_runtime : Create mount entry for extra volume] *****************

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
ok: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ******************
skipping: [node02]
TASK [container_runtime : Add https_proxy to /etc/atomic.conf] *****************
skipping: [node02]
TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ********************
skipping: [node02]
TASK [container_runtime : Get current installed Docker version] ****************
ok: [node02]
TASK [container_runtime : Error out if Docker pre-installed but too old] *******
skipping: [node02]
TASK [container_runtime : Error out if requested Docker is too old] ************
skipping: [node02]
TASK [container_runtime : Install Docker] **************************************
skipping: [node02]
TASK [container_runtime : Ensure docker.service.d directory exists] ************
ok: [node02]
TASK [container_runtime : Configure Docker service unit file] ******************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Set registry params] *********************************
skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []})
TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] ***
skipping: [node02]
TASK [container_runtime : Set Proxy Settings] **********************************
skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''})
TASK [container_runtime : Set various Docker options] **************************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Configure Docker Network OPTIONS] ********************
ok: [node02]
TASK [container_runtime : Detect if docker is already started] *****************
ok: [node02]
TASK [container_runtime : Start the Docker service] ****************************
ok: [node02]
TASK [container_runtime : set_fact] ********************************************
ok: [node02]
TASK [container_runtime : Check for docker_storage_path/overlay2] **************
ok: [node02]
TASK [container_runtime : Fixup SELinux permissions for docker] ****************
changed: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
ok: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
ok: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
ok: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]
TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Check that overlay is in the kernel] *****************
skipping: [node02]
TASK [container_runtime : Add overlay to modprobe.d] ***************************
skipping: [node02]
TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
skipping: [node02]
TASK [container_runtime : Enable and start systemd-modules-load] ***************
skipping: [node02]
TASK [container_runtime : Install cri-o] ***************************************
skipping: [node02]
TASK [container_runtime : Remove CRI-O default configuration files] ************
skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf)
skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf)
TASK [container_runtime : Create the CRI-O configuration] **********************
skipping: [node02]
TASK [container_runtime : Ensure CNI configuration directory exists] ***********
skipping: [node02]
TASK [container_runtime : Add iptables allow rules] ****************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove iptables rules] *******************************
TASK [container_runtime : Add firewalld allow rules] ***************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove firewalld allow rules] ************************
TASK [container_runtime : Configure the CNI network] ***************************
skipping: [node02]
TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
skipping: [node02]
TASK [container_runtime : Start the CRI-O service] *****************************
skipping: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
skipping: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
skipping: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
skipping: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
skipping: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Node Preparation Checkpoint Start] ***************************************

TASK [Set Node preparation 'In Progress'] **************************************
ok: [node01]

PLAY [Only target nodes that have not yet been bootstrapped] *******************

TASK [Gathering Facts] *********************************************************
ok: [localhost]
TASK [add_host] ****************************************************************
skipping: [localhost] => (item=node02)
ok: [localhost] => (item=node01)

PLAY [Disable excluders] *******************************************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]
TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_docker_excluder": true
}
TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}
TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]
TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]
TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-docker-excluder version detected] ************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-excluder version detected] *******************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
skipping: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]
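The excluder checks and toggles above use the origin-docker-excluder and origin-excluder packages, whose wrapper scripts add or remove yum "exclude=" entries so docker and origin packages are not upgraded by accident. A hedged sketch of the manual equivalent, assuming the wrappers' usual names and subcommands (neither is shown verbatim in this log):

origin-docker-excluder status      # report whether the exclude entries are active
origin-docker-excluder unexclude   # lift the excludes, e.g. before an install/upgrade
origin-docker-excluder exclude     # put the excludes back afterwards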

PLAY [Configure nodes] *********************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_cloud_provider : Set cloud provider facts] *********************
skipping: [node02]
TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
skipping: [node02]
TASK [openshift_cloud_provider : include the defined cloud provider files] *****
skipping: [node02]
TASK [openshift_node : fail] ***************************************************
skipping: [node02]
TASK [openshift_node : Check for NetworkManager service] ***********************
ok: [node02]
TASK [openshift_node : Set fact using_network_manager] *************************
ok: [node02]
TASK [openshift_node : Install dnsmasq] ****************************************
ok: [node02]
TASK [openshift_node : ensure origin/node directory exists] ********************
changed: [node02] => (item=/etc/origin)
changed: [node02] => (item=/etc/origin/node)
TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
skipping: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
skipping: [node02]
TASK [openshift_node : Install dnsmasq configuration] **************************
ok: [node02]
TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
skipping: [node02]
TASK [openshift_node : Enable dnsmasq] *****************************************
ok: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
ok: [node02]
TASK [openshift_node : Add iptables allow rules] *******************************
ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove iptables rules] **********************************
TASK [openshift_node : Add firewalld allow rules] ******************************
skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove firewalld allow rules] ***************************
TASK [openshift_node : Checking for journald.conf] *****************************
ok: [node02]
TASK [openshift_node : Create journald persistence directories] ****************
ok: [node02]
TASK [openshift_node : Update journald setup] **********************************
ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'})
ok: [node02] => (item={u'var': u'Compress', u'val': True})
ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000})
ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
ok: [node02] => (item={u'var': u'ForwardToSyslog', u'val': False})
ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False})
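The "Update journald setup" items above map one-to-one onto keys in /etc/systemd/journald.conf. A bash sketch that applies the same settings by hand (values are from the task's item list; the ansible booleans True/False are written here as yes/no, and a daemon restart is assumed to be needed for them to take effect):

conf=/etc/systemd/journald.conf
for kv in Storage=persistent Compress=yes SyncIntervalSec=1s RateLimitInterval=1s \
          RateLimitBurst=10000 SystemMaxUse=8G SystemKeepFree=20% \
          SystemMaxFileSize=10M MaxRetentionSec=1month MaxFileSec=1day \
          ForwardToSyslog=no ForwardToWall=no; do
    key=${kv%%=*}
    # Replace an existing "Key=..." line, or append the setting if absent.
    if grep -q "^${key}=" "$conf"; then sed -i "s|^${key}=.*|${kv}|" "$conf"; else echo "$kv" >> "$conf"; fi
done
systemctl restart systemd-journald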
TASK [openshift_node : Restart journald] ***************************************
skipping: [node02]
TASK [openshift_node : Disable swap] *******************************************
ok: [node02]
TASK [openshift_node : Install node, clients, and conntrack packages] **********
ok: [node02] => (item={u'name': u'origin-node-3.10.0*'})
ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'})
ok: [node02] => (item={u'name': u'conntrack-tools'})
TASK [openshift_node : Restart cri-o] ******************************************
skipping: [node02]
TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
changed: [node02]
TASK [openshift_node : sysctl] *************************************************
ok: [node02]
TASK [openshift_node : Check for credentials file for registry auth] ***********
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth] *******************
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth (alternative)] *****
skipping: [node02]
TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
skipping: [node02]
TASK [openshift_node : Check that node image is present] ***********************
changed: [node02]
TASK [openshift_node : Pre-pull node image] ************************************
skipping: [node02]
TASK [openshift_node : Copy node script to the node] ***************************
ok: [node02]
TASK [openshift_node : Install Node service file] ******************************
ok: [node02]
TASK [openshift_node : Ensure old system path is set] **************************
skipping: [node02] => (item=/etc/origin/openvswitch)
skipping: [node02] => (item=/var/lib/kubelet)
skipping: [node02] => (item=/opt/cni/bin)
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Copy node container image to ostree storage] ************
skipping: [node02]
TASK [openshift_node : Install or Update node system container] ****************
skipping: [node02]
TASK [openshift_node : Restart network manager to ensure networking configuration is in place] ***
skipping: [node02]
TASK [openshift_node : Configure Node settings] ********************************
ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='})
ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'})
ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'})
TASK [openshift_node : Configure Proxy Settings] *******************************
skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='})
skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='})
skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'})
TASK [openshift_node : file] ***************************************************
skipping: [node02]
TASK [openshift_node : Create the Node config] *********************************
changed: [node02]
TASK [openshift_node : Configure Node Environment Variables] *******************
TASK [openshift_node : Ensure the node static pod directory exists] ************
changed: [node02]
TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Install NFS storage plugin dependencies] ****************
ok: [node02]
TASK [openshift_node : Check for existence of nfs sebooleans] ******************
ok: [node02] => (item=virt_use_nfs)
ok: [node02] => (item=virt_sandbox_use_nfs)
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:39:50.138556', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.008485', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:39:50.130071', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:39:51.682577', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.011654', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:39:51.670923', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:39:50.138556', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.008485', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:39:50.130071', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:39:51.682577', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.011654', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:39:51.670923', '_ansible_ignore_errors': None, 'failed': False})
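The two seboolean tasks above read the current values with getsebool (the "... --> on" stdout visible in the item dumps) and would only flip a boolean that is off. A manual equivalent for the NFS pair, using the standard SELinux tools:

for b in virt_use_nfs virt_sandbox_use_nfs; do
    getsebool "$b"                                          # prints e.g. "virt_use_nfs --> on"
    getsebool "$b" | grep -q ' on$' || setsebool -P "$b" on # -P makes the change persistent
done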
20:39:51.670923', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install GlusterFS storage plugin dependencies] ********** ok: [node02] TASK [openshift_node : Check for existence of fusefs sebooleans] *************** ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:39:58.899954', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.006904', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:39:58.893050', '_ansible_ignore_errors': None, 'failed': False}) ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:40:00.399216', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.010603', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:40:00.388613', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:39:58.899954', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.006904', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:39:58.893050', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 20:40:00.399216', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.010603', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, 
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 20:40:00.388613', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9317749, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9317749}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9317749, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9317749}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': u's0', 'seuser': u'unconfined_u', 'serole': u'object_r', 'ctime': 1533205381.9327748, 'state': u'file', 'gid': 0, 'mode': u'0644', 'mtime': 1533205381.9327748, 'owner': u'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': u'admin_home_t'}) TASK [tuned : Make tuned use 
the recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02 -> node01] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node01] changed: [node02] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump 
the bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:03:53) Node Preparation : Complete (0:05:07) + set +e + crio=false + grep crio /root/inventory + '[' 1 -eq 0 ']' + set -e + cat + ansible-playbook -i /root/inventory post_deployment_configuration --extra-vars=crio=false PLAY [nodes, new_nodes] ******************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [replace] ***************************************************************** skipping: [node01] skipping: [node02] TASK [replace] ***************************************************************** skipping: [node01] skipping: [node02] TASK [service] ***************************************************************** skipping: [node01] skipping: [node02] PLAY RECAP ********************************************************************* node01 : ok=1 changed=0 unreachable=0 failed=0 node02 : ok=1 changed=0 unreachable=0 failed=0 + set -x + /usr/bin/oc get nodes --no-headers node01 Ready compute,infra,master 10h v1.10.0+b81c8f8 node02 Ready compute 37s v1.10.0+b81c8f8 + os_rc=0 + retry_counter=0 + [[ 0 -lt 20 ]] + [[ 0 -ne 0 ]] + /usr/bin/oc create -f /tmp/local-volume.yaml storageclass.storage.k8s.io "local" created configmap "local-storage-config" created clusterrolebinding.rbac.authorization.k8s.io 
"local-storage-provisioner-pv-binding" created clusterrole.rbac.authorization.k8s.io "local-storage-provisioner-node-clusterrole" created clusterrolebinding.rbac.authorization.k8s.io "local-storage-provisioner-node-binding" created role.rbac.authorization.k8s.io "local-storage-provisioner-jobs-role" created rolebinding.rbac.authorization.k8s.io "local-storage-provisioner-jobs-rolebinding" created serviceaccount "local-storage-admin" created daemonset.extensions "local-volume-provisioner" created Sending file modes: C0755 110489328 oc Sending file modes: C0600 5645 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 10h v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 10h v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... Untagged: localhost:33187/kubevirt/virt-controller:devel Untagged: localhost:33187/kubevirt/virt-controller@sha256:6b89e2bb50407ad93c74e179c46536affb3f9c03ee757a095d09aef8703a1899 Deleted: sha256:8b441387a56b1b6cad80dfe0a2d1fa927289c8aa122304ce56f150ef73cb0fe0 Untagged: localhost:33187/kubevirt/virt-launcher:devel Untagged: localhost:33187/kubevirt/virt-launcher@sha256:85a1b25cda9cbeb2b442304f420b30565438013e7099c5bc1267979b6c623316 Deleted: sha256:7acf9196d4090c0f30bfb06307eeaff3b4876f8890acbc5a028f785228b302d6 Untagged: localhost:33187/kubevirt/virt-handler:devel Untagged: localhost:33187/kubevirt/virt-handler@sha256:67547b1056346914759973801cc8746d09c15c1cd036a823d9fb2dc90d413789 Deleted: sha256:e149bd17f143099bdd429655146347e2531635157a1574dff12e2f390b492e35 Untagged: localhost:33187/kubevirt/virt-api:devel Untagged: localhost:33187/kubevirt/virt-api@sha256:65249c63ea3b3c72f8080f1a2e69625607ff4b43cb0bac3db8ac845d0fc75f8b Deleted: sha256:1e25dad25314b93b0cea37fc07f65114af9800b861b9a7d9d7ecb69a01cc8216 Untagged: localhost:33187/kubevirt/subresource-access-test:devel Untagged: localhost:33187/kubevirt/subresource-access-test@sha256:2f4f211fcd6f6fadf756e8f8697b033ba844d155a8bbdabad00dd46a8bc188c3 Deleted: sha256:2a7fb8ff07850bb544ffb38c9df1050e154b9471620394daafba07a4cc3cb0ca Untagged: localhost:33187/kubevirt/example-hook-sidecar:devel Untagged: localhost:33187/kubevirt/example-hook-sidecar@sha256:bb0956aad7b06812fcc8f3b8f8a7f78f3ffafd48122dfbfe6ddb11db9b54fedb Deleted: sha256:64f6014bb90a0ca38a05f369947a2b1e6a47a78812630201e4e359f5f12e46fc sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... 
compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.39 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b00c84523b53 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> b76b8bd8cd39 Step 5/8 : USER 1001 ---> Using cache ---> b6d9ad9ed232 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> Using cache ---> 10025c1f29ba Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Using cache ---> d4b693752e4d Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-controller" '' ---> Running in 5749e044916b ---> 5799f798d0bb Removing intermediate container 5749e044916b Successfully built 5799f798d0bb Sending build context to Docker daemon 43.31 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 945996802736 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 672f9ab56316 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> Using cache ---> 0dcc6a7754eb Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> Using cache ---> b28b30a02c75 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Using cache ---> 74730d235198 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Using cache ---> af611f78dce1 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> Using cache ---> 7ba719e5943d Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Using cache ---> 1ce13e8c6205 Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-launcher" '' ---> Running in f75b02d2f923 ---> 0a70bf0db2b3 Removing intermediate container f75b02d2f923 Successfully built 0a70bf0db2b3 Sending build context to Docker daemon 41.69 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> Using cache ---> 66a3f05d7e7e Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Using cache ---> 85ed862c1d14 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-handler" '' ---> Running in 82dc13643a4d ---> 329fd14619d1 Removing intermediate container 82dc13643a4d Successfully built 329fd14619d1 Sending build context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> ed1ebf600ee1 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 0769dad023e5 Step 5/8 : USER 1001 ---> Using cache ---> 0cb65afb0c2b Step 6/8 : COPY virt-api /usr/bin/virt-api ---> Using cache ---> 024d8a1f8014 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Using cache ---> 649f6a1514e4 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-api" '' ---> Running in f709d3af9ab3 ---> b4fd06570a52 Removing intermediate container f709d3af9ab3 Successfully built b4fd06570a52 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" 
---> Using cache ---> bfe77d5699ed Step 3/7 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 02134835a6aa Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> ec0843818da7 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 754029bb4bd2 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 6327b8256318 Successfully built 6327b8256318 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 207487abe7b2 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "vm-killer" '' ---> Using cache ---> 27cf5472530f Successfully built 27cf5472530f Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 5734d749eb5c Step 3/7 : ENV container docker ---> Using cache ---> f8775a77966f Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 1a40cf222a61 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 77b545d92fe7 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> dfe20d463305 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "registry-disk-v1alpha" '' ---> Using cache ---> 5efdf368e732 Successfully built 5efdf368e732 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33521/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 386f7e924456 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> f473a86e4d6a Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a4ca4c67d45c Successfully built a4ca4c67d45c Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33521/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> de1e81f43a59 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a5867eac6e05 Successfully built a5867eac6e05 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33521/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 1083d820f9c8 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 11512d828b9c Successfully built 11512d828b9c Sending build context to Docker daemon 35.59 
MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 985fe391c056 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 3b2cae8ac543 Step 5/8 : USER 1001 ---> Using cache ---> 0c06e5b4a900 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> Using cache ---> 5fb52ad585f4 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Using cache ---> 4027b869eeff Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "subresource-access-test" '' ---> Running in 2d3b6122a007 ---> 663f74f99b86 Removing intermediate container 2d3b6122a007 Successfully built 663f74f99b86 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/9 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> d3456b1644b1 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0ba81fddbba1 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 5d33abe3f819 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 783826523be1 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 711bc8d15952 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "winrmcli" '' ---> Using cache ---> fe40426b785b Successfully built fe40426b785b Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> e3238544ad97 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> Using cache ---> 9e92ab15c30e Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Using cache ---> 8f5241001baa Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Running in 10fe435dfd8a ---> bd30c8bec6b0 Removing intermediate container 10fe435dfd8a Successfully built bd30c8bec6b0 hack/build-docker.sh push The push refers to a repository [localhost:33521/kubevirt/virt-controller] 1249ea14d6b2: Preparing aa89340cf7a8: Preparing 891e1e4ef82a: Preparing aa89340cf7a8: Pushed 1249ea14d6b2: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:f7feda81e37f4b6ce1c7bab203777f5973f4b9c5a703efd32117696a10239f59 size: 949 The push refers to a repository [localhost:33521/kubevirt/virt-launcher] d42300ef9291: Preparing 11a1136f77fa: Preparing 3d262858092e: Preparing 67dc0924c34b: Preparing cc561ae6c195: Preparing 633427c64a24: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing da38cf808aa5: Waiting 633427c64a24: Waiting 5eefb9960a36: Preparing 891e1e4ef82a: Preparing 5eefb9960a36: Waiting fa6154170bf5: Waiting b83399358a92: Waiting 891e1e4ef82a: Waiting 11a1136f77fa: Pushed d42300ef9291: Pushed 67dc0924c34b: Pushed b83399358a92: Pushed da38cf808aa5: Pushed fa6154170bf5: Pushed 186d8b3e4fd8: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 3d262858092e: Pushed 633427c64a24: Pushed cc561ae6c195: Pushed 5eefb9960a36: Pushed devel: digest: sha256:49dcdc5db95259c692c388bcd013b7a2c357ef3a9456f18a8ebfe20aad20dfa5 size: 2828 The push refers to a repository 
[localhost:33521/kubevirt/virt-handler] da56a9df7308: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher da56a9df7308: Pushed devel: digest: sha256:309559d3c71d6221d2dd4737e27364b68daca7769ae44b5e288859673c416ed8 size: 741 The push refers to a repository [localhost:33521/kubevirt/virt-api] abaa2bff8d93: Preparing 82fc744c99b4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 82fc744c99b4: Pushed abaa2bff8d93: Pushed devel: digest: sha256:740372a903c4252655331f26b825efeaa48cd9923fe5a15be13d81699616545f size: 948 The push refers to a repository [localhost:33521/kubevirt/disks-images-provider] 71ad31feb2c5: Preparing 21d4b721776e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 71ad31feb2c5: Pushed 21d4b721776e: Pushed devel: digest: sha256:5dc088106df85eb01f2ad0566624239b95b34986820107944e36d309183fd4cd size: 948 The push refers to a repository [localhost:33521/kubevirt/vm-killer] c4cfadeeaf5f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider c4cfadeeaf5f: Pushed devel: digest: sha256:39b817b79b1fbce75dbb476bc261b2752fd6466bf98d373208d5144579da22b0 size: 740 The push refers to a repository [localhost:33521/kubevirt/registry-disk-v1alpha] 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 661cce8d8e52: Pushed 41e0baba3077: Pushed 25edbec0eaea: Pushed devel: digest: sha256:0df707a55243af8792380fba68a76307017494c503e0e9071ed55d7d3c3611d4 size: 948 The push refers to a repository [localhost:33521/kubevirt/cirros-registry-disk-demo] f9f97de3966a: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 661cce8d8e52: Mounted from kubevirt/registry-disk-v1alpha f9f97de3966a: Pushed devel: digest: sha256:3f818f67105a36bdc42bdbfad87fc29d0028e39a0dceef92d12efbcf8e16e5ed size: 1160 The push refers to a repository [localhost:33521/kubevirt/fedora-cloud-registry-disk-demo] 24cdf3b545f2: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 41e0baba3077: Mounted from kubevirt/cirros-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/cirros-registry-disk-demo 24cdf3b545f2: Pushed devel: digest: sha256:a6a571626690141c7da4cf0e1eb4fd75e5dd9ae427d5070c2729214cfbd6a192 size: 1161 The push refers to a repository [localhost:33521/kubevirt/alpine-registry-disk-demo] d8e356e905f4: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/fedora-cloud-registry-disk-demo 41e0baba3077: Mounted from kubevirt/fedora-cloud-registry-disk-demo d8e356e905f4: Pushed devel: digest: sha256:c27568048aa8e031860d98cdced0370763745ad80581e62432568dac45abf1fb size: 1160 The push refers to a repository [localhost:33521/kubevirt/subresource-access-test] cabd126d82e9: Preparing 25cb73590a9d: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 25cb73590a9d: Pushed cabd126d82e9: Pushed devel: digest: sha256:43c243e2e3cd22c129de8269369c7e77f106a3b2f9d2ed3d170dfc8c841e166f size: 948 The push refers to a repository [localhost:33521/kubevirt/winrmcli] f8083e002d0b: Preparing 53c709abc882: Preparing 9ca98a0f492b: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from 
kubevirt/subresource-access-test f8083e002d0b: Pushed 9ca98a0f492b: Pushed 53c709abc882: Pushed devel: digest: sha256:4fe6c9666a841b61b962d7fb73ccb7cb0dabc3b56e1657cfdfd9005e1a36d38c size: 1165 The push refers to a repository [localhost:33521/kubevirt/example-hook-sidecar] c94b580b8ebb: Preparing 39bae602f753: Preparing c94b580b8ebb: Pushed 39bae602f753: Pushed devel: digest: sha256:39bb7ea03505b5f610a819a9d600107bfd27a638945f980ce3c84d4191666870 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-178-gac42c50 ++ KUBEVIRT_VERSION=v0.7.0-178-gac42c50 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:14ffc4a28e24a2510c9b455b56f35f6193a00b71c9150705f6afec41b003fc76 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli 
cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33521/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... + cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + 
KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export 
KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io 
"offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-178-gac42c50 ++ KUBEVIRT_VERSION=v0.7.0-178-gac42c50 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:14ffc4a28e24a2510c9b455b56f35f6193a00b71c9150705f6afec41b003fc76 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ 
kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33521/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... + [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created 
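
Once the testing manifests below finish applying, the provider check `[[ os-3.10.0 =~ os-* ]]` triggers a series of security-context-constraint grants, since OpenShift requires the privileged SCC for the KubeVirt service accounts. Stripped of the xtrace noise, the grants in the trace amount to the following (shown here with oc directly; the trace routes them through the _kubectl wrapper, which points at cluster/os-3.10.0/.kubectl):

# Equivalent of the _kubectl wrapper calls visible in the trace.
oc adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system
oc adm policy add-scc-to-user privileged -z kubevirt-testing    -n kube-system
oc adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system
oc adm policy add-scc-to-user privileged -z kubevirt-apiserver  -n kube-system
oc adm policy add-scc-to-user privileged admin   # grant to the admin user as well
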
persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-hrc9c 0/1 ContainerCreating 0 3s disks-images-provider-lrcb2 0/1 ContainerCreating 0 4s virt-api-7d79764579-r89xz 0/1 ContainerCreating 0 6s virt-api-7d79764579-wcv25 0/1 ContainerCreating 0 6s virt-controller-7d57d96b65-frjqz 0/1 ContainerCreating 0 6s virt-controller-7d57d96b65-vpqqb 0/1 ContainerCreating 0 6s virt-handler-b8bjc 0/1 ContainerCreating 0 6s virt-handler-vxhhj 0/1 ContainerCreating 0 6s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
+ kubectl get pods -n kube-system --no-headers
+ cluster/kubectl.sh get pods -n kube-system --no-headers
+ grep -v Running
disks-images-provider-hrc9c 0/1 ContainerCreating 0 4s
disks-images-provider-lrcb2 0/1 ContainerCreating 0 5s
virt-api-7d79764579-r89xz 0/1 ContainerCreating 0 7s
virt-api-7d79764579-wcv25 0/1 ContainerCreating 0 7s
virt-controller-7d57d96b65-vpqqb 0/1 ContainerCreating 0 7s
virt-handler-b8bjc 0/1 ContainerCreating 0 7s
virt-handler-vxhhj 0/1 ContainerCreating 0 7s
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system --no-headers
++ cluster/kubectl.sh get pods -n kube-system --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n false ']'
+ echo 'Waiting for KubeVirt containers to become ready ...'
Waiting for KubeVirt containers to become ready ...
+ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ grep false
+ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
false
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n kube-system
+ cluster/kubectl.sh get pods -n kube-system
NAME                               READY     STATUS    RESTARTS   AGE
disks-images-provider-hrc9c        1/1       Running   0          1m
disks-images-provider-lrcb2        1/1       Running   0          1m
master-api-node01                  1/1       Running   1          10h
master-controllers-node01          1/1       Running   1          10h
master-etcd-node01                 1/1       Running   1          10h
virt-api-7d79764579-r89xz          1/1       Running   1          1m
virt-api-7d79764579-wcv25          1/1       Running   0          1m
virt-controller-7d57d96b65-frjqz   1/1       Running   0          1m
virt-controller-7d57d96b65-vpqqb   1/1       Running   0          1m
virt-handler-b8bjc                 1/1       Running   0          1m
virt-handler-vxhhj                 1/1       Running   0          1m
+ for i in '${namespaces[@]}'
+ current_time=0
++ kubectl get pods -n default --no-headers
++ cluster/kubectl.sh get pods -n default --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
+ '[' -n '' ']'
+ kubectl get pods -n default
+ cluster/kubectl.sh get pods -n default
NAME                             READY     STATUS    RESTARTS   AGE
docker-registry-1-92ssr          1/1       Running   1          10h
local-volume-provisioner-4j26h   1/1       Running   0          14m
local-volume-provisioner-j5w9j   1/1       Running   0          14m
registry-console-1-k87ct         1/1       Running   1          10h
router-1-49jwl                   1/1       Running   1          10h
+ kubectl version
+ cluster/kubectl.sh version
oc v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
features: Basic-Auth GSSAPI Kerberos SPNEGO
Server https://127.0.0.1:33518
openshift v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
+ ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ [[ openshift-3.10-release =~ windows.* ]]
+ FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ make functest
hack/dockerized
"hack/build-func-tests.sh" sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1533243539 Will run 151 of 151 specs • [SLOW TEST:49.228 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 should have cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 ------------------------------ 2018/08/02 16:59:51 read closing down: EOF • [SLOW TEST:117.068 seconds] CloudInit UserData 2018/08/02 17:01:48 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 with injected ssh-key /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92 should have ssh-key under authorized keys /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93 ------------------------------ 2018/08/02 17:02:32 read closing down: EOF • [SLOW TEST:55.074 seconds] 2018/08/02 17:02:43 read closing down: EOF CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userData source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118 should process provided cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119 ------------------------------ 2018/08/02 17:03:26 read closing down: EOF • [SLOW TEST:43.498 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 should take user-data from k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162 ------------------------------ • [SLOW TEST:18.498 seconds] VNC /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54 with VNC connection /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62 should allow accessing the VNC device /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64 ------------------------------ ••• ------------------------------ • [SLOW TEST:7.550 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:7.391 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should be rejected on POST if spec is invalid /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:107 ------------------------------ • ------------------------------ • [SLOW TEST:18.761 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should update readyReplicas once 
VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157 ------------------------------ • [SLOW TEST:6.731 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove VMIs once it is marked for deletion /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:169 ------------------------------ • ------------------------------ • [SLOW TEST:5.762 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should not scale when paused and scale when resume /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223 ------------------------------ • [SLOW TEST:7.179 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove the finished VM /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279 ------------------------------ 2018/08/02 17:05:26 read closing down: EOF Service cluster-ip-vmi successfully exposed for virtualmachineinstance testvmimtvcv • [SLOW TEST:46.955 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:68 Should expose a Cluster IP service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:71 ------------------------------ Service cluster-ip-target-vmi successfully exposed for virtualmachineinstance testvmimtvcv •Service node-port-vmi successfully exposed for virtualmachineinstance testvmimtvcv ------------------------------ • [SLOW TEST:9.705 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose NodePort service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:124 Should expose a NodePort service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:129 ------------------------------ 2018/08/02 17:06:25 read closing down: EOF Service cluster-ip-udp-vmi successfully exposed for virtualmachineinstance testvmi896p9 • [SLOW TEST:45.588 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VMI /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166 Expose ClusterIP UDP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:173 Should expose a ClusterIP service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:177 ------------------------------ Service node-port-udp-vmi successfully exposed for virtualmachineinstance testvmi896p9 • [SLOW TEST:9.637 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VMI /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166 Expose NodePort UDP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:205 Should expose a NodePort service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:210 ------------------------------ 2018/08/02 17:12:19 read closing down: EOF Pod name: disks-images-provider-hrc9c Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-lrcb2 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-r89xz Pod phase: Running level=info timestamp=2018-08-02T21:11:45.870655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 21:11:51 http: TLS handshake error from 10.129.0.1:52530: EOF level=info timestamp=2018-08-02T21:11:53.427991Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:11:53.475477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:11:53.515262Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:11:56.102018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:11:59.450367Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T21:11:59.907997Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:12:01 http: TLS handshake error from 10.129.0.1:52538: EOF level=info timestamp=2018-08-02T21:12:06.537258Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:12:08.829679Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:12:08.972420Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:12:08.973307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 21:12:11 http: TLS handshake error from 10.129.0.1:52546: EOF level=info timestamp=2018-08-02T21:12:17.440601Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-wcv25 Pod phase: Running level=info timestamp=2018-08-02T21:10:29.620243Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:10:36 http: TLS handshake error from 10.129.0.1:37682: EOF 2018/08/02 21:10:46 http: TLS handshake error from 10.129.0.1:37690: EOF 2018/08/02 21:10:56 http: TLS handshake error from 10.129.0.1:37698: EOF level=info timestamp=2018-08-02T21:10:59.807170Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:11:06 http: TLS handshake error from 10.129.0.1:37706: EOF 
2018/08/02 21:11:16 http: TLS handshake error from 10.129.0.1:37714: EOF 2018/08/02 21:11:26 http: TLS handshake error from 10.129.0.1:37722: EOF level=info timestamp=2018-08-02T21:11:29.702557Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:11:36 http: TLS handshake error from 10.129.0.1:37730: EOF 2018/08/02 21:11:46 http: TLS handshake error from 10.129.0.1:37738: EOF 2018/08/02 21:11:56 http: TLS handshake error from 10.129.0.1:37746: EOF level=info timestamp=2018-08-02T21:11:59.384122Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:12:06 http: TLS handshake error from 10.129.0.1:37754: EOF 2018/08/02 21:12:16 http: TLS handshake error from 10.129.0.1:37762: EOF Pod name: virt-controller-7d57d96b65-frjqz Pod phase: Running level=info timestamp=2018-08-02T21:05:43.085647Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi896p9 kind= uid=d15c0949-9697-11e8-b0c6-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:05:43.113233Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi896p9 kind= uid=d15c0949-9697-11e8-b0c6-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:05:43.313593Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi896p9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi896p9" level=info timestamp=2018-08-02T21:06:38.412477Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-08-02T21:06:38.413251Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiz84lxl74wb" level=info timestamp=2018-08-02T21:06:38.413561Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:06:38.413861Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:06:38.423405Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-08-02T21:06:38.423622Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmiz84lxc82x8" level=info timestamp=2018-08-02T21:06:38.426277Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= 
uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:06:38.426565Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=error timestamp=2018-08-02T21:06:38.908627Z pos=replicaset.go:230 component=virt-controller service=http namespace=kubevirt-test-default name=replicasetnhcg9 kind= uid=f24762cd-9697-11e8-b0c6-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetnhcg9\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the replicaset status failed." level=info timestamp=2018-08-02T21:06:38.908951Z pos=replicaset.go:137 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstancereplicasets.kubevirt.io \"replicasetnhcg9\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstanceReplicaSet kubevirt-test-default/replicasetnhcg9" level=info timestamp=2018-08-02T21:06:39.497877Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz84lxl74wb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz84lxl74wb" level=info timestamp=2018-08-02T21:06:39.553307Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz84lxc82x8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz84lxc82x8" Pod name: virt-controller-7d57d96b65-vpqqb Pod phase: Running level=info timestamp=2018-08-02T20:57:20.559523Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-b8bjc Pod phase: Running level=info timestamp=2018-08-02T21:06:55.335503Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind=Domain uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-08-02T21:06:55.379626Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-02T21:06:55.382231Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Synchronization loop succeeded." 
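The error/reenqueue pairs in the virt-controller log above ("Operation cannot be fulfilled ... the object has been modified") are routine optimistic-concurrency conflicts rather than failures: the controller wrote with a stale resourceVersion, the API server refused the write, and the work item was re-enqueued to retry against the latest version. The same rejection can be provoked by hand with a stale write; a sketch, using a VMI name from this run (it must still exist):

    # Snapshot the object, let the controller update it, then push the stale
    # copy back. kubectl replace carries the old resourceVersion, so the API
    # server answers with the same "Operation cannot be fulfilled" conflict.
    kubectl get virtualmachineinstances.kubevirt.io testvmi896p9 \
        -n kubevirt-test-default -o yaml > /tmp/stale-vmi.yaml
    kubectl replace -f /tmp/stale-vmi.yaml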
level=info timestamp=2018-08-02T21:06:55.383969Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiz84lxc82x8, existing: true\n" level=info timestamp=2018-08-02T21:06:55.384453Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T21:06:55.384654Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:06:55.384968Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-08-02T21:06:55.385631Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:06:55.519679Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:06:55.519776Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiz84lxc82x8, existing: true\n" level=info timestamp=2018-08-02T21:06:55.520343Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-08-02T21:06:55.520393Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:06:55.520414Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-08-02T21:06:55.520497Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Processing vmi update" level=info timestamp=2018-08-02T21:06:55.525266Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-vxhhj Pod phase: Running level=info timestamp=2018-08-02T21:06:58.623231Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxl74wb kind=Domain uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-08-02T21:06:58.626145Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:06:58.626704Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiz84lxl74wb, existing: true\n" level=info timestamp=2018-08-02T21:06:58.626766Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T21:06:58.626794Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:06:58.626819Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-08-02T21:06:58.626890Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:06:58.644918Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-02T21:06:58.681178Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Synchronization loop succeeded." 
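Each virt-handler block in this dump is one pass of the handler's reconcile loop: it logs whether the VMI is known ("existing: true"), the VMI phase (Scheduled, then Running), the libvirt domain state, and whether an update is required, and it closes with "Synchronization loop succeeded." To follow a single VMI through one handler, filtering the pod log by the VMI name is usually enough; the pod and VMI names below are taken from this dump:

    # Trace one VMI through a handler's sync loop.
    kubectl logs -n kube-system virt-handler-b8bjc | grep testvmiz84lxc82x8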
level=info timestamp=2018-08-02T21:06:58.681268Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiz84lxl74wb, existing: true\n" level=info timestamp=2018-08-02T21:06:58.681291Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-08-02T21:06:58.681316Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:06:58.681334Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-08-02T21:06:58.681413Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Processing vmi update" level=info timestamp=2018-08-02T21:06:58.708238Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Synchronization loop succeeded." Pod name: netcatjlbm4 Pod phase: Succeeded ++ head -n 1 +++ nc -ul 28016 +++ echo +++ nc -up 28016 172.30.143.230 28017 -i 1 -w 1 Hello UDP World! succeeded + x='Hello UDP World!' + echo 'Hello UDP World!' + '[' 'Hello UDP World!' = 'Hello UDP World!' ']' + echo succeeded + exit 0 Pod name: netcatl9lwt Pod phase: Succeeded ++ head -n 1 +++ nc -ul 31016 +++ echo +++ nc -up 31016 192.168.66.102 31017 -i 1 -w 1 Hello UDP World! succeeded + x='Hello UDP World!' + echo 'Hello UDP World!' + '[' 'Hello UDP World!' = 'Hello UDP World!' ']' + echo succeeded + exit 0 Pod name: netcatngk5z Pod phase: Succeeded ++ head -n 1 +++ nc 192.168.66.102 30017 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatnq89k Pod phase: Succeeded ++ head -n 1 +++ echo +++ nc -ul 31016 +++ nc -up 31016 192.168.66.101 31017 -i 1 -w 1 Hello UDP World! succeeded + x='Hello UDP World!' + echo 'Hello UDP World!' + '[' 'Hello UDP World!' = 'Hello UDP World!' ']' + echo succeeded + exit 0 Pod name: netcatqczjg Pod phase: Succeeded ++ head -n 1 +++ nc 172.30.29.227 27017 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatr5fbh Pod phase: Succeeded ++ head -n 1 +++ nc -ul 29016 +++ echo +++ nc -up 29016 172.30.39.151 29017 -i 1 -w 1 + x='Hello UDP World!' + echo 'Hello UDP World!' + '[' 'Hello UDP World!' = 'Hello UDP World!' ']' + echo succeeded + exit 0 Hello UDP World! succeeded Pod name: netcatvxpl9 Pod phase: Succeeded ++ head -n 1 +++ nc 192.168.66.101 30017 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' 
']' + echo succeeded + exit 0 Pod name: virt-launcher-testvmi896p9-8jqvh Pod phase: Running level=info timestamp=2018-08-02T21:05:59.988163Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-02T21:06:00.017817Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:00.048070Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID b75603b8-132a-4705-901b-55730aa32cdf" level=info timestamp=2018-08-02T21:06:00.048479Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-08-02T21:06:00.314122Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-02T21:06:00.338409Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:06:00.354532Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:00.356651Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi896p9 kind= uid=d15c0949-9697-11e8-b0c6-525500d15501 msg="Domain started." level=info timestamp=2018-08-02T21:06:00.360074Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi896p9 kind= uid=d15c0949-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:06:00.361475Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-02T21:06:00.376559Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:06:00.384091Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:00.504260Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi896p9 kind= uid=d15c0949-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:06:00.514324Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi896p9 kind= uid=d15c0949-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:06:01.052310Z pos=monitor.go:222 component=virt-launcher msg="Found PID for b75603b8-132a-4705-901b-55730aa32cdf: 189" Pod name: virt-launcher-testvmifp25bfnhxk-hgrlf Pod phase: Running level=info timestamp=2018-08-02T21:04:56.169477Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-08-02T21:04:56.772579Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-02T21:04:56.821666Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:04:57.003454Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 2ae4eb98-842d-479c-ae56-2472f432a4f4" level=info timestamp=2018-08-02T21:04:57.003899Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-08-02T21:04:57.058921Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-02T21:04:57.100541Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:04:57.101135Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmifp25bfnhxk kind= uid=abcb2ebe-9697-11e8-b0c6-525500d15501 msg="Domain started." 
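The netcat pods above are the verification half of the Expose tests: each drives a TCP or UDP probe at the exposed ClusterIP or NodePort and exits 0 only when the echoed banner matches. Reduced to its core, the UDP variant looks like the sketch below; the in-cluster script additionally pairs the sender with an 'nc -ul' listener on the same source port, and the address and ports shown are this run's values and will differ elsewhere:

    # Send a newline over UDP, capture the first reply line, and compare it
    # against the expected banner.
    x=$(echo | nc -up 28016 172.30.143.230 28017 -i 1 -w 1 | head -n 1)
    [ "$x" = 'Hello UDP World!' ] && echo succeeded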
level=info timestamp=2018-08-02T21:04:57.102688Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifp25bfnhxk kind= uid=abcb2ebe-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:04:57.330348Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:04:57.330929Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-02T21:04:57.346218Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:04:57.503046Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:04:57.822123Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifp25bfnhxk kind= uid=abcb2ebe-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:04:58.008567Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 2ae4eb98-842d-479c-ae56-2472f432a4f4: 178" Pod name: virt-launcher-testvmifp25bzrqv7-sgcgp Pod phase: Running level=info timestamp=2018-08-02T21:04:56.135845Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-02T21:04:56.140993Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6627ec0b-3cb7-467b-abb9-1396094131c8" level=info timestamp=2018-08-02T21:04:56.141424Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-08-02T21:04:56.146220Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:04:56.935958Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-02T21:04:57.032955Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:04:57.076087Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmifp25bzrqv7 kind= uid=ab14123f-9697-11e8-b0c6-525500d15501 msg="Domain started." 
level=info timestamp=2018-08-02T21:04:57.090990Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifp25bzrqv7 kind= uid=ab14123f-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:04:57.182136Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:04:57.185699Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-02T21:04:57.379323Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6627ec0b-3cb7-467b-abb9-1396094131c8: 178" level=info timestamp=2018-08-02T21:04:57.428712Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:04:57.613704Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:04:58.078260Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifp25bzrqv7 kind= uid=ab14123f-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:04:58.088389Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmifp25bzrqv7 kind= uid=ab14123f-9697-11e8-b0c6-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmimtvcv-qgx92 Pod phase: Running level=info timestamp=2018-08-02T21:05:01.281876Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-08-02T21:05:01.984765Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-02T21:05:01.995590Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:05:02.011314Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a1da29b2-93ad-4cbd-a611-cd337d2ec81a" level=info timestamp=2018-08-02T21:05:02.011585Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-08-02T21:05:02.287346Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-02T21:05:02.312046Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:05:02.314049Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmimtvcv kind= uid=aed96a16-9697-11e8-b0c6-525500d15501 msg="Domain started." 
level=info timestamp=2018-08-02T21:05:02.319440Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmimtvcv kind= uid=aed96a16-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:05:02.511072Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:05:02.511640Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-02T21:05:02.525372Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:05:02.864383Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:05:03.016716Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a1da29b2-93ad-4cbd-a611-cd337d2ec81a: 187" level=info timestamp=2018-08-02T21:05:03.658327Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmimtvcv kind= uid=aed96a16-9697-11e8-b0c6-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmiz84lxc82x8-4gj8x Pod phase: Running level=info timestamp=2018-08-02T21:06:54.383113Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Domain defined." level=info timestamp=2018-08-02T21:06:54.881118Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-02T21:06:54.891236Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:55.295154Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 3662df2b-2abc-41c9-926b-26dc5559b40a" level=info timestamp=2018-08-02T21:06:55.297712Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-08-02T21:06:55.312106Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-02T21:06:55.328247Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:06:55.334759Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:55.347932Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-02T21:06:55.349750Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Domain started." 
level=info timestamp=2018-08-02T21:06:55.371205Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:06:55.373411Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:06:55.381491Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:55.524394Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxc82x8 kind= uid=f2515209-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:06:56.301943Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 3662df2b-2abc-41c9-926b-26dc5559b40a: 182" Pod name: virt-launcher-testvmiz84lxl74wb-rm686 Pod phase: Running level=info timestamp=2018-08-02T21:06:57.303081Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Domain defined." level=info timestamp=2018-08-02T21:06:57.794641Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-02T21:06:57.811869Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 01778837-8f62-4bec-bc5d-94aac8b8f85f" level=info timestamp=2018-08-02T21:06:57.812328Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-08-02T21:06:57.822547Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:58.356949Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-02T21:06:58.407975Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:06:58.414820Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Domain started." 
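Every virt-launcher dump above shows the same healthy startup sequence: "Domain defined." -> "domain status: 3:11" -> "Domain started." -> "domain status: 1:1" -> "Found PID for <uuid>". The status pairs look like libvirt state:reason codes (3:11 would be paused/starting-up and 1:1 running/booted if the usual libvirt enums apply; treat that mapping as an educated guess, not something this log confirms). To pull only those milestones from one launcher:

    # Extract lifecycle milestones from a single virt-launcher pod; the pod
    # name is from the dump above and the test namespace must still exist.
    kubectl logs -n kubevirt-test-default virt-launcher-testvmiz84lxl74wb-rm686 \
        | grep -E 'Domain defined|Domain started|Found PID'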
level=info timestamp=2018-08-02T21:06:58.418094Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:06:58.623585Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:58.623779Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-02T21:06:58.641395Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-02T21:06:58.646071Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-08-02T21:06:58.688259Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiz84lxl74wb kind= uid=f250fdc6-9697-11e8-b0c6-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-02T21:06:58.816762Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 01778837-8f62-4bec-bc5d-94aac8b8f85f: 187" • Failure in Spec Setup (BeforeEach) [343.480 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VMI replica set /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:253 Expose ClusterIP service [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:286 Should create a ClusterIP service on VMRS and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:290 Expected error: : 180000000000 expect: timer expired after 180 seconds not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:36 ------------------------------ STEP: Creating a VMRS object with 2 replicas STEP: Start the replica set STEP: Checking the number of ready replicas STEP: add an 'hello world' server on each VMI in the replica set level=info timestamp=2018-08-02T21:12:19.965919Z pos=utils.go:1269 component=tests namespace=kubevirt-test-default name=testvmifp25bfnhxk kind=VirtualMachineInstance uid=abcb2ebe-9697-11e8-b0c6-525500d15501 msg="Login: [{2 \r\n\r\nfailed 10/20: up 116.69. request failed\r\nfailed 11/20: up 128.74. request failed\r\nfailed 12/20: up 140.82. request failed\r\nfailed 13/20: up 153.08. request failed\r\nfailed 14/20: up 165.18. request failed\r\nfailed 15/20: up 177.25. request failed\r\nfailed 16/20: up 189.31. request failed\r\nfailed 17/20: up 201.39. request failed\r\nfailed 18/20: up 213.48. request failed\r\nfailed 19/20: up 225.57. request failed\r\nfailed 20/20: up 237.63. request failed\r\nfailed to read iid from metadata. tried 20\r\nfailed to get instance-id of datasource\r\nTop of dropbear init script\r\nStarting dropbear sshd: failed to get instance-id of datasource\r\nOK\r\nGROWROOT: NOCHANGE: partition 1 is size 71647. 
it cannot be grown\r\n=== system information ===\r\n/dev/root resized successfully [took 0.06s]\r\nPlatform: QEMU Standard PC (Q35 + ICH9, 2009)\r\nContainer: none\r\nArch: x86_64\r\nCPU(s): 1 @ 2099.998 MHz\r\nCores/Sockets/Threads: 1/1/1\r\nVirt-type: \r\nRAM Size: 44MB\r\nDisks:\r\nNAME MAJ:MIN SIZE LABEL MOUNTPOINT\r\nvda 253:0 46137344 \r\nvda1 253:1 36683264 cirros-rootfs /\r\nvda15 253:15 8388608 \r\n=== sshd host keys ===\r\n-----BEGIN SSH HOST KEY KEYS-----\r\nssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCmjeD+BoY4vcBTw/inDCbhjfMpTh+Is52jH0I5fvBO/bl97cWmXReXAq5cYEF6UE56rvGf1N+xnW2GbdAgCnK0/LhKEyzAt1/luxVO8L12LQ3+i2H7Y7Y1Ap8GkqPDgT6Pq2Y0jCGkTnZmjqDx+KZpH3dWTYN4Z0PqFPCg7Kimos3QUSGH7NgRKZb66ASCFVk7iIGjWUSoMD7j4tq4e4BVQYohxN2P7tGCg+KJYCMJrnrWQMDrTzL5xKdFqfJYxMGLUBdJ+Q9QX65rziBp/YcDjwoqJWX+JA00hwI80TEVbFBUueCEz21095CXSfc+X6vdt4Qgp0XEgOhjB9yssNIj root@cirros\r\nssh-dss AAAAB3NzaC1kc3MAAACBAM0wOPORmZea/mqEOkMVgJZNcqDWtCq6j1V6zzvA0i4YeGqd3DRJgMhb0ever+SXJVwYYPUYIJhuABgrIR3Lj5P1X/qnrmRFx447T5KjDrCbpFCIBxJ4rVEYu8Kfjvw8UbuKoIavBcauE5uXWx5x5db929QFbqmJgIaNzi/7+mHnAAAAFQCzew7InRQPBmv3yvYBlZZ8eEDEmQAAAIBlPfYEuHl6+Z2wBIRpRI4bOam2FbvsPTa8vPd6l2hiis6s2JII05CdDcCZiuKjZgsVoEGT0qz/oFgsQKO8w4DXvK+SBF/7Ny8Gnn0LJznOeWKw8lyKeV9VzzxvAqvS2CGpY2qqgL3Ak6eBbRqpPXUJgta1Vuee/AuwNRQ+l+3TzQAAAIB54GDByzWn9DLgNoFJ4qHiyjOEFDdY0/h8UNuFbT/HaxbPvPb+kJmNrPgIydYj8XKAXm+FMVrrNymvOYnp7QRvsZ7wGYRfJdOcsUxP2P/Q0M1jLAKn70+z9uYtHqBX0ClicYglZ32FBSgjdk94idmhXGDvGyA3L0BoN0eZy0dscQ== root@cirros\r\n-----END SSH HOST KEY KEYS-----\r\n=== network info ===\r\nif-info: lo,up,127.0.0.1,8,,\r\nif-info: eth0,up,10.129.0.26,23,fe80::858:aff:fe81:1a/64,\r\nip-route:default via 10.129.0.1 dev eth0 \r\nip-route:10.128.0.0/14 dev eth0 \r\nip-route:10.129.0.0/23 dev eth0 src 10.129.0.26 \r\nip-route:224.0.0.0/4 dev eth0 \r\nip-route6:fe80::/64 dev eth0 metric 256 \r\nip-route6:unreachable default dev lo metric -1 error -101\r\nip-route6:ff00::/8 dev eth0 metric 256 \r\nip-route6:unreachable default dev lo metric -1 error -101\r\n=== datasource: None None ===\r\n=== cirros: current=0.4.0 uptime=251.38 ===\r\n ____ ____ ____\r\n / __/ __ ____ ____ / __ \\/ __/\r\n/ /__ / // __// __// /_/ /\\ \\ \r\n\\___//_//_/ /_/ \\____/___/ \r\n http://cirros-cloud.net\r\n\r\n\r\r\nlogin as 'cirros' user. default password: 'gocubsgo'. use 'sudo' for root. [login as 'cirros' user. default password: 'gocubsgo'. use 'sudo' for root.]} {4 \r\n\rcirros login: \r\r\nlogin as 'cirros' user. default password: 'gocubsgo'. 
use 'sudo' for root.\r\n\rcirros login: []}]" Service cluster-ip-vm successfully exposed for virtualmachine testvmim85kz VM testvmim85kz was scheduled to start 2018/08/02 17:13:04 read closing down: EOF • [SLOW TEST:48.341 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on an VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:318 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:362 Connect to ClusterIP services that was set when VM was offline /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:363 ------------------------------ •• ------------------------------ • [SLOW TEST:19.415 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should update VirtualMachine once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195 ------------------------------ •• ------------------------------ • [SLOW TEST:47.456 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if it gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245 ------------------------------ • [SLOW TEST:41.044 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265 ------------------------------ • [SLOW TEST:55.614 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should stop VirtualMachineInstance if running set to false /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325 ------------------------------ • [SLOW TEST:170.097 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should start and stop VirtualMachineInstance multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333 ------------------------------ • [SLOW TEST:48.301 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should not update the VirtualMachineInstance spec if Running /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346 ------------------------------ • [SLOW TEST:147.846 seconds] 2018/08/02 17:22:06 read closing down: EOF VirtualMachine 2018/08/02 17:22:06 read closing down: EOF 2018/08/02 17:22:06 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should survive guest shutdown, multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387 ------------------------------ VM testvmirglrm was scheduled to start • [SLOW TEST:19.046 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should start a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 ------------------------------ VM testvminch5d 
was scheduled to stop • [SLOW TEST:34.241 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should stop a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467 ------------------------------ 2018/08/02 17:23:34 read closing down: EOF • [SLOW TEST:35.334 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/08/02 17:24:07 read closing down: EOF • [SLOW TEST:33.044 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/08/02 17:26:17 read closing down: EOF • [SLOW TEST:165.873 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/08/02 17:31:44 read closing down: EOF Get https://127.0.0.1:33518/api/v1/namespaces/kube-system/pods?labelSelector=kubevirt.io: unexpected EOF • Failure [290.778 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected : 180000000000 to be nil /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:99 ------------------------------ STEP: Starting and stopping the VirtualMachineInstance number of times STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-08-02T21:26:54.413531Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="Created virtual machine pod virt-launcher-testvmifd7nk-pwf8r" level=info timestamp=2018-08-02T21:27:10.591079Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk 
kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmifd7nk-pwf8r" level=info timestamp=2018-08-02T21:27:12.076609Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-08-02T21:27:12.342672Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="VirtualMachineInstance started." STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-08-02T21:27:45.913419Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="Created virtual machine pod virt-launcher-testvmifd7nk-pwf8r" level=info timestamp=2018-08-02T21:27:45.914098Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmifd7nk-pwf8r" level=info timestamp=2018-08-02T21:27:45.914375Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-08-02T21:27:45.914846Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="VirtualMachineInstance started." STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-08-02T21:28:26.275710Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="Created virtual machine pod virt-launcher-testvmifd7nk-pwf8r" level=info timestamp=2018-08-02T21:28:26.275927Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmifd7nk-pwf8r" level=info timestamp=2018-08-02T21:28:26.276672Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-08-02T21:28:26.277002Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=c6549980-969a-11e8-b0c6-525500d15501 msg="VirtualMachineInstance started." 
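Both failures in this run share a shape: the VMI reaches "VirtualMachineInstance started.", but the serial-console expectation then gives up after 180 seconds ("expect: timer expired"). In the Expose case the guest console shows cloud-init unable to reach its datasource ("failed to get instance-id of datasource") before the login prompt, and the storage failure also records an "unexpected EOF" from the API server mid-request. When triaging locally, attaching to the same serial console the test drives is the quickest first check; a sketch, with the VMI name from the dump above:

    # Attach to the VMI's serial console the way the test helper does.
    # Assumes the kubeconfig's current namespace is kubevirt-test-default.
    virtctl console testvmifd7nk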
STEP: Checking that the VirtualMachineInstance console has expected output level=info timestamp=2018-08-02T21:31:44.404169Z pos=utils.go:1291 component=tests namespace=kubevirt-test-default name=testvmifd7nk kind=VirtualMachineInstance uid=fd3bba61-969a-11e8-b0c6-525500d15501 msg="Login: [{2 \r\n\r\n\u001b[?7h\r\n []}]" • [SLOW TEST:40.168 seconds] 2018/08/02 17:32:24 read closing down: EOF Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113 should create a writeable emptyDisk with the right capacity /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115 ------------------------------ • [SLOW TEST:44.405 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 2018/08/02 17:33:08 read closing down: EOF With an emptyDisk defined and a specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163 should create a writeable emptyDisk with the specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165 ------------------------------ 2018/08/02 17:33:40 read closing down: EOF • [SLOW TEST:31.491 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should be successfully started /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207 ------------------------------ 2018/08/02 17:34:51 read closing down: EOF • [SLOW TEST:71.061 seconds] 2018/08/02 17:34:51 read closing down: EOF Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should not persist data /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218 ------------------------------ 2018/08/02 17:36:51 read closing down: EOF • [SLOW TEST:119.880 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With VirtualMachineInstance with two PVCs /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266 should start vmi multiple times /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278 ------------------------------ • [SLOW TEST:21.073 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:21.302 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given an vm 
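The assertion text "Expected : 180000000000 to be nil" at storage_test.go:99 is easier to read once you know that Gomega prints Go time.Duration values as raw nanoseconds: 180000000000 ns is exactly 180 s, so the CDRom start/stop loop appears to have exhausted a three-minute wait right around the apiserver connection drop ("unexpected EOF" above). A minimal, hypothetical reproduction of that rendering (a sketch, not the actual storage_test.go code):

```go
// Sketch only: shows how a 3-minute time.Duration surfaces as
// "180000000000" in a Gomega nil-assertion failure.
package sketch_test

import (
	"testing"
	"time"

	. "github.com/onsi/gomega"
)

func TestDurationRendering(t *testing.T) {
	RegisterTestingT(t)

	// Hypothetical helper result: non-zero means "gave up after this long".
	waited := 180 * time.Second

	// Fails with a message of the same shape as the log above:
	//   Expected
	//       <time.Duration>: 180000000000
	//   to be nil
	Expect(waited).To(BeNil())
}
```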
• [SLOW TEST:40.168 seconds]
2018/08/02 17:32:24 read closing down: EOF
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With an emptyDisk defined
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113
      should create a writeable emptyDisk with the right capacity
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115
------------------------------
• [SLOW TEST:44.405 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
2018/08/02 17:33:08 read closing down: EOF
    With an emptyDisk defined and a specified serial number
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163
      should create a writeable emptyDisk with the specified serial number
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165
------------------------------
2018/08/02 17:33:40 read closing down: EOF
• [SLOW TEST:31.491 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should be successfully started
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207
------------------------------
2018/08/02 17:34:51 read closing down: EOF
• [SLOW TEST:71.061 seconds]
2018/08/02 17:34:51 read closing down: EOF
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should not persist data
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218
------------------------------
2018/08/02 17:36:51 read closing down: EOF
• [SLOW TEST:119.880 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With VirtualMachineInstance with two PVCs
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266
      should start vmi multiple times
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278
------------------------------
• [SLOW TEST:21.073 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:21.302 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given an vm
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:21.016 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi preset
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:19.096 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi replica set
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
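The four "User Access" entries above exercise the bundled view/edit/admin ClusterRoles against each KubeVirt resource type. For readers reproducing such a check outside the suite, one way to ask the API server the same question is a SelfSubjectAccessReview; the sketch below is a generic client-go illustration (recent client-go signatures), not the code in access_test.go:

```go
// Sketch of an RBAC probe with client-go; this mirrors the intent of the
// access tests, not their exact code.
package main

import (
	"context"
	"fmt"

	authv1 "k8s.io/api/authorization/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// May the current identity (e.g. a service account bound to the "view"
	// ClusterRole) delete VMIs in the test namespace? "view" should not.
	review := &authv1.SelfSubjectAccessReview{
		Spec: authv1.SelfSubjectAccessReviewSpec{
			ResourceAttributes: &authv1.ResourceAttributes{
				Namespace: "kubevirt-test-default",
				Verb:      "delete",
				Group:     "kubevirt.io",
				Resource:  "virtualmachineinstances",
			},
		},
	}
	resp, err := client.AuthorizationV1().SelfSubjectAccessReviews().
		Create(context.TODO(), review, metav1.CreateOptions{})
	if err != nil {
		panic(err)
	}
	fmt.Println("allowed:", resp.Status.Allowed)
}
```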
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 21:40:11 http: TLS handshake error from 10.129.0.1:54394: EOF level=info timestamp=2018-08-02T21:40:15.687673Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:40:20.272313Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 21:40:21 http: TLS handshake error from 10.129.0.1:54402: EOF level=info timestamp=2018-08-02T21:40:30.383320Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 21:40:31 http: TLS handshake error from 10.129.0.1:54410: EOF level=info timestamp=2018-08-02T21:40:34.073883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:40:34.235699Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T21:40:35.190677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 21:40:41 http: TLS handshake error from 10.129.0.1:54420: EOF Pod name: virt-api-7d79764579-wcv25 Pod phase: Running 2018/08/02 21:38:56 http: TLS handshake error from 10.129.0.1:39546: EOF 2018/08/02 21:39:06 http: TLS handshake error from 10.129.0.1:39554: EOF 2018/08/02 21:39:16 http: TLS handshake error from 10.129.0.1:39562: EOF level=info timestamp=2018-08-02T21:39:25.205062Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:39:26 http: TLS handshake error from 10.129.0.1:39570: EOF 2018/08/02 21:39:36 http: TLS handshake error from 10.129.0.1:39578: EOF 2018/08/02 21:39:46 http: TLS handshake error from 10.129.0.1:39586: EOF 2018/08/02 21:39:56 http: TLS handshake error from 10.129.0.1:39594: EOF level=info timestamp=2018-08-02T21:39:57.327498Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:40:06 http: TLS handshake error from 10.129.0.1:39602: EOF 2018/08/02 21:40:16 http: TLS handshake error from 10.129.0.1:39610: EOF 2018/08/02 21:40:26 http: TLS handshake error from 10.129.0.1:39618: EOF level=info timestamp=2018-08-02T21:40:27.711583Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:40:36 http: TLS handshake error from 10.129.0.1:39628: EOF 2018/08/02 21:40:46 http: TLS handshake error from 10.129.0.1:39636: EOF Pod name: virt-controller-7d57d96b65-frjqz Pod phase: Running level=info timestamp=2018-08-02T21:39:48.926252Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9xtdb kind= 
Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:39:48.926252Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9xtdb kind= uid=94f8ccce-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:39:49.966960Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7hn68\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7hn68"
level=info timestamp=2018-08-02T21:39:50.168348Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9xtdb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9xtdb"
level=info timestamp=2018-08-02T21:40:15.584916Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisp6r8 kind= uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:15.585856Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisp6r8 kind= uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:15.689204Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij76gh kind= uid=a4e66f42-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:15.690416Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij76gh kind= uid=a4e66f42-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:15.700754Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:15.703373Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:15.824167Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:15.824427Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:16.053217Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisp6r8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisp6r8"
level=info timestamp=2018-08-02T21:40:16.070307Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic6dqc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic6dqc"
level=info timestamp=2018-08-02T21:40:16.127515Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmij76gh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmij76gh"
level=info timestamp=2018-08-02T21:40:16.600565Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic6dqc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic6dqc"
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:30:20.646264Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:30:21.332084 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
E0802 21:30:27.342022 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:40:48.531208Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="No update processing required"
level=error timestamp=2018-08-02T21:40:51.163957Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic6dqc\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T21:40:51.164158Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic6dqc\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmic6dqc"
level=info timestamp=2018-08-02T21:40:51.164234Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmic6dqc, existing: true\n"
level=info timestamp=2018-08-02T21:40:51.164259Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:40:51.164285Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:40:51.164304Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:40:51.164434Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:40:51.174389Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:40:51.183134Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmic6dqc, existing: true\n"
level=info timestamp=2018-08-02T21:40:51.183173Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:40:51.183201Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:40:51.183219Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:40:51.183381Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:40:51.297250Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:40:36.967082Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi4fnwv, existing: true\n"
level=info timestamp=2018-08-02T21:40:36.967131Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T21:40:36.967171Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:40:36.967203Z pos=vm.go:331 component=virt-handler msg="Domain status: Paused, reason: StartingUp\n"
level=info timestamp=2018-08-02T21:40:36.967271Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:40:36.969203Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:40:36.969428Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmi4fnwv kind=Domain uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-08-02T21:40:37.024586Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:40:37.120581Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:40:37.121389Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi4fnwv, existing: true\n"
level=info timestamp=2018-08-02T21:40:37.122561Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:40:37.122698Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:40:37.123447Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:40:37.123645Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:40:37.130001Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi4fnwv-prwv6
Pod phase: Running
level=info timestamp=2018-08-02T21:40:34.807157Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:40:35.414720Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:40:35.457981Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 834445d6-aee8-4a9e-aece-6733016df48a"
level=info timestamp=2018-08-02T21:40:35.458911Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:40:35.469056Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:36.566412Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 834445d6-aee8-4a9e-aece-6733016df48a: 184"
level=info timestamp=2018-08-02T21:40:36.876040Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:40:36.949377Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:36.951073Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:40:36.953003Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:40:36.969771Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:36.969918Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:40:37.010929Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:37.026901Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:37.129125Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
Pod name: virt-launcher-testvmic6dqc-5fcwm
Pod phase: Running
level=info timestamp=2018-08-02T21:40:35.058905Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:40:35.062866Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 01aa7e72-6908-4b54-8d40-36503e455e1f"
level=info timestamp=2018-08-02T21:40:35.063286Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:40:35.072205Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:35.337111Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:40:35.387754Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:35.388389Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:40:35.397806Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:40:35.779424Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:35.780032Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:40:35.818299Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:36.067057Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 01aa7e72-6908-4b54-8d40-36503e455e1f: 192"
level=info timestamp=2018-08-02T21:40:42.195345Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:51.172208Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:40:51.187367Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmic6dqc kind= uid=a4f00fef-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
Pod name: virt-launcher-testvmij76gh-lzxlw
Pod phase: Running
level=info timestamp=2018-08-02T21:40:32.904190Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:40:33.551607Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:40:33.557871Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 699852b4-1b5e-4bf3-bcf4-5dd3a2da01c0"
level=info timestamp=2018-08-02T21:40:33.558232Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:40:33.562410Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:34.055605Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:40:34.080049Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:34.092907Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmij76gh kind= uid=a4e66f42-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:40:34.094899Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij76gh kind= uid=a4e66f42-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:40:34.100282Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:34.100449Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:40:34.111619Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:34.115127Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:34.565193Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 699852b4-1b5e-4bf3-bcf4-5dd3a2da01c0: 184"
level=info timestamp=2018-08-02T21:40:35.525871Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmij76gh kind= uid=a4e66f42-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
Pod name: virt-launcher-testvmisp6r8-8w6xx
Pod phase: Running
level=info timestamp=2018-08-02T21:40:33.425929Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:40:33.444172Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:33.448507Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID c921ef92-0e59-4d9d-91f7-2ffa3d6bc1d6"
level=info timestamp=2018-08-02T21:40:33.449864Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:40:33.726733Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:40:33.761849Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:33.767769Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:33.768979Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:40:33.788074Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmisp6r8 kind= uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:40:33.792119Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisp6r8 kind= uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:40:33.793287Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:40:33.805141Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:40:34.456501Z pos=monitor.go:222 component=virt-launcher msg="Found PID for c921ef92-0e59-4d9d-91f7-2ffa3d6bc1d6: 182"
level=info timestamp=2018-08-02T21:40:35.674926Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisp6r8 kind= uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:40:40.142446Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmisp6r8 kind= uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
• Failure in Spec Setup (BeforeEach) [39.233 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.errorString | 0xc42039aa70>: {
            s: "Timeout trying to connect to the virtual machine instance",
        }
        Timeout trying to connect to the virtual machine instance
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:135
------------------------------
level=info timestamp=2018-08-02T21:40:17.079852Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmisp6r8 kind=VirtualMachineInstance uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmisp6r8-8w6xx"
level=info timestamp=2018-08-02T21:40:32.933497Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmisp6r8 kind=VirtualMachineInstance uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmisp6r8-8w6xx"
level=info timestamp=2018-08-02T21:40:35.052122Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmisp6r8 kind=VirtualMachineInstance uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-08-02T21:40:36.031555Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmisp6r8 kind=VirtualMachineInstance uid=a4d4af3a-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance started."
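This Networking BeforeEach fails even though the launcher logs show the domain started: the setup's connectivity gate — logging into the VMI over the serial console, per the console 500s visible in the virt-api logs below — appears never to succeed, so after the wait budget it returns "Timeout trying to connect to the virtual machine instance", which the "not to have occurred" assertion then reports. A sketch of that kind of poll-until-deadline gate, with a hypothetical canConnect probe standing in for the console dial (this is not the vmi_networking_test.go code):

```go
// Sketch of a poll-until-deadline readiness gate; canConnect is a
// hypothetical stand-in for the console/expecter dial used by the tests.
package sketch

import (
	"errors"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

func waitForVMIConsole(canConnect func() bool, timeout time.Duration) error {
	err := wait.PollImmediate(2*time.Second, timeout, func() (bool, error) {
		return canConnect(), nil // retry until true or the deadline passes
	})
	if err != nil {
		// The error the "not to have occurred" assertion then reports.
		return errors.New("Timeout trying to connect to the virtual machine instance")
	}
	return nil
}
```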
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
level=info timestamp=2018-08-02T21:40:50.993364Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 21:40:51 http: TLS handshake error from 10.129.0.1:54428: EOF
level=info timestamp=2018-08-02T21:40:52.250593Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T21:40:52.251147Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T21:40:55.470082Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:41:01.611567Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 21:41:01 http: TLS handshake error from 10.129.0.1:54438: EOF
2018/08/02 21:41:11 http: TLS handshake error from 10.129.0.1:54446: EOF
level=info timestamp=2018-08-02T21:41:11.706463Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:41:11.737205Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:41:12.955971Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T21:41:21.602319Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 21:41:21 http: TLS handshake error from 10.129.0.1:54454: EOF
2018/08/02 21:41:31 http: TLS handshake error from 10.129.0.1:54462: EOF
level=info timestamp=2018-08-02T21:41:38.056548Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:40:26 http: TLS handshake error from 10.129.0.1:39618: EOF
level=info timestamp=2018-08-02T21:40:27.711583Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:40:36 http: TLS handshake error from 10.129.0.1:39628: EOF
2018/08/02 21:40:46 http: TLS handshake error from 10.129.0.1:39636: EOF
level=info timestamp=2018-08-02T21:40:52.218602Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded"
level=error timestamp=2018-08-02T21:40:54.031396Z pos=subresource.go:85 component=virt-api msg="connection failed: command terminated with exit code 1"
2018/08/02 21:40:54 http: response.WriteHeader on hijacked connection
level=error timestamp=2018-08-02T21:40:54.031703Z pos=subresource.go:97 component=virt-api reason="read tcp 10.129.0.6:8443->10.128.0.1:38132: use of closed network connection" msg="error ecountered reading from websocket stream"
level=info timestamp=2018-08-02T21:40:54.031833Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmisp6r8/console proto=HTTP/1.1 statusCode=500 contentLength=0
2018/08/02 21:40:56 http: TLS handshake error from 10.129.0.1:39646: EOF
2018/08/02 21:41:06 http: TLS handshake error from 10.129.0.1:39654: EOF
2018/08/02 21:41:17 http: TLS handshake error from 10.129.0.1:39662: EOF
2018/08/02 21:41:26 http: TLS handshake error from 10.129.0.1:39670: EOF
2018/08/02 21:41:36 http: TLS handshake error from 10.129.0.1:39678: EOF
level=info timestamp=2018-08-02T21:41:37.889516Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded"
Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:40:15.824427Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4fnwv kind= uid=a4f39c07-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:16.053217Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisp6r8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisp6r8"
level=info timestamp=2018-08-02T21:40:16.070307Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic6dqc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic6dqc"
level=info timestamp=2018-08-02T21:40:16.127515Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmij76gh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmij76gh"
level=info timestamp=2018-08-02T21:40:16.600565Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmic6dqc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmic6dqc"
level=info timestamp=2018-08-02T21:40:53.929766Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidgfnf kind= uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:53.931401Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidgfnf kind= uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:53.969591Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:53.969847Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:53.993474Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:53.993591Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:54.083703Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik8n8w kind= uid=bbca3e2e-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:40:54.083873Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik8n8w kind= uid=bbca3e2e-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:40:54.714770Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipvlst\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipvlst"
level=info timestamp=2018-08-02T21:40:54.767141Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik8n8w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik8n8w"
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:30:20.646264Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:30:21.332084 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
E0802 21:30:27.342022 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:41:13.652230Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmipvlst kind=Domain uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-08-02T21:41:13.784124Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:41:13.918134Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:41:13.918638Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipvlst, existing: true\n"
level=info timestamp=2018-08-02T21:41:13.918977Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T21:41:13.919238Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:41:13.919488Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:41:13.920058Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:41:14.632476Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:41:14.718287Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmipvlst, existing: true\n"
level=info timestamp=2018-08-02T21:41:14.718468Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:41:14.718578Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:41:14.718630Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:41:14.727736Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:41:14.774325Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:41:16.530178Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="No update processing required"
level=error timestamp=2018-08-02T21:41:16.728168Z pos=vm.go:431 component=virt-handler namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8tvtl\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T21:41:16.729031Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8tvtl\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8tvtl"
level=info timestamp=2018-08-02T21:41:16.729482Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi8tvtl, existing: true\n"
level=info timestamp=2018-08-02T21:41:16.729945Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:41:16.730412Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:41:16.730715Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:41:16.731125Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:41:16.737833Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:41:17.052410Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi8tvtl, existing: true\n"
level=info timestamp=2018-08-02T21:41:17.111673Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:41:17.152397Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:41:17.178454Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:41:20.091536Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:41:20.630472Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi8tvtl-qqtgz
Pod phase: Running
level=info timestamp=2018-08-02T21:41:14.578468Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:14.586500Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f61b8896-0133-4648-9ba4-3d3ddfa35714"
level=info timestamp=2018-08-02T21:41:14.592722Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:14.899250Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:15.165508Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:15.260794Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:15.268875Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:15.270660Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:15.323019Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:15.323286Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:15.370619Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:15.596637Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f61b8896-0133-4648-9ba4-3d3ddfa35714: 193"
level=info timestamp=2018-08-02T21:41:16.320067Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:16.736694Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:20.095091Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi8tvtl kind= uid=bbbffd44-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
Pod name: virt-launcher-testvmidgfnf-6wzb4
Pod phase: Running
level=info timestamp=2018-08-02T21:41:11.036741Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:41:11.503828Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:11.511446Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:12.090947Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 1d996a9a-d96a-4b1a-8ae9-361265963a72"
level=info timestamp=2018-08-02T21:41:12.093147Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:12.335978Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:12.371113Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:12.379179Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmidgfnf kind= uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:12.381246Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidgfnf kind= uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:12.590649Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:12.590838Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:12.621681Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:12.705388Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:13.104933Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 1d996a9a-d96a-4b1a-8ae9-361265963a72: 190"
level=info timestamp=2018-08-02T21:41:13.234452Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmidgfnf kind= uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
Pod name: virt-launcher-testvmik8n8w-lbrgf
Pod phase: Running
level=info timestamp=2018-08-02T21:41:12.501020Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:41:13.293196Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:13.293453Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 38931466-8bb1-4e90-a23b-7e1f875fdcaf"
level=info timestamp=2018-08-02T21:41:13.301726Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:13.306597Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:13.697604Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:13.748374Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:13.763945Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmik8n8w kind= uid=bbca3e2e-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:13.773928Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmik8n8w kind= uid=bbca3e2e-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:13.835060Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:13.835228Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:13.872094Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:13.975894Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:14.312940Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 38931466-8bb1-4e90-a23b-7e1f875fdcaf: 190"
level=info timestamp=2018-08-02T21:41:14.566386Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmik8n8w kind= uid=bbca3e2e-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
Pod name: virt-launcher-testvmipvlst-xtvjs
Pod phase: Running
level=info timestamp=2018-08-02T21:41:11.885838Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:41:12.781942Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:12.800149Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6ff1b361-d991-4596-804b-f01e5cca042b"
level=info timestamp=2018-08-02T21:41:12.805908Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:12.991191Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:13.365125Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:13.411096Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:13.411664Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:13.421351Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:13.657983Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:13.658288Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:13.679867Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:13.784673Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:13.810713Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6ff1b361-d991-4596-804b-f01e5cca042b: 190"
level=info timestamp=2018-08-02T21:41:14.765394Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipvlst kind= uid=bbc43504-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
• Failure in Spec Setup (BeforeEach) [46.394 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance with pod network connectivity explicitly set
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*errors.errorString | 0xc420a024e0>: {
            s: "Timeout trying to connect to the virtual machine instance",
        }
        Timeout trying to connect to the virtual machine instance
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:135
------------------------------
level=info timestamp=2018-08-02T21:40:55.309944Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmidgfnf kind=VirtualMachineInstance uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmidgfnf-6wzb4"
level=info timestamp=2018-08-02T21:41:11.570837Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmidgfnf kind=VirtualMachineInstance uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmidgfnf-6wzb4"
level=info timestamp=2018-08-02T21:41:14.645687Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmidgfnf kind=VirtualMachineInstance uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-08-02T21:41:14.977796Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmidgfnf kind=VirtualMachineInstance uid=bbb84687-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance started."
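A recurring motif in the controller and handler dumps above is "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ...: the object has been modified". That is Kubernetes optimistic concurrency at work: an update carried a stale resourceVersion, the API server answered 409 Conflict, and the component re-enqueued the object for another pass — which is why the messages are logged at info level and the sync loops still report success. KubeVirt retries through its work queue, as logged; for a standalone client the standard remedy looks like this sketch (VMI and VMIClient are hypothetical stand-ins for the generated clientset types):

```go
// Sketch of the standard client-go conflict-retry pattern; RetryOnConflict
// re-runs the closure with backoff whenever it returns a 409 Conflict.
package sketch

import (
	"k8s.io/client-go/util/retry"
)

// Hypothetical stand-ins; only the shape matters for the sketch. A real
// implementation must surface apierrors so IsConflict() can detect 409s.
type VMI struct {
	ResourceVersion string
	Phase           string
}

type VMIClient interface {
	Get(name string) (*VMI, error)  // returns the current resourceVersion
	Update(vmi *VMI) (*VMI, error)  // rejected with 409 if the version is stale
}

// updateWithRetry re-reads the object and re-applies the mutation on every
// attempt, so each Update carries a fresh resourceVersion.
func updateWithRetry(c VMIClient, name string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		vmi, err := c.Get(name)
		if err != nil {
			return err
		}
		vmi.Phase = "Running" // the mutation being applied
		_, err = c.Update(vmi)
		return err
	})
}
```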
2018/08/02 17:42:23 read closing down: EOF

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:41:21 http: TLS handshake error from 10.129.0.1:54454: EOF
2018/08/02 21:41:31 http: TLS handshake error from 10.129.0.1:54462: EOF
level=info timestamp=2018-08-02T21:41:38.056548Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:41:38.889262Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T21:41:38.976170Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T21:41:38.993997Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 21:41:41 http: TLS handshake error from 10.129.0.1:54472: EOF
2018/08/02 21:41:51 http: TLS handshake error from 10.129.0.1:54480: EOF
level=info timestamp=2018-08-02T21:41:51.974474Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T21:41:52.669671Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 21:42:01 http: TLS handshake error from 10.129.0.1:54488: EOF
level=info timestamp=2018-08-02T21:42:02.849570Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded"
level=info timestamp=2018-08-02T21:42:03.587171Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 21:42:11 http: TLS handshake error from 10.129.0.1:54498: EOF
2018/08/02 21:42:21 http: TLS handshake error from 10.129.0.1:54506: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:41:06 http: TLS handshake error from 10.129.0.1:39654: EOF
2018/08/02 21:41:17 http: TLS handshake error from 10.129.0.1:39662: EOF
2018/08/02 21:41:26 http: TLS handshake error from 10.129.0.1:39670: EOF
2018/08/02 21:41:36 http: TLS handshake error from 10.129.0.1:39678: EOF
level=info timestamp=2018-08-02T21:41:37.889516Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded"
level=error timestamp=2018-08-02T21:41:39.961493Z pos=subresource.go:85 component=virt-api msg=
2018/08/02 21:41:39 http: response.WriteHeader on hijacked connection
level=error timestamp=2018-08-02T21:41:39.962264Z pos=subresource.go:97 component=virt-api reason="read tcp 10.129.0.6:8443->10.128.0.1:38304: use of closed network connection" msg="error ecountered reading from websocket stream"
level=info timestamp=2018-08-02T21:41:39.962919Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmidgfnf/console proto=HTTP/1.1 statusCode=200 contentLength=0
2018/08/02 21:41:46 http: TLS handshake error from 10.129.0.1:39688: EOF
2018/08/02 21:41:56 http: TLS handshake error from 10.129.0.1:39696: EOF
2018/08/02 21:42:06 http: TLS handshake error from 10.129.0.1:39706: EOF
2018/08/02 21:42:17 http: TLS handshake error from 10.129.0.1:39714: EOF
level=info timestamp=2018-08-02T21:42:22.169149Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:42:23.652281Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:40:54.714770Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipvlst\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipvlst"
level=info timestamp=2018-08-02T21:40:54.767141Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik8n8w\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik8n8w"
level=info timestamp=2018-08-02T21:41:40.090035Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6llvm kind= uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:41:40.090594Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6llvm kind= uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:41:40.128240Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiklffm kind= uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:41:40.129363Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiklffm kind= uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:41:40.193386Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:41:40.193981Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:41:40.292126Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:41:40.292356Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:41:40.382062Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6llvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6llvm"
level=info timestamp=2018-08-02T21:41:40.655026Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirwgcw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirwgcw"
level=info timestamp=2018-08-02T21:41:40.656051Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiklffm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiklffm"
level=info timestamp=2018-08-02T21:41:41.131304Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivlztp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivlztp"
level=info timestamp=2018-08-02T21:41:41.504338Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivlztp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivlztp"

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:30:20.646264Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:30:21.332084 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
E0802 21:30:27.342022 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:41:58.626040Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmirwgcw kind=Domain uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-08-02T21:41:58.783401Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:41:58.783511Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirwgcw, existing: true\n"
level=info timestamp=2018-08-02T21:41:58.783535Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T21:41:58.783561Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:41:58.783579Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:41:58.783628Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:41:58.802682Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:41:59.920376Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:42:00.190923Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirwgcw, existing: true\n"
level=info timestamp=2018-08-02T21:42:00.202545Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:42:00.290909Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:42:00.305504Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:42:00.331622Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:42:00.520020Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:41:59.902842Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmivlztp kind=Domain uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Domain is in state Running reason Unknown"
level=info timestamp=2018-08-02T21:41:59.955575Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:41:59.962295Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:41:59.968510Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmivlztp, existing: true\n"
level=info timestamp=2018-08-02T21:41:59.968596Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T21:41:59.968625Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:41:59.968645Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:41:59.968869Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:42:00.239615Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:42:00.239708Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmivlztp, existing: true\n"
level=info timestamp=2018-08-02T21:42:00.239730Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:42:00.239757Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:42:00.239774Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n"
level=info timestamp=2018-08-02T21:42:00.248496Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T21:42:00.265458Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi6llvm-95n2x
Pod phase: Running
level=info timestamp=2018-08-02T21:41:55.729641Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:41:56.426672Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:56.436713Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID d84399be-31ef-4fbb-8d5d-b28fb55bf02b"
level=info timestamp=2018-08-02T21:41:56.439992Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:56.440189Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:57.192427Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:57.249520Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:57.284296Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:57.295807Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:57.296654Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6llvm kind= uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:57.342058Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6llvm kind= uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:57.350744Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:57.410098Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:57.456877Z pos=monitor.go:222 component=virt-launcher msg="Found PID for d84399be-31ef-4fbb-8d5d-b28fb55bf02b: 183"
level=info timestamp=2018-08-02T21:41:57.786226Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6llvm kind= uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmiklffm-rf8rb
Pod phase: Running
level=info timestamp=2018-08-02T21:41:57.551774Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmiklffm kind= uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Domain defined."
level=info timestamp=2018-08-02T21:41:58.189003Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:58.203363Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:58.205654Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a68d1441-edb3-4f30-b71e-451f76493ca1"
level=info timestamp=2018-08-02T21:41:58.206066Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:58.502846Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:58.535649Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:58.539829Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmiklffm kind= uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:58.542690Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiklffm kind= uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:58.570836Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:58.571569Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:58.581890Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:58.600488Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:58.630408Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmiklffm kind= uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:59.209706Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a68d1441-edb3-4f30-b71e-451f76493ca1: 184"

Pod name: virt-launcher-testvmirwgcw-fk9c6
Pod phase: Running
level=info timestamp=2018-08-02T21:41:57.214005Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:41:58.026715Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:58.036965Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 7d9adae3-f467-457d-8dfd-e506b37c18e7"
level=info timestamp=2018-08-02T21:41:58.037192Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:58.093065Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:58.327000Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:58.355381Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:58.375920Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:58.380564Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:58.543073Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:58.543254Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:58.568078Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:58.825635Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:59.059657Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 7d9adae3-f467-457d-8dfd-e506b37c18e7: 188"
level=info timestamp=2018-08-02T21:42:00.479831Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmirwgcw kind= uid=d74860c7-969c-11e8-8cb1-525500d15501 msg="Synced vmi"

Pod name: virt-launcher-testvmivlztp-rk9jm
Pod phase: Running
level=info timestamp=2018-08-02T21:41:58.382537Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received"
level=info timestamp=2018-08-02T21:41:59.361736Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11"
level=info timestamp=2018-08-02T21:41:59.378002Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6061b943-8a5c-450d-bee0-fb43308e9313"
level=info timestamp=2018-08-02T21:41:59.378964Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s"
level=info timestamp=2018-08-02T21:41:59.512438Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:59.845721Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received"
level=info timestamp=2018-08-02T21:41:59.888192Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:59.903500Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:41:59.907539Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-08-02T21:41:59.945395Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Domain started."
level=info timestamp=2018-08-02T21:41:59.948173Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:41:59.951510Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-08-02T21:41:59.957863Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-02T21:42:00.254165Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmivlztp kind= uid=d7517993-969c-11e8-8cb1-525500d15501 msg="Synced vmi"
level=info timestamp=2018-08-02T21:42:00.383112Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6061b943-8a5c-450d-bee0-fb43308e9313: 182"

• Failure in Spec Setup (BeforeEach) [46.980 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance with custom MAC address
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Expected error:
        <*kubecli.AsyncSubresourceError | 0xc420d8cde0>: {
            err: "Can't connect to websocket (503): service unavailable\n\n",
            StatusCode: 503,
        }
        Can't connect to websocket (503): service unavailable
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:136
------------------------------
level=info timestamp=2018-08-02T21:41:41.469922Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6llvm kind=VirtualMachineInstance uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6llvm-95n2x"
level=info timestamp=2018-08-02T21:41:56.133225Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6llvm kind=VirtualMachineInstance uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi6llvm-95n2x"
level=info timestamp=2018-08-02T21:41:58.348600Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6llvm kind=VirtualMachineInstance uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-08-02T21:41:58.642149Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6llvm kind=VirtualMachineInstance uid=d73d788e-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance started."
level=info timestamp=2018-08-02T21:42:25.618394Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiklffm kind=VirtualMachineInstance uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiklffm-rf8rb"
level=info timestamp=2018-08-02T21:42:25.618647Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiklffm kind=VirtualMachineInstance uid=d7420426-969c-11e8-8cb1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiklffm-rf8rb"
level=info timestamp=2018-08-02T21:42:25.619351Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiklffm kind=VirtualMachineInstance uid=d7420426-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-08-02T21:42:25.619565Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiklffm kind=VirtualMachineInstance uid=d7420426-969c-11e8-8cb1-525500d15501 msg="VirtualMachineInstance started."
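This second failure is the same setup giving up one layer lower: virt-api refused the console websocket upgrade with a 503, which kubecli surfaces as the AsyncSubresourceError above. A hedged sketch of dialing that subresource endpoint directly with gorilla/websocket; the host, token, and VMI name are placeholders built from the URL pattern in the virt-api logs, and the real suite goes through kubecli rather than dialing by hand:

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"

	"github.com/gorilla/websocket"
)

func main() {
	// Hypothetical endpoint assembled from the subresource URL pattern seen
	// in the logs; host, namespace, and VMI name are illustrative only.
	endpoint := "wss://virt-api.example:8443/apis/subresources.kubevirt.io/v1alpha2" +
		"/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi/console"

	dialer := websocket.Dialer{
		// Test clusters commonly use self-signed certs; never do this in prod.
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	header := http.Header{"Authorization": {"Bearer <token>"}}

	conn, resp, err := dialer.Dial(endpoint, header)
	if err != nil {
		// A 503 at this point matches the "Can't connect to websocket (503)"
		// error wrapped in the failure above.
		if resp != nil {
			fmt.Println("handshake failed with status:", resp.StatusCode)
		}
		fmt.Println(err)
		return
	}
	defer conn.Close()
	fmt.Println("console websocket established")
}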
2018/08/02 17:44:06 read closing down: EOF
2018/08/02 17:44:17 read closing down: EOF
2018/08/02 17:44:28 read closing down: EOF
2018/08/02 17:44:39 read closing down: EOF
2018/08/02 17:44:40 read closing down: EOF
2018/08/02 17:44:42 read closing down: EOF
2018/08/02 17:44:43 read closing down: EOF
• [SLOW TEST:136.609 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the internet
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
2018/08/02 17:44:43 read closing down: EOF
• [SLOW TEST:5.478 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on the same node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:6.214 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on a different node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
•••
------------------------------
• [SLOW TEST:6.860 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  with a service matching the vmi exposed
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:272
    should fail to reach the vmi if an invalid servicename is used
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:303
------------------------------
• [SLOW TEST:5.367 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  with a subdomain and a headless service given
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:319
    should be able to reach the vmi via its unique fully qualified domain name
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:342
------------------------------
2018/08/02 17:46:02 read closing down: EOF
2018/08/02 17:46:03 read closing down: EOF
• [SLOW TEST:45.184 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom interface model
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:368
    should expose the right device type to the guest
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:369
------------------------------
2018/08/02 17:46:04 read closing down: EOF
2018/08/02 17:46:05 read closing down: EOF
•2018/08/02 17:46:37 read closing down: EOF
2018/08/02 17:46:38 read closing down: EOF
------------------------------
• [SLOW TEST:32.529 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:402
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:403
------------------------------
2018/08/02 17:47:10 read closing down: EOF
• [SLOW TEST:32.643 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address in non-conventional format
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:414
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:415
2018/08/02 17:47:10 read closing down: EOF
------------------------------
2018/08/02 17:47:44 read closing down: EOF
2018/08/02 17:47:45 read closing down: EOF
• [SLOW TEST:34.370 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address and slirp interface
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:427
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:428
------------------------------
2018/08/02 17:48:28 read closing down: EOF
• [SLOW TEST:44.933 seconds]
Networking
2018/08/02 17:48:30 read closing down: EOF
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with disabled automatic attachment of interfaces
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:440
    should not configure any external interfaces
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:441
------------------------------
•••••••••••••
------------------------------
• [SLOW TEST:18.908 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    should start it
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:80
------------------------------
• [SLOW TEST:19.230 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    should attach virt-launcher to it
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:86
------------------------------
••••2018/08/02 17:51:08 read closing down: EOF

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:50:40 http: TLS handshake error from 10.129.0.1:46672: EOF
level=info timestamp=2018-08-02T21:50:41.654541Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:50:41.722128Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:50:50 http: TLS handshake error from 10.129.0.1:46680: EOF
level=info timestamp=2018-08-02T21:50:52.379445Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:51:00 http: TLS handshake error from 10.129.0.1:46688: EOF
level=info timestamp=2018-08-02T21:51:07.811034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:50:52 http: TLS handshake error from 10.129.0.1:43552: EOF
2018/08/02 21:51:02 http: TLS handshake error from 10.129.0.1:43560: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
E0802 21:30:21.332084 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
E0802 21:30:27.342022 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
level=info timestamp=2018-08-02T21:49:59.218640Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer"
level=info timestamp=2018-08-02T21:49:59.223396Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer"
level=info timestamp=2018-08-02T21:49:59.223698Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-08-02T21:49:59.223819Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer"
level=info timestamp=2018-08-02T21:49:59.224010Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer"
level=info timestamp=2018-08-02T21:49:59.224109Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-08-02T21:49:59.224160Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-08-02T21:49:59.224260Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-08-02T21:49:59.224750Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-08-02T21:49:59.231416Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-08-02T21:49:59.250700Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-08-02T21:49:59.254156Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-08-02T21:49:59.256543Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:20.477085Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-02T21:50:20.575482Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-02T21:50:20.625395Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-08-02T21:50:20.627168Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"
E0802 21:50:23.779215 3728 reflector.go:205] kubevirt.io/kubevirt/pkg/virt-handler/vm.go:224: Failed to list *v1.VirtualMachineInstance: Get https://172.30.0.1:443/apis/kubevirt.io/v1alpha2/virtualmachineinstances?labelSelector=kubevirt.io%!F(MISSING)nodeName+in+%!n(MISSING)ode02%!&(MISSING)limit=500&resourceVersion=0: dial tcp 172.30.0.1:443: connect: no route to host
level=info timestamp=2018-08-02T21:50:25.281560Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.281642Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n"
level=info timestamp=2018-08-02T21:50:25.281670Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.281744Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvminbztq-cw9bw
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.557204Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T21:50:25.557423Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T21:50:25.558952Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T21:50:35.981228Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T21:50:36.053000Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvminbztq"
level=info timestamp=2018-08-02T21:50:36.057100Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T21:50:36.057634Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
------------------------------
• Failure [109.907 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with boot order
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174
      should be able to boot from selected disk
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        Alpine as first boot [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Expected error:
            : 90000000000
            expect: timer expired after 90 seconds
        not to have occurred

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:198
------------------------------
STEP: defining a VirtualMachineInstance with an Alpine disk
STEP: adding a Cirros Disk
STEP: setting boot order
STEP: starting VirtualMachineInstance
STEP: Waiting the VirtualMachineInstance start
level=info timestamp=2018-08-02T21:49:20.344387Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Created virtual machine pod virt-launcher-testvminbztq-cw9bw"
level=info timestamp=2018-08-02T21:49:36.215885Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvminbztq-cw9bw"
level=info timestamp=2018-08-02T21:49:38.342783Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-08-02T21:49:38.415711Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="VirtualMachineInstance started."
STEP: Checking console text
level=info timestamp=2018-08-02T21:51:08.704233Z pos=utils.go:1232 component=tests namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="[{2 \r\n\r\n\u001b[?7h\r\n []}]"

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:50:40 http: TLS handshake error from 10.129.0.1:46672: EOF
level=info timestamp=2018-08-02T21:50:41.654541Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:50:41.722128Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:50:50 http: TLS handshake error from 10.129.0.1:46680: EOF
level=info timestamp=2018-08-02T21:50:52.379445Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:51:00 http: TLS handshake error from 10.129.0.1:46688: EOF
level=info timestamp=2018-08-02T21:51:07.811034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:51:10 http: TLS handshake error from 10.129.0.1:46696: EOF
2018/08/02 21:51:20 http: TLS handshake error from 10.129.0.1:46704: EOF
2018/08/02 21:51:30 http: TLS handshake error from 10.129.0.1:46712: EOF
level=info timestamp=2018-08-02T21:51:37.898628Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:50:52 http: TLS handshake error from 10.129.0.1:43552: EOF
2018/08/02 21:51:02 http: TLS handshake error from 10.129.0.1:43560: EOF
2018/08/02 21:51:12 http: TLS handshake error from 10.129.0.1:43568: EOF
2018/08/02 21:51:22 http: TLS handshake error from 10.129.0.1:43576: EOF
2018/08/02 21:51:32 http: TLS handshake error from 10.129.0.1:43584: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:49:59.223396Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer"
level=info timestamp=2018-08-02T21:49:59.223698Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer"
level=info timestamp=2018-08-02T21:49:59.223819Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer"
level=info timestamp=2018-08-02T21:49:59.224010Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer"
level=info timestamp=2018-08-02T21:49:59.224109Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-08-02T21:49:59.224160Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-08-02T21:49:59.224260Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-08-02T21:49:59.224750Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-08-02T21:49:59.231416Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-08-02T21:49:59.250700Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-08-02T21:49:59.254156Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-08-02T21:49:59.256543Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-08-02T21:51:39.534669Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:51:39.538147Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:51:40.043376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt"

Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmijqqnt-kfrh9
Pod phase: Pending

• Failure [31.317 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with boot order
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174
      should be able to boot from selected disk
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        Cirros as first boot [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Expected
            <*errors.StatusError | 0xc420f221b0>: {
                ErrStatus: {
                    TypeMeta: {Kind: "", APIVersion: ""},
                    ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                    Status: "Failure",
                    Message: "Timeout: request did not complete within allowed duration",
                    Reason: "Timeout",
                    Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                    Code: 504,
                },
            }
        to be nil

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:187
------------------------------
STEP: defining a VirtualMachineInstance with an Alpine disk
STEP: adding a Cirros Disk
STEP: setting boot order
STEP: starting VirtualMachineInstance

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:50:40 http: TLS handshake error from 10.129.0.1:46672: EOF
level=info timestamp=2018-08-02T21:50:41.654541Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T21:50:41.722128Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:50:50 http: TLS handshake error from 10.129.0.1:46680: EOF
level=info timestamp=2018-08-02T21:50:52.379445Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:51:00 http: TLS handshake error from 10.129.0.1:46688: EOF
level=info timestamp=2018-08-02T21:51:07.811034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:51:10 http: TLS handshake error from 10.129.0.1:46696: EOF
2018/08/02 21:51:20 http: TLS handshake error from 10.129.0.1:46704: EOF
2018/08/02 21:51:30 http: TLS handshake error from 10.129.0.1:46712: EOF
level=info timestamp=2018-08-02T21:51:37.898628Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:51:40 http: TLS handshake error from 10.129.0.1:46720: EOF
2018/08/02 21:51:50 http: TLS handshake error from 10.129.0.1:46728: EOF
2018/08/02 21:52:00 http: TLS handshake error from 10.129.0.1:46736: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:50:52 http: TLS handshake error from 10.129.0.1:43552: EOF
2018/08/02 21:51:02 http: TLS handshake error from 10.129.0.1:43560: EOF
2018/08/02 21:51:12 http: TLS handshake error from 10.129.0.1:43568: EOF
2018/08/02 21:51:22 http: TLS handshake error from 10.129.0.1:43576: EOF
2018/08/02 21:51:32 http: TLS handshake error from 10.129.0.1:43584: EOF
2018/08/02 21:51:42 http: TLS handshake error from 10.129.0.1:43592: EOF
2018/08/02 21:51:52 http: TLS handshake error from 10.129.0.1:43600: EOF
2018/08/02 21:52:02 http: TLS handshake error from 10.129.0.1:43608: EOF
level=info timestamp=2018-08-02T21:52:07.799545Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:49:59.224109Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer"
level=info timestamp=2018-08-02T21:49:59.224160Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer"
level=info timestamp=2018-08-02T21:49:59.224260Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer"
level=info timestamp=2018-08-02T21:49:59.224750Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller."
level=info timestamp=2018-08-02T21:49:59.231416Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-08-02T21:49:59.250700Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-08-02T21:49:59.254156Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-08-02T21:49:59.256543Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-08-02T21:51:39.534669Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:51:39.538147Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:51:40.043376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:51:40.554200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijqqnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2a60590d-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:52:10.842003Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:52:10.842936Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:52:10.998204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk" Pod name: virt-handler-b8bjc Pod phase: Running level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n" level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-vxhhj Pod phase: Running level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz" level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi96ftk-rfpx8 Pod phase: Pending • Failure [30.863 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206 should retry starting the VirtualMachineInstance [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:207 Expected <*errors.StatusError | 0xc420153d40>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:221 ------------------------------ STEP: Starting a VirtualMachineInstance Pod name: disks-images-provider-hrc9c Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-lrcb2 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-r89xz Pod phase: Running 2018/08/02 21:50:50 http: TLS handshake error from 10.129.0.1:46680: EOF level=info timestamp=2018-08-02T21:50:52.379445Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:51:00 http: TLS handshake error from 10.129.0.1:46688: EOF level=info timestamp=2018-08-02T21:51:07.811034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:51:10 http: TLS handshake error from 10.129.0.1:46696: EOF 2018/08/02 21:51:20 http: TLS handshake error from 10.129.0.1:46704: EOF 2018/08/02 21:51:30 http: TLS handshake error from 10.129.0.1:46712: EOF level=info timestamp=2018-08-02T21:51:37.898628Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:51:40 http: TLS handshake error from 10.129.0.1:46720: EOF 2018/08/02 21:51:50 http: TLS handshake error from 10.129.0.1:46728: EOF 2018/08/02 21:52:00 http: TLS handshake error from 10.129.0.1:46736: EOF 2018/08/02 21:52:10 http: TLS handshake error from 10.129.0.1:46744: EOF 2018/08/02 
21:52:20 http: TLS handshake error from 10.129.0.1:46752: EOF 2018/08/02 21:52:30 http: TLS handshake error from 10.129.0.1:46760: EOF 2018/08/02 21:52:40 http: TLS handshake error from 10.129.0.1:46768: EOF Pod name: virt-api-7d79764579-wcv25 Pod phase: Running 2018/08/02 21:50:52 http: TLS handshake error from 10.129.0.1:43552: EOF 2018/08/02 21:51:02 http: TLS handshake error from 10.129.0.1:43560: EOF 2018/08/02 21:51:12 http: TLS handshake error from 10.129.0.1:43568: EOF 2018/08/02 21:51:22 http: TLS handshake error from 10.129.0.1:43576: EOF 2018/08/02 21:51:32 http: TLS handshake error from 10.129.0.1:43584: EOF 2018/08/02 21:51:42 http: TLS handshake error from 10.129.0.1:43592: EOF 2018/08/02 21:51:52 http: TLS handshake error from 10.129.0.1:43600: EOF 2018/08/02 21:52:02 http: TLS handshake error from 10.129.0.1:43608: EOF level=info timestamp=2018-08-02T21:52:07.799545Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:52:12 http: TLS handshake error from 10.129.0.1:43616: EOF 2018/08/02 21:52:22 http: TLS handshake error from 10.129.0.1:43624: EOF 2018/08/02 21:52:32 http: TLS handshake error from 10.129.0.1:43632: EOF level=info timestamp=2018-08-02T21:52:37.684967Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-frjqz Pod phase: Running level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host Pod name: virt-controller-7d57d96b65-vpqqb Pod phase: Running level=info timestamp=2018-08-02T21:49:59.224750Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-08-02T21:49:59.231416Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-08-02T21:49:59.250700Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-08-02T21:49:59.254156Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-08-02T21:49:59.256543Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
level=info timestamp=2018-08-02T21:51:39.534669Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:51:39.538147Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:51:40.043376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:51:40.554200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijqqnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2a60590d-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:52:10.842003Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:52:10.842936Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:52:10.998204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk" level=info timestamp=2018-08-02T21:52:11.343871Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi96ftk, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3d05f55c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk" level=info timestamp=2018-08-02T21:52:41.733720Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:52:41.734519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-b8bjc Pod phase: Running level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance 
uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n" level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-vxhhj Pod phase: Running level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz" level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvminlkh6-smrpm Pod phase: Pending • Failure [31.148 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206 should log warning and proceed once the secret is there [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:237 Expected <*errors.StatusError | 0xc420f23830>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:253 ------------------------------ STEP: Starting a VirtualMachineInstance Pod name: disks-images-provider-hrc9c Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-lrcb2 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-r89xz Pod phase: Running level=info timestamp=2018-08-02T21:51:07.811034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:51:10 http: TLS handshake error from 10.129.0.1:46696: EOF 2018/08/02 21:51:20 http: TLS handshake error from 10.129.0.1:46704: EOF 2018/08/02 21:51:30 http: TLS handshake error from 10.129.0.1:46712: EOF level=info timestamp=2018-08-02T21:51:37.898628Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:51:40 
http: TLS handshake error from 10.129.0.1:46720: EOF 2018/08/02 21:51:50 http: TLS handshake error from 10.129.0.1:46728: EOF 2018/08/02 21:52:00 http: TLS handshake error from 10.129.0.1:46736: EOF 2018/08/02 21:52:10 http: TLS handshake error from 10.129.0.1:46744: EOF 2018/08/02 21:52:20 http: TLS handshake error from 10.129.0.1:46752: EOF 2018/08/02 21:52:30 http: TLS handshake error from 10.129.0.1:46760: EOF 2018/08/02 21:52:40 http: TLS handshake error from 10.129.0.1:46768: EOF 2018/08/02 21:52:50 http: TLS handshake error from 10.129.0.1:46776: EOF 2018/08/02 21:53:00 http: TLS handshake error from 10.129.0.1:46784: EOF 2018/08/02 21:53:10 http: TLS handshake error from 10.129.0.1:46792: EOF Pod name: virt-api-7d79764579-wcv25 Pod phase: Running 2018/08/02 21:51:22 http: TLS handshake error from 10.129.0.1:43576: EOF 2018/08/02 21:51:32 http: TLS handshake error from 10.129.0.1:43584: EOF 2018/08/02 21:51:42 http: TLS handshake error from 10.129.0.1:43592: EOF 2018/08/02 21:51:52 http: TLS handshake error from 10.129.0.1:43600: EOF 2018/08/02 21:52:02 http: TLS handshake error from 10.129.0.1:43608: EOF level=info timestamp=2018-08-02T21:52:07.799545Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:52:12 http: TLS handshake error from 10.129.0.1:43616: EOF 2018/08/02 21:52:22 http: TLS handshake error from 10.129.0.1:43624: EOF 2018/08/02 21:52:32 http: TLS handshake error from 10.129.0.1:43632: EOF level=info timestamp=2018-08-02T21:52:37.684967Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:52:42 http: TLS handshake error from 10.129.0.1:43640: EOF 2018/08/02 21:52:52 http: TLS handshake error from 10.129.0.1:43648: EOF 2018/08/02 21:53:02 http: TLS handshake error from 10.129.0.1:43656: EOF level=info timestamp=2018-08-02T21:53:07.842966Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:53:12 http: TLS handshake error from 10.129.0.1:43664: EOF Pod name: virt-controller-7d57d96b65-frjqz Pod phase: Running level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host Pod name: virt-controller-7d57d96b65-vpqqb Pod phase: Running level=info timestamp=2018-08-02T21:49:59.256543Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
level=info timestamp=2018-08-02T21:51:39.534669Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:51:39.538147Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijqqnt kind= uid=2a60590d-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:51:40.043376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:51:40.554200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijqqnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2a60590d-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:52:10.842003Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:52:10.842936Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:52:10.998204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk" level=info timestamp=2018-08-02T21:52:11.343871Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi96ftk, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3d05f55c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk" level=info timestamp=2018-08-02T21:52:41.733720Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:52:41.734519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:53:12.768553Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Initializing 
VirtualMachineInstance" level=info timestamp=2018-08-02T21:53:12.768954Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:53:12.894210Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs" level=info timestamp=2018-08-02T21:53:12.958449Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs" Pod name: virt-handler-b8bjc Pod phase: Running level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n" level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-vxhhj Pod phase: Running level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz" level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmi2jdzs-s42tm Pod phase: Pending • Failure [30.981 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-launcher crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:285 should be stopped and have Failed phase [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:286 Expected <*errors.StatusError | 0xc420152900>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:288 ------------------------------ Pod name: disks-images-provider-hrc9c Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-lrcb2 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-r89xz Pod phase: Running 2018/08/02 21:51:30 http: TLS handshake error from 10.129.0.1:46712: EOF level=info timestamp=2018-08-02T21:51:37.898628Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:51:40 http: TLS handshake error from 10.129.0.1:46720: EOF 2018/08/02 21:51:50 http: TLS handshake error from 10.129.0.1:46728: EOF 2018/08/02 21:52:00 http: TLS handshake error from 10.129.0.1:46736: EOF 2018/08/02 21:52:10 http: TLS handshake error from 10.129.0.1:46744: EOF 2018/08/02 21:52:20 http: TLS handshake error from 10.129.0.1:46752: EOF 2018/08/02 21:52:30 http: TLS handshake error from 10.129.0.1:46760: EOF 2018/08/02 21:52:40 http: TLS handshake error from 10.129.0.1:46768: EOF 2018/08/02 21:52:50 http: TLS handshake error from 10.129.0.1:46776: EOF 2018/08/02 21:53:00 http: TLS handshake error from 10.129.0.1:46784: EOF 2018/08/02 21:53:10 http: TLS handshake error from 10.129.0.1:46792: EOF 2018/08/02 21:53:20 http: TLS handshake error from 10.129.0.1:46802: EOF 2018/08/02 21:53:30 http: TLS handshake error from 10.129.0.1:46810: EOF 2018/08/02 21:53:40 http: TLS handshake error from 10.129.0.1:46818: EOF Pod name: virt-api-7d79764579-wcv25 Pod phase: Running 2018/08/02 21:52:02 http: TLS handshake error from 10.129.0.1:43608: EOF level=info timestamp=2018-08-02T21:52:07.799545Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:52:12 http: TLS handshake error from 10.129.0.1:43616: EOF 2018/08/02 21:52:22 http: TLS handshake error from 10.129.0.1:43624: EOF 2018/08/02 21:52:32 http: TLS handshake error from 10.129.0.1:43632: EOF level=info timestamp=2018-08-02T21:52:37.684967Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:52:42 http: TLS handshake error from 10.129.0.1:43640: EOF 2018/08/02 21:52:52 http: TLS handshake error from 10.129.0.1:43648: EOF 2018/08/02 21:53:02 http: TLS handshake error from 10.129.0.1:43656: EOF level=info timestamp=2018-08-02T21:53:07.842966Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:53:12 http: TLS 
handshake error from 10.129.0.1:43664: EOF 2018/08/02 21:53:22 http: TLS handshake error from 10.129.0.1:43674: EOF 2018/08/02 21:53:32 http: TLS handshake error from 10.129.0.1:43682: EOF level=info timestamp=2018-08-02T21:53:37.885116Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:53:42 http: TLS handshake error from 10.129.0.1:43690: EOF Pod name: virt-controller-7d57d96b65-frjqz Pod phase: Running level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host Pod name: virt-controller-7d57d96b65-vpqqb Pod phase: Running level=info timestamp=2018-08-02T21:51:40.043376Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:51:40.554200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijqqnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmijqqnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2a60590d-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijqqnt" level=info timestamp=2018-08-02T21:52:10.842003Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:52:10.842936Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ftk kind= uid=3d05f55c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:52:10.998204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk" level=info timestamp=2018-08-02T21:52:11.343871Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi96ftk, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3d05f55c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk" level=info timestamp=2018-08-02T21:52:41.733720Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info 
timestamp=2018-08-02T21:52:41.734519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:53:12.768553Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:53:12.768954Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:53:12.894210Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs" level=info timestamp=2018-08-02T21:53:12.958449Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs" level=info timestamp=2018-08-02T21:53:13.510165Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2jdzs, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 61f5e9ed-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs" level=info timestamp=2018-08-02T21:53:43.780578Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:53:43.781173Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-b8bjc Pod phase: Running level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n" level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-vxhhj Pod phase: Running level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz" level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmipktgr-jz8kj Pod phase: Pending • Failure [31.133 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:309 should recover and continue management [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:310 Expected <*errors.StatusError | 0xc420fa3320>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:313 ------------------------------ • [SLOW TEST:87.607 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler is responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:340 should indicate that a node is ready for vmis /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:341 ------------------------------ Pod name: disks-images-provider-hrc9c Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-lrcb2 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-r89xz Pod phase: Running 2018/08/02 21:53:40 http: TLS handshake error from 10.129.0.1:46818: EOF 2018/08/02 21:53:50 http: TLS handshake error from 10.129.0.1:46826: EOF 2018/08/02 21:54:00 http: TLS handshake error from 10.129.0.1:46834: EOF 2018/08/02 21:54:10 http: TLS handshake error from 10.129.0.1:46842: EOF 
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:53:40 http: TLS handshake error from 10.129.0.1:46818: EOF
2018/08/02 21:53:50 http: TLS handshake error from 10.129.0.1:46826: EOF
2018/08/02 21:54:00 http: TLS handshake error from 10.129.0.1:46834: EOF
2018/08/02 21:54:10 http: TLS handshake error from 10.129.0.1:46842: EOF
2018/08/02 21:54:20 http: TLS handshake error from 10.129.0.1:46850: EOF
2018/08/02 21:54:30 http: TLS handshake error from 10.129.0.1:46858: EOF
level=info timestamp=2018-08-02T21:54:37.887915Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:54:40 http: TLS handshake error from 10.129.0.1:46866: EOF
2018/08/02 21:54:50 http: TLS handshake error from 10.129.0.1:46874: EOF
2018/08/02 21:55:00 http: TLS handshake error from 10.129.0.1:46882: EOF
2018/08/02 21:55:10 http: TLS handshake error from 10.129.0.1:46890: EOF
2018/08/02 21:55:20 http: TLS handshake error from 10.129.0.1:46898: EOF
2018/08/02 21:55:30 http: TLS handshake error from 10.129.0.1:46906: EOF
level=info timestamp=2018-08-02T21:55:37.691028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:55:40 http: TLS handshake error from 10.129.0.1:46914: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
level=info timestamp=2018-08-02T21:53:37.885116Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:53:42 http: TLS handshake error from 10.129.0.1:43690: EOF
2018/08/02 21:53:52 http: TLS handshake error from 10.129.0.1:43698: EOF
2018/08/02 21:54:02 http: TLS handshake error from 10.129.0.1:43706: EOF
level=info timestamp=2018-08-02T21:54:07.688017Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:54:12 http: TLS handshake error from 10.129.0.1:43714: EOF
2018/08/02 21:54:22 http: TLS handshake error from 10.129.0.1:43722: EOF
2018/08/02 21:54:32 http: TLS handshake error from 10.129.0.1:43730: EOF
2018/08/02 21:54:42 http: TLS handshake error from 10.129.0.1:43738: EOF
2018/08/02 21:54:52 http: TLS handshake error from 10.129.0.1:43746: EOF
2018/08/02 21:55:02 http: TLS handshake error from 10.129.0.1:43754: EOF
level=info timestamp=2018-08-02T21:55:07.832009Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:55:12 http: TLS handshake error from 10.129.0.1:43762: EOF
2018/08/02 21:55:22 http: TLS handshake error from 10.129.0.1:43770: EOF
2018/08/02 21:55:32 http: TLS handshake error from 10.129.0.1:43778: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:52:10.998204Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk"
level=info timestamp=2018-08-02T21:52:11.343871Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ftk\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi96ftk, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3d05f55c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ftk"
level=info timestamp=2018-08-02T21:52:41.733720Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:52:41.734519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:53:12.768553Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:53:12.768954Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:53:12.894210Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:12.958449Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:13.510165Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2jdzs, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 61f5e9ed-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:43.780578Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:53:43.781173Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:53:44.661013Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipktgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmipktgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 746f0b86-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipktgr"
level=info timestamp=2018-08-02T21:55:42.619708Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:55:42.627106Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:55:42.753958Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt"

Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
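Note: the virt-controller-7d57d96b65-vpqqb log in this dump shows the controller's answer to optimistic concurrency. Every "the object has been modified; please apply your changes to the latest version and try again" line is an HTTP 409 Conflict on a stale resourceVersion, and the controller reacts by reenqueuing the VMI and retrying against fresher state. Client code usually wraps the same dance with client-go's retry helper; a minimal sketch (the update function is a hypothetical stand-in, not virt-controller's actual call site):

    package main

    import (
        "fmt"

        "k8s.io/apimachinery/pkg/api/errors"
        "k8s.io/apimachinery/pkg/runtime/schema"
        "k8s.io/client-go/util/retry"
    )

    func main() {
        attempts := 0
        err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
            attempts++
            if attempts < 3 {
                // Stand-in for an Update() rejected with 409 because the
                // cached resourceVersion was stale -- the same condition
                // virt-controller logs before reenqueuing.
                return errors.NewConflict(
                    schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"},
                    "example-vmi", fmt.Errorf("object was modified"))
            }
            return nil // a real caller would re-GET and re-apply its change here
        })
        fmt.Println(err, attempts) // <nil> 3
    }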
Pod name: virt-launcher-testvmiccbnt-l9qd5
Pod phase: Pending

• Failure in Spec Setup (BeforeEach) [31.133 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    when virt-handler is not responsive
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:371
      the node controller should react [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:410

      Expected error:
          <*errors.StatusError | 0xc4203457a0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:383
------------------------------

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:54:20 http: TLS handshake error from 10.129.0.1:46850: EOF
2018/08/02 21:54:30 http: TLS handshake error from 10.129.0.1:46858: EOF
level=info timestamp=2018-08-02T21:54:37.887915Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:54:40 http: TLS handshake error from 10.129.0.1:46866: EOF
2018/08/02 21:54:50 http: TLS handshake error from 10.129.0.1:46874: EOF
2018/08/02 21:55:00 http: TLS handshake error from 10.129.0.1:46882: EOF
2018/08/02 21:55:10 http: TLS handshake error from 10.129.0.1:46890: EOF
2018/08/02 21:55:20 http: TLS handshake error from 10.129.0.1:46898: EOF
2018/08/02 21:55:30 http: TLS handshake error from 10.129.0.1:46906: EOF
level=info timestamp=2018-08-02T21:55:37.691028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:55:40 http: TLS handshake error from 10.129.0.1:46914: EOF
2018/08/02 21:55:50 http: TLS handshake error from 10.129.0.1:46922: EOF
2018/08/02 21:56:00 http: TLS handshake error from 10.129.0.1:46930: EOF
level=info timestamp=2018-08-02T21:56:07.855785Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:56:10 http: TLS handshake error from 10.129.0.1:46938: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
level=info timestamp=2018-08-02T21:54:07.688017Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:54:12 http: TLS handshake error from 10.129.0.1:43714: EOF
2018/08/02 21:54:22 http: TLS handshake error from 10.129.0.1:43722: EOF
2018/08/02 21:54:32 http: TLS handshake error from 10.129.0.1:43730: EOF
2018/08/02 21:54:42 http: TLS handshake error from 10.129.0.1:43738: EOF
2018/08/02 21:54:52 http: TLS handshake error from 10.129.0.1:43746: EOF
2018/08/02 21:55:02 http: TLS handshake error from 10.129.0.1:43754: EOF
level=info timestamp=2018-08-02T21:55:07.832009Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:55:12 http: TLS handshake error from 10.129.0.1:43762: EOF
2018/08/02 21:55:22 http: TLS handshake error from 10.129.0.1:43770: EOF
2018/08/02 21:55:32 http: TLS handshake error from 10.129.0.1:43778: EOF
2018/08/02 21:55:42 http: TLS handshake error from 10.129.0.1:43786: EOF
2018/08/02 21:55:52 http: TLS handshake error from 10.129.0.1:43794: EOF
2018/08/02 21:56:02 http: TLS handshake error from 10.129.0.1:43802: EOF
2018/08/02 21:56:12 http: TLS handshake error from 10.129.0.1:43810: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:52:41.734519Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvminlkh6 kind= uid=4f6b4c59-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:53:12.768553Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:53:12.768954Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2jdzs kind= uid=61f5e9ed-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:53:12.894210Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:12.958449Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:13.510165Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2jdzs, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 61f5e9ed-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:43.780578Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:53:43.781173Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:53:44.661013Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipktgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmipktgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 746f0b86-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipktgr"
level=info timestamp=2018-08-02T21:55:42.619708Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:55:42.627106Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:55:42.753958Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt"
level=info timestamp=2018-08-02T21:55:43.388036Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiccbnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: bb496b00-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt"
level=info timestamp=2018-08-02T21:56:13.630268Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:56:13.637847Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi6lklw-rf2kf
Pod phase: Pending

• Failure [31.144 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with node tainted
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:463
      the vmi with tolerations should be scheduled [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:485

      Expected error:
          <*errors.StatusError | 0xc4206ec630>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:502
------------------------------
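Note on the virt-api lines that dominate these dumps: a "http: TLS handshake error from <ip>: EOF" every ten seconds, always from the same source address with a steadily incrementing port, is the signature of a TCP health probe that connects and disconnects without ever speaking TLS; Go's net/http logs EOF when the peer hangs up mid-handshake. A self-contained reproduction of that exact error (an assumption about the probe's origin, offered as illustration only):

    package main

    import (
        "crypto/tls"
        "fmt"
        "net"
    )

    func main() {
        server, client := net.Pipe()

        // The "probe": open the connection and close it without sending a
        // ClientHello, like a plain TCP liveness/readiness check would.
        go client.Close()

        // The server's handshake read hits the closed pipe and returns EOF --
        // the error net/http then logs as "TLS handshake error from ...: EOF".
        err := tls.Server(server, &tls.Config{}).Handshake()
        fmt.Println(err) // EOF
    }

If that reading is right, the handshake-EOF noise is benign and unrelated to the 504 failures interleaved with it.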
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:54:40 http: TLS handshake error from 10.129.0.1:46866: EOF
2018/08/02 21:54:50 http: TLS handshake error from 10.129.0.1:46874: EOF
2018/08/02 21:55:00 http: TLS handshake error from 10.129.0.1:46882: EOF
2018/08/02 21:55:10 http: TLS handshake error from 10.129.0.1:46890: EOF
2018/08/02 21:55:20 http: TLS handshake error from 10.129.0.1:46898: EOF
2018/08/02 21:55:30 http: TLS handshake error from 10.129.0.1:46906: EOF
level=info timestamp=2018-08-02T21:55:37.691028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:55:40 http: TLS handshake error from 10.129.0.1:46914: EOF
2018/08/02 21:55:50 http: TLS handshake error from 10.129.0.1:46922: EOF
2018/08/02 21:56:00 http: TLS handshake error from 10.129.0.1:46930: EOF
level=info timestamp=2018-08-02T21:56:07.855785Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:56:10 http: TLS handshake error from 10.129.0.1:46938: EOF
2018/08/02 21:56:20 http: TLS handshake error from 10.129.0.1:46948: EOF
2018/08/02 21:56:30 http: TLS handshake error from 10.129.0.1:46956: EOF
2018/08/02 21:56:40 http: TLS handshake error from 10.129.0.1:46964: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:54:42 http: TLS handshake error from 10.129.0.1:43738: EOF
2018/08/02 21:54:52 http: TLS handshake error from 10.129.0.1:43746: EOF
2018/08/02 21:55:02 http: TLS handshake error from 10.129.0.1:43754: EOF
level=info timestamp=2018-08-02T21:55:07.832009Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:55:12 http: TLS handshake error from 10.129.0.1:43762: EOF
2018/08/02 21:55:22 http: TLS handshake error from 10.129.0.1:43770: EOF
2018/08/02 21:55:32 http: TLS handshake error from 10.129.0.1:43778: EOF
2018/08/02 21:55:42 http: TLS handshake error from 10.129.0.1:43786: EOF
2018/08/02 21:55:52 http: TLS handshake error from 10.129.0.1:43794: EOF
2018/08/02 21:56:02 http: TLS handshake error from 10.129.0.1:43802: EOF
2018/08/02 21:56:12 http: TLS handshake error from 10.129.0.1:43810: EOF
2018/08/02 21:56:22 http: TLS handshake error from 10.129.0.1:43820: EOF
2018/08/02 21:56:32 http: TLS handshake error from 10.129.0.1:43828: EOF
level=info timestamp=2018-08-02T21:56:37.904677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:56:42 http: TLS handshake error from 10.129.0.1:43836: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:53:12.958449Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:13.510165Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2jdzs\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2jdzs, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 61f5e9ed-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2jdzs"
level=info timestamp=2018-08-02T21:53:43.780578Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:53:43.781173Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipktgr kind= uid=746f0b86-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:53:44.661013Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipktgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmipktgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 746f0b86-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipktgr"
level=info timestamp=2018-08-02T21:55:42.619708Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:55:42.627106Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:55:42.753958Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt"
level=info timestamp=2018-08-02T21:55:43.388036Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiccbnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: bb496b00-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt"
level=info timestamp=2018-08-02T21:56:13.630268Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:56:13.637847Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:56:15.441941Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6lklw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi6lklw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cdc16b9e-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6lklw"
level=info timestamp=2018-08-02T21:56:45.663190Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:56:45.663835Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:56:45.899400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d"

Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
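Note: the other recurring controller line, "StorageError: invalid object, Code: 4, ... Precondition failed: UID in precondition: <uid>, UID in object meta: " (with the second UID empty), reads as the storage layer refusing a UID-scoped write because no object with that UID exists anymore — consistent with the VMI being deleted while an update was in flight, in which case the reenqueue is the correct and harmless reaction. The UID precondition itself is ordinary metav1 plumbing; a sketch of how an operation gets pinned to one incarnation of an object (illustration only, not the virt-controller call site):

    package main

    import (
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/types"
    )

    func main() {
        // If no live object carries this UID (deleted, or deleted and
        // recreated), the apiserver rejects the request with the same
        // "Precondition failed: UID in precondition ..." error seen above.
        uid := types.UID("746f0b86-969e-11e8-8cb1-525500d15501")
        opts := metav1.DeleteOptions{
            Preconditions: &metav1.Preconditions{UID: &uid},
        }
        fmt.Printf("%+v\n", opts)
    }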
Pod name: virt-launcher-testvmihmv2d-s8vpc
Pod phase: Pending

• Failure [31.751 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with node tainted
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:463
      the vmi without tolerations should not be scheduled [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:506

      Expected error:
          <*errors.StatusError | 0xc421156990>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:522
------------------------------

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:55:10 http: TLS handshake error from 10.129.0.1:46890: EOF
2018/08/02 21:55:20 http: TLS handshake error from 10.129.0.1:46898: EOF
2018/08/02 21:55:30 http: TLS handshake error from 10.129.0.1:46906: EOF
level=info timestamp=2018-08-02T21:55:37.691028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:55:40 http: TLS handshake error from 10.129.0.1:46914: EOF
2018/08/02 21:55:50 http: TLS handshake error from 10.129.0.1:46922: EOF
2018/08/02 21:56:00 http: TLS handshake error from 10.129.0.1:46930: EOF
level=info timestamp=2018-08-02T21:56:07.855785Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:56:10 http: TLS handshake error from 10.129.0.1:46938: EOF
2018/08/02 21:56:20 http: TLS handshake error from 10.129.0.1:46948: EOF
2018/08/02 21:56:30 http: TLS handshake error from 10.129.0.1:46956: EOF
2018/08/02 21:56:40 http: TLS handshake error from 10.129.0.1:46964: EOF
2018/08/02 21:56:50 http: TLS handshake error from 10.129.0.1:46972: EOF
2018/08/02 21:57:00 http: TLS handshake error from 10.129.0.1:46980: EOF
2018/08/02 21:57:10 http: TLS handshake error from 10.129.0.1:46988: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:55:12 http: TLS handshake error from 10.129.0.1:43762: EOF
2018/08/02 21:55:22 http: TLS handshake error from 10.129.0.1:43770: EOF
2018/08/02 21:55:32 http: TLS handshake error from 10.129.0.1:43778: EOF
2018/08/02 21:55:42 http: TLS handshake error from 10.129.0.1:43786: EOF
2018/08/02 21:55:52 http: TLS handshake error from 10.129.0.1:43794: EOF
2018/08/02 21:56:02 http: TLS handshake error from 10.129.0.1:43802: EOF
2018/08/02 21:56:12 http: TLS handshake error from 10.129.0.1:43810: EOF
2018/08/02 21:56:22 http: TLS handshake error from 10.129.0.1:43820: EOF
2018/08/02 21:56:32 http: TLS handshake error from 10.129.0.1:43828: EOF
level=info timestamp=2018-08-02T21:56:37.904677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:56:42 http: TLS handshake error from 10.129.0.1:43836: EOF
2018/08/02 21:56:52 http: TLS handshake error from 10.129.0.1:43844: EOF
2018/08/02 21:57:02 http: TLS handshake error from 10.129.0.1:43852: EOF
level=info timestamp=2018-08-02T21:57:07.960459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:57:12 http: TLS handshake error from 10.129.0.1:43860: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:53:44.661013Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmipktgr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmipktgr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 746f0b86-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmipktgr"
level=info timestamp=2018-08-02T21:55:42.619708Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:55:42.627106Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiccbnt kind= uid=bb496b00-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:55:42.753958Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt"
level=info timestamp=2018-08-02T21:55:43.388036Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiccbnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: bb496b00-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt"
level=info timestamp=2018-08-02T21:56:13.630268Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:56:13.637847Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:56:15.441941Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6lklw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi6lklw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cdc16b9e-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6lklw"
level=info timestamp=2018-08-02T21:56:45.663190Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:56:45.663835Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:56:45.899400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d"
level=info timestamp=2018-08-02T21:56:46.265551Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihmv2d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e0d49fb1-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d"
level=info timestamp=2018-08-02T21:57:16.419752Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:57:16.429986Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:57:16.659217Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc"

Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmihk6dc-nsjgn
Pod phase: Pending

• Failure [30.780 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-default [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Expected error:
            <*errors.StatusError | 0xc420153dd0>: {
                ErrStatus: {
                    TypeMeta: {Kind: "", APIVersion: ""},
                    ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                    Status: "Failure",
                    Message: "Timeout: request did not complete within allowed duration",
                    Reason: "Timeout",
                    Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                    Code: 504,
                },
            }
            Timeout: request did not complete within allowed duration
        not to have occurred

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:558
------------------------------
STEP: Creating a VirtualMachineInstance with different namespace

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:55:40 http: TLS handshake error from 10.129.0.1:46914: EOF
2018/08/02 21:55:50 http: TLS handshake error from 10.129.0.1:46922: EOF
2018/08/02 21:56:00 http: TLS handshake error from 10.129.0.1:46930: EOF
level=info timestamp=2018-08-02T21:56:07.855785Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
http: TLS handshake error from 10.129.0.1:46938: EOF 2018/08/02 21:56:20 http: TLS handshake error from 10.129.0.1:46948: EOF 2018/08/02 21:56:30 http: TLS handshake error from 10.129.0.1:46956: EOF 2018/08/02 21:56:40 http: TLS handshake error from 10.129.0.1:46964: EOF 2018/08/02 21:56:50 http: TLS handshake error from 10.129.0.1:46972: EOF 2018/08/02 21:57:00 http: TLS handshake error from 10.129.0.1:46980: EOF 2018/08/02 21:57:10 http: TLS handshake error from 10.129.0.1:46988: EOF 2018/08/02 21:57:20 http: TLS handshake error from 10.129.0.1:46996: EOF 2018/08/02 21:57:30 http: TLS handshake error from 10.129.0.1:47004: EOF level=info timestamp=2018-08-02T21:57:37.990933Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:57:40 http: TLS handshake error from 10.129.0.1:47012: EOF Pod name: virt-api-7d79764579-wcv25 Pod phase: Running 2018/08/02 21:55:42 http: TLS handshake error from 10.129.0.1:43786: EOF 2018/08/02 21:55:52 http: TLS handshake error from 10.129.0.1:43794: EOF 2018/08/02 21:56:02 http: TLS handshake error from 10.129.0.1:43802: EOF 2018/08/02 21:56:12 http: TLS handshake error from 10.129.0.1:43810: EOF 2018/08/02 21:56:22 http: TLS handshake error from 10.129.0.1:43820: EOF 2018/08/02 21:56:32 http: TLS handshake error from 10.129.0.1:43828: EOF level=info timestamp=2018-08-02T21:56:37.904677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:56:42 http: TLS handshake error from 10.129.0.1:43836: EOF 2018/08/02 21:56:52 http: TLS handshake error from 10.129.0.1:43844: EOF 2018/08/02 21:57:02 http: TLS handshake error from 10.129.0.1:43852: EOF level=info timestamp=2018-08-02T21:57:07.960459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:57:12 http: TLS handshake error from 10.129.0.1:43860: EOF 2018/08/02 21:57:22 http: TLS handshake error from 10.129.0.1:43868: EOF 2018/08/02 21:57:32 http: TLS handshake error from 10.129.0.1:43876: EOF 2018/08/02 21:57:42 http: TLS handshake error from 10.129.0.1:43884: EOF Pod name: virt-controller-7d57d96b65-frjqz Pod phase: Running level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host Pod name: virt-controller-7d57d96b65-vpqqb Pod phase: Running level=info timestamp=2018-08-02T21:55:43.388036Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiccbnt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiccbnt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: bb496b00-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiccbnt" level=info timestamp=2018-08-02T21:56:13.630268Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 
msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:56:13.637847Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:56:15.441941Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6lklw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi6lklw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cdc16b9e-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6lklw" level=info timestamp=2018-08-02T21:56:45.663190Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:56:45.663835Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:56:45.899400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d" level=info timestamp=2018-08-02T21:56:46.265551Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihmv2d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e0d49fb1-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d" level=info timestamp=2018-08-02T21:57:16.419752Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:57:16.429986Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:57:16.659217Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc" level=info timestamp=2018-08-02T21:57:17.001408Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihk6dc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 
f32eca9c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc" level=info timestamp=2018-08-02T21:57:47.199246Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:57:47.202990Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:57:47.723496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijc4bw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-alternative/testvmijc4bw" Pod name: virt-handler-b8bjc Pod phase: Running level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n" level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-vxhhj Pod phase: Running level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz" level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
• Failure [31.085 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    with non default namespace
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535
      should log libvirt start and stop lifecycle events of the domain
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        kubevirt-test-alternative [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Expected error:
            <*errors.StatusError | 0xc4206ed950>: {
                ErrStatus: {
                    TypeMeta: {Kind: "", APIVersion: ""},
                    ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                    Status: "Failure",
                    Message: "Timeout: request did not complete within allowed duration",
                    Reason: "Timeout",
                    Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                    Code: 504,
                },
            }
            Timeout: request did not complete within allowed duration
        not to have occurred

        /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:558
------------------------------
STEP: Creating a VirtualMachineInstance with different namespace
S [SKIPPING] in Spec Setup (BeforeEach) [0.293 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592
      should enable emulation in virt-launcher [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:604

      Software emulation is not enabled on this cluster

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.132 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592
      should be reflected in domain XML [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:641

      Software emulation is not enabled on this cluster

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.413 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VirtualMachineInstance Emulation Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592
      should request a TUN device but not KVM [BeforeEach]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:685

      Software emulation is not enabled on this cluster

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600
------------------------------
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:56:10 http: TLS handshake error from 10.129.0.1:46938: EOF
2018/08/02 21:56:20 http: TLS handshake error from 10.129.0.1:46948: EOF
2018/08/02 21:56:30 http: TLS handshake error from 10.129.0.1:46956: EOF
2018/08/02 21:56:40 http: TLS handshake error from 10.129.0.1:46964: EOF
2018/08/02 21:56:50 http: TLS handshake error from 10.129.0.1:46972: EOF
2018/08/02 21:57:00 http: TLS handshake error from 10.129.0.1:46980: EOF
2018/08/02 21:57:10 http: TLS handshake error from 10.129.0.1:46988: EOF
2018/08/02 21:57:20 http: TLS handshake error from 10.129.0.1:46996: EOF
2018/08/02 21:57:30 http: TLS handshake error from 10.129.0.1:47004: EOF
level=info timestamp=2018-08-02T21:57:37.990933Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:57:40 http: TLS handshake error from 10.129.0.1:47012: EOF
2018/08/02 21:57:50 http: TLS handshake error from 10.129.0.1:47020: EOF
2018/08/02 21:58:00 http: TLS handshake error from 10.129.0.1:47028: EOF
level=info timestamp=2018-08-02T21:58:07.975710Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:58:10 http: TLS handshake error from 10.129.0.1:47036: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:56:12 http: TLS handshake error from 10.129.0.1:43810: EOF
2018/08/02 21:56:22 http: TLS handshake error from 10.129.0.1:43820: EOF
2018/08/02 21:56:32 http: TLS handshake error from 10.129.0.1:43828: EOF
level=info timestamp=2018-08-02T21:56:37.904677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:56:42 http: TLS handshake error from 10.129.0.1:43836: EOF
2018/08/02 21:56:52 http: TLS handshake error from 10.129.0.1:43844: EOF
2018/08/02 21:57:02 http: TLS handshake error from 10.129.0.1:43852: EOF
level=info timestamp=2018-08-02T21:57:07.960459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:57:12 http: TLS handshake error from 10.129.0.1:43860: EOF
2018/08/02 21:57:22 http: TLS handshake error from 10.129.0.1:43868: EOF
2018/08/02 21:57:32 http: TLS handshake error from 10.129.0.1:43876: EOF
2018/08/02 21:57:42 http: TLS handshake error from 10.129.0.1:43884: EOF
2018/08/02 21:57:52 http: TLS handshake error from 10.129.0.1:43892: EOF
2018/08/02 21:58:02 http: TLS handshake error from 10.129.0.1:43900: EOF
2018/08/02 21:58:12 http: TLS handshake error from 10.129.0.1:43908: EOF
Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:56:13.637847Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6lklw kind= uid=cdc16b9e-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:56:15.441941Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6lklw\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi6lklw, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cdc16b9e-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6lklw"
level=info timestamp=2018-08-02T21:56:45.663190Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:56:45.663835Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:56:45.899400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d"
level=info timestamp=2018-08-02T21:56:46.265551Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihmv2d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e0d49fb1-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d"
level=info timestamp=2018-08-02T21:57:16.419752Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:57:16.429986Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:57:16.659217Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc"
level=info timestamp=2018-08-02T21:57:17.001408Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihk6dc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f32eca9c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc"
level=info timestamp=2018-08-02T21:57:47.199246Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:57:47.202990Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:57:47.723496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijc4bw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-alternative/testvmijc4bw"
level=info timestamp=2018-08-02T21:58:19.080797Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihrgql kind= uid=1883a480-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:58:19.082867Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihrgql kind= uid=1883a480-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmihrgql-gldc7
Pod phase: Pending

• Failure [30.713 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
    VM Accelerated Mode
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:721
      should request a KVM and TUN device [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:733

      Expected
          <*errors.StatusError | 0xc421156fc0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
      to be nil

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:735
------------------------------
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:56:40 http: TLS handshake error from 10.129.0.1:46964: EOF
2018/08/02 21:56:50 http: TLS handshake error from 10.129.0.1:46972: EOF
2018/08/02 21:57:00 http: TLS handshake error from 10.129.0.1:46980: EOF
2018/08/02 21:57:10 http: TLS handshake error from 10.129.0.1:46988: EOF
2018/08/02 21:57:20 http: TLS handshake error from 10.129.0.1:46996: EOF
2018/08/02 21:57:30 http: TLS handshake error from 10.129.0.1:47004: EOF
level=info timestamp=2018-08-02T21:57:37.990933Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:57:40 http: TLS handshake error from 10.129.0.1:47012: EOF
2018/08/02 21:57:50 http: TLS handshake error from 10.129.0.1:47020: EOF
2018/08/02 21:58:00 http: TLS handshake error from 10.129.0.1:47028: EOF
level=info timestamp=2018-08-02T21:58:07.975710Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:58:10 http: TLS handshake error from 10.129.0.1:47036: EOF
2018/08/02 21:58:20 http: TLS handshake error from 10.129.0.1:47044: EOF
2018/08/02 21:58:30 http: TLS handshake error from 10.129.0.1:47052: EOF
2018/08/02 21:58:40 http: TLS handshake error from 10.129.0.1:47060: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:56:42 http: TLS handshake error from 10.129.0.1:43836: EOF
2018/08/02 21:56:52 http: TLS handshake error from 10.129.0.1:43844: EOF
2018/08/02 21:57:02 http: TLS handshake error from 10.129.0.1:43852: EOF
level=info timestamp=2018-08-02T21:57:07.960459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:57:12 http: TLS handshake error from 10.129.0.1:43860: EOF
2018/08/02 21:57:22 http: TLS handshake error from 10.129.0.1:43868: EOF
2018/08/02 21:57:32 http: TLS handshake error from 10.129.0.1:43876: EOF
2018/08/02 21:57:42 http: TLS handshake error from 10.129.0.1:43884: EOF
2018/08/02 21:57:52 http: TLS handshake error from 10.129.0.1:43892: EOF
2018/08/02 21:58:02 http: TLS handshake error from 10.129.0.1:43900: EOF
2018/08/02 21:58:12 http: TLS handshake error from 10.129.0.1:43908: EOF
2018/08/02 21:58:22 http: TLS handshake error from 10.129.0.1:43916: EOF
2018/08/02 21:58:32 http: TLS handshake error from 10.129.0.1:43924: EOF
level=info timestamp=2018-08-02T21:58:37.972938Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:58:42 http: TLS handshake error from 10.129.0.1:43932: EOF
Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:56:45.663835Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihmv2d kind= uid=e0d49fb1-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:56:45.899400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d"
level=info timestamp=2018-08-02T21:56:46.265551Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihmv2d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihmv2d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e0d49fb1-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihmv2d"
level=info timestamp=2018-08-02T21:57:16.419752Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:57:16.429986Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihk6dc kind= uid=f32eca9c-969e-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:57:16.659217Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc"
level=info timestamp=2018-08-02T21:57:17.001408Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihk6dc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f32eca9c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc"
level=info timestamp=2018-08-02T21:57:47.199246Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:57:47.202990Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:57:47.723496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijc4bw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-alternative/testvmijc4bw"
level=info timestamp=2018-08-02T21:58:19.080797Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihrgql kind= uid=1883a480-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:58:19.082867Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihrgql kind= uid=1883a480-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:58:19.619487Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihrgql\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihrgql, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1883a480-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihrgql"
level=info timestamp=2018-08-02T21:58:49.783926Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwsck9 kind= uid=2ad8d1fb-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:58:49.784417Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwsck9 kind= uid=2ad8d1fb-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n" level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiwsck9-pnzns Pod phase: Pending • Failure [30.834 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VM Accelerated Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:721 should not enable emulation in virt-launcher [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:768 Expected <*errors.StatusError | 0xc4211578c0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:770 ------------------------------ •• Pod name: disks-images-provider-hrc9c Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-lrcb2 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-r89xz Pod phase: Running 2018/08/02 21:57:30 http: TLS handshake error from 10.129.0.1:47004: EOF level=info timestamp=2018-08-02T21:57:37.990933Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:57:40 http: TLS handshake error from 10.129.0.1:47012: EOF 2018/08/02 21:57:50 http: TLS handshake error from 10.129.0.1:47020: EOF 2018/08/02 21:58:00 http: TLS handshake error from 10.129.0.1:47028: EOF level=info timestamp=2018-08-02T21:58:07.975710Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:58:10 http: TLS handshake error from 10.129.0.1:47036: EOF 2018/08/02 21:58:20 http: TLS handshake error from 10.129.0.1:47044: EOF 2018/08/02 21:58:30 http: TLS handshake error from 10.129.0.1:47052: EOF 2018/08/02 21:58:40 http: TLS handshake error from 10.129.0.1:47060: EOF 2018/08/02 21:58:50 http: TLS handshake error from 10.129.0.1:47068: EOF 2018/08/02 21:59:00 http: TLS handshake error from 10.129.0.1:47076: EOF level=info timestamp=2018-08-02T21:59:07.861191Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:59:10 http: TLS handshake error from 
10.129.0.1:47084: EOF 2018/08/02 21:59:20 http: TLS handshake error from 10.129.0.1:47094: EOF Pod name: virt-api-7d79764579-wcv25 Pod phase: Running level=info timestamp=2018-08-02T21:57:07.960459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:57:12 http: TLS handshake error from 10.129.0.1:43860: EOF 2018/08/02 21:57:22 http: TLS handshake error from 10.129.0.1:43868: EOF 2018/08/02 21:57:32 http: TLS handshake error from 10.129.0.1:43876: EOF 2018/08/02 21:57:42 http: TLS handshake error from 10.129.0.1:43884: EOF 2018/08/02 21:57:52 http: TLS handshake error from 10.129.0.1:43892: EOF 2018/08/02 21:58:02 http: TLS handshake error from 10.129.0.1:43900: EOF 2018/08/02 21:58:12 http: TLS handshake error from 10.129.0.1:43908: EOF 2018/08/02 21:58:22 http: TLS handshake error from 10.129.0.1:43916: EOF 2018/08/02 21:58:32 http: TLS handshake error from 10.129.0.1:43924: EOF level=info timestamp=2018-08-02T21:58:37.972938Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 21:58:42 http: TLS handshake error from 10.129.0.1:43932: EOF 2018/08/02 21:58:52 http: TLS handshake error from 10.129.0.1:43940: EOF 2018/08/02 21:59:02 http: TLS handshake error from 10.129.0.1:43948: EOF 2018/08/02 21:59:12 http: TLS handshake error from 10.129.0.1:43956: EOF Pod name: virt-controller-7d57d96b65-frjqz Pod phase: Running level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host Pod name: virt-controller-7d57d96b65-vpqqb Pod phase: Running level=info timestamp=2018-08-02T21:57:16.659217Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc" level=info timestamp=2018-08-02T21:57:17.001408Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihk6dc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihk6dc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f32eca9c-969e-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihk6dc" level=info timestamp=2018-08-02T21:57:47.199246Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:57:47.202990Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:57:47.723496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled 
on virtualmachineinstances.kubevirt.io \"testvmijc4bw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-alternative/testvmijc4bw" level=info timestamp=2018-08-02T21:58:19.080797Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihrgql kind= uid=1883a480-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:58:19.082867Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihrgql kind= uid=1883a480-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:58:19.619487Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihrgql\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihrgql, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1883a480-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihrgql" level=info timestamp=2018-08-02T21:58:49.783926Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwsck9 kind= uid=2ad8d1fb-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:58:49.784417Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwsck9 kind= uid=2ad8d1fb-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:58:50.592569Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwsck9\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwsck9, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2ad8d1fb-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwsck9" level=info timestamp=2018-08-02T21:59:21.584278Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikq4d9 kind= uid=3dc7f2d8-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T21:59:21.587200Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikq4d9 kind= uid=3dc7f2d8-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T21:59:21.867550Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikq4d9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikq4d9" level=info timestamp=2018-08-02T21:59:21.970091Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikq4d9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmikq4d9" Pod name: virt-handler-b8bjc Pod phase: Running level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required" level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n" level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n" level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
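[editor's note] The repeated "Operation cannot be fulfilled ... the object has been modified; please apply your changes to the latest version and try again" lines in the virt-controller-7d57d96b65-vpqqb log above are ordinary optimistic-concurrency conflicts (HTTP 409): the controller's update raced with another writer, so it reenqueues the key and retries against the latest version. A minimal client-go sketch of that pattern follows; it is not KubeVirt's controller code, and the pod name, namespace, and label are placeholders.

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
	"k8s.io/client-go/util/retry"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// RetryOnConflict refetches the object and reapplies the mutation whenever
	// the apiserver answers 409 Conflict; virt-controller achieves the same
	// effect by reenqueuing the key on its work queue, as logged above.
	err = retry.RetryOnConflict(retry.DefaultRetry, func() error {
		pod, getErr := client.CoreV1().Pods("kube-system").Get(context.TODO(), "example-pod", metav1.GetOptions{})
		if getErr != nil {
			return getErr
		}
		if pod.Labels == nil {
			pod.Labels = map[string]string{}
		}
		pod.Labels["example"] = "true" // hypothetical mutation
		_, updateErr := client.CoreV1().Pods("kube-system").Update(context.TODO(), pod, metav1.UpdateOptions{})
		return updateErr
	})
	fmt.Println("update finished:", err)
}
```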
Pod name: virt-launcher-testvmikq4d9-l4cwr
Pod phase: Pending
------------------------------
• Failure [31.059 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance's Pod
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837
    should result in the VirtualMachineInstance moving to a finalized state [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:838

    Expected error:
        <*errors.StatusError | 0xc4211570e0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:841
------------------------------
STEP: Creating the VirtualMachineInstance

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:58:00 http: TLS handshake error from 10.129.0.1:47028: EOF
level=info timestamp=2018-08-02T21:58:07.975710Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:58:10 http: TLS handshake error from 10.129.0.1:47036: EOF
2018/08/02 21:58:20 http: TLS handshake error from 10.129.0.1:47044: EOF
2018/08/02 21:58:30 http: TLS handshake error from 10.129.0.1:47052: EOF
2018/08/02 21:58:40 http: TLS handshake error from 10.129.0.1:47060: EOF
2018/08/02 21:58:50 http: TLS handshake error from 10.129.0.1:47068: EOF
2018/08/02 21:59:00 http: TLS handshake error from 10.129.0.1:47076: EOF
level=info timestamp=2018-08-02T21:59:07.861191Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:59:10 http: TLS handshake error from 10.129.0.1:47084: EOF
2018/08/02 21:59:20 http: TLS handshake error from 10.129.0.1:47094: EOF
2018/08/02 21:59:30 http: TLS handshake error from 10.129.0.1:47102: EOF
level=info timestamp=2018-08-02T21:59:37.836299Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:59:40 http: TLS handshake error from 10.129.0.1:47110: EOF
2018/08/02 21:59:50 http: TLS handshake error from 10.129.0.1:47118: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 21:57:42 http: TLS handshake error from 10.129.0.1:43884: EOF
2018/08/02 21:57:52 http: TLS handshake error from 10.129.0.1:43892: EOF
2018/08/02 21:58:02 http: TLS handshake error from 10.129.0.1:43900: EOF
2018/08/02 21:58:12 http: TLS handshake error from 10.129.0.1:43908: EOF
2018/08/02 21:58:22 http: TLS handshake error from 10.129.0.1:43916: EOF
2018/08/02 21:58:32 http: TLS handshake error from 10.129.0.1:43924: EOF
level=info timestamp=2018-08-02T21:58:37.972938Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 21:58:42 http: TLS handshake error from 10.129.0.1:43932: EOF
2018/08/02 21:58:52 http: TLS handshake error from 10.129.0.1:43940: EOF
2018/08/02 21:59:02 http: TLS handshake error from 10.129.0.1:43948: EOF
2018/08/02 21:59:12 http: TLS handshake error from 10.129.0.1:43956: EOF
2018/08/02 21:59:22 http: TLS handshake error from 10.129.0.1:43966: EOF
2018/08/02 21:59:32 http: TLS handshake error from 10.129.0.1:43974: EOF
2018/08/02 21:59:42 http: TLS handshake error from 10.129.0.1:43982: EOF
2018/08/02 21:59:52 http: TLS handshake error from 10.129.0.1:43990: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:57:47.202990Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-alternative name=testvmijc4bw kind= uid=0585cb52-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:57:47.723496Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijc4bw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-alternative/testvmijc4bw"
[... entries from 21:58:19 through 21:59:21 repeat the virt-controller-7d57d96b65-vpqqb log shown earlier (testvmihrgql, testvmiwsck9, testvmikq4d9) ...]
level=info timestamp=2018-08-02T21:59:52.763551Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirzmsr kind= uid=5061948a-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T21:59:52.764729Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirzmsr kind= uid=5061948a-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T21:59:52.984549Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirzmsr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirzmsr"

Pod name: virt-handler-b8bjc
Pod phase: Running
[... repeats the virt-handler-b8bjc log shown earlier verbatim ...]
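[editor's note] Every Failure report in this run carries the same typed apiserver error: a *errors.StatusError with Reason "Timeout" and Code 504, meaning the apiserver could not complete the VMI create within its allowed duration. A sketch only, not KubeVirt-specific, showing how k8s.io/apimachinery builds and classifies that exact shape:

```go
package main

import (
	"fmt"
	"net/http"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// The same shape as "Expected error: <*errors.StatusError ...>" above.
	err := &apierrors.StatusError{ErrStatus: metav1.Status{
		Status:  metav1.StatusFailure,
		Message: "Timeout: request did not complete within allowed duration",
		Reason:  metav1.StatusReasonTimeout,
		Code:    http.StatusGatewayTimeout, // 504, as in the reports
	}}

	fmt.Println(apierrors.IsTimeout(err))      // true
	fmt.Println(apierrors.ReasonForError(err)) // Timeout
}
```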
Pod name: virt-handler-vxhhj
Pod phase: Running
[... repeats the virt-handler-vxhhj log shown earlier verbatim ...]

Pod name: virt-launcher-testvmirzmsr-scbrf
Pod phase: Pending
• Failure [31.054 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with an active pod.
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:870
      should result in pod being terminated [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:871

      Expected error:
          <*errors.StatusError | 0xc420b5c900>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:875
------------------------------
STEP: Creating the VirtualMachineInstance

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
[... earlier entries repeat the previous virt-api-7d79764579-r89xz dump; new entries follow ...]
level=info timestamp=2018-08-02T21:59:54.843874Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:00 http: TLS handshake error from 10.129.0.1:47126: EOF
level=info timestamp=2018-08-02T22:00:07.815074Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:10 http: TLS handshake error from 10.129.0.1:47134: EOF
2018/08/02 22:00:20 http: TLS handshake error from 10.129.0.1:47142: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
[... earlier entries repeat the previous virt-api-7d79764579-wcv25 dump; new entries follow ...]
level=info timestamp=2018-08-02T21:59:54.740773Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:02 http: TLS handshake error from 10.129.0.1:43998: EOF
2018/08/02 22:00:12 http: TLS handshake error from 10.129.0.1:44006: EOF
2018/08/02 22:00:22 http: TLS handshake error from 10.129.0.1:44014: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
[... repeats the virt-controller-7d57d96b65-frjqz log shown earlier verbatim ...]

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
[... earlier entries repeat the previous virt-controller-7d57d96b65-vpqqb dump; new entries follow ...]
level=info timestamp=2018-08-02T21:59:53.523366Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirzmsr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmirzmsr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5061948a-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirzmsr"
level=info timestamp=2018-08-02T22:00:23.812886Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigqzzp kind= uid=62de7d04-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:23.813235Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigqzzp kind= uid=62de7d04-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-handler-b8bjc
Pod phase: Running
[... repeats the virt-handler-b8bjc log shown earlier verbatim ...]
Pod name: virt-handler-vxhhj
Pod phase: Running
[... repeats the virt-handler-vxhhj log shown earlier verbatim ...]
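[editor's note] The "StorageError: invalid object ... Precondition failed: UID in precondition: ..., UID in object meta: " messages above arise when a write carrying a UID precondition reaches storage after the object with that UID is already gone. An illustrative client-go sketch of a UID-preconditioned delete follows; the resource kind, namespace, and name are placeholders, and the UID is copied from the log purely as an example value.

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Example UID taken from the log above.
	uid := types.UID("1883a480-969f-11e8-8cb1-525500d15501")

	// The apiserver deletes the object only if its current UID still matches;
	// otherwise the request fails with a "Precondition failed" conflict.
	err = client.CoreV1().Pods("kube-system").Delete(context.TODO(), "example-pod", metav1.DeleteOptions{
		Preconditions: &metav1.Preconditions{UID: &uid},
	})
	fmt.Println("delete finished:", err)
}
```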
Pod name: virt-launcher-testvmigqzzp-dgpd5
Pod phase: Pending
• Failure [31.158 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with ACPI and 0 grace period seconds
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:895
      should result in vmi status failed [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:896

      Expected error:
          <*errors.StatusError | 0xc4211561b0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:904
------------------------------
STEP: Creating the VirtualMachineInstance

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
[... earlier entries repeat the previous virt-api-7d79764579-r89xz dump; new entries follow ...]
2018/08/02 22:00:30 http: TLS handshake error from 10.129.0.1:47150: EOF
2018/08/02 22:00:40 http: TLS handshake error from 10.129.0.1:47158: EOF
2018/08/02 22:00:50 http: TLS handshake error from 10.129.0.1:47166: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
[... earlier entries repeat the previous virt-api-7d79764579-wcv25 dump; new entries follow ...]
2018/08/02 22:00:32 http: TLS handshake error from 10.129.0.1:44022: EOF
level=info timestamp=2018-08-02T22:00:37.889586Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:42 http: TLS handshake error from 10.129.0.1:44030: EOF
2018/08/02 22:00:52 http: TLS handshake error from 10.129.0.1:44038: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
[... repeats the virt-controller-7d57d96b65-frjqz log shown earlier verbatim ...]

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
[... earlier entries repeat the previous virt-controller-7d57d96b65-vpqqb dump; new entries follow ...]
level=info timestamp=2018-08-02T22:00:24.665640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigqzzp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigqzzp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 62de7d04-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigqzzp"
level=info timestamp=2018-08-02T22:00:55.076715Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:55.077392Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:00:55.328742Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"

Pod name: virt-handler-b8bjc
Pod phase: Running
[... repeats the virt-handler-b8bjc log shown earlier verbatim ...]
Pod name: virt-handler-vxhhj
Pod phase: Running
[... repeats the virt-handler-vxhhj log shown earlier verbatim ...]
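[editor's note] The "Expected error: ... not to have occurred" wording in these reports is Gomega's standard failure text for Expect(err).ToNot(HaveOccurred()): each spec creates a VMI and expects the create call to succeed, so the 504 timeout fails the test at that assertion. A hedged, standalone sketch (not the suite's literal code) that fakes the fail handler just to show where the message comes from:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/onsi/gomega"
)

func main() {
	g := gomega.NewGomega(func(message string, callerSkip ...int) {
		fmt.Println(message) // a real Ginkgo suite would fail the spec here
	})

	err := errors.New("Timeout: request did not complete within allowed duration")
	// Prints an "Expected error: ... not to have occurred" failure,
	// matching the shape of the reports above.
	g.Expect(err).ToNot(gomega.HaveOccurred())
}
```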
Pod name: virt-launcher-testvmikl2rr-zc88f
Pod phase: Pending
• Failure [31.148 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with ACPI and some grace period seconds
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:920
      should result in vmi status succeeded [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:921

      Expected error:
          <*errors.StatusError | 0xc421157050>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:929
------------------------------
STEP: Creating the VirtualMachineInstance

Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
[... earlier entries repeat the previous virt-api-7d79764579-r89xz dump; new entries follow ...]
2018/08/02 22:01:00 http: TLS handshake error from 10.129.0.1:47174: EOF
2018/08/02 22:01:10 http: TLS handshake error from 10.129.0.1:47182: EOF
2018/08/02 22:01:20 http: TLS handshake error from 10.129.0.1:47190: EOF

Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
[... earlier entries repeat the previous virt-api-7d79764579-wcv25 dump; new entries follow ...]
2018/08/02 22:01:02 http: TLS handshake error from 10.129.0.1:44046: EOF
level=info timestamp=2018-08-02T22:01:08.002510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:12 http: TLS handshake error from 10.129.0.1:44054: EOF
2018/08/02 22:01:22 http: TLS handshake error from 10.129.0.1:44062: EOF

Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
[... repeats the virt-controller-7d57d96b65-frjqz log shown earlier verbatim ...]

Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
[... earlier entries repeat the previous virt-controller-7d57d96b65-vpqqb dump; new entries follow ...]
level=info timestamp=2018-08-02T22:00:55.751749Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikl2rr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 758096b2-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:01:25.907802Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:25.908101Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:01:26.012167Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.051016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"

Pod name: virt-handler-b8bjc
Pod phase: Running
[... repeats the virt-handler-b8bjc log shown earlier verbatim ...]
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
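Editor's note: the virt-handler entries above trace the inputs of its reconciliation loop — whether the VMI object still exists ("Processing vmi ..., existing: ..."), its phase, and whether a libvirt domain exists ("Domain: existing: ...") — followed by the branch taken ("No update processing required", "Processing deletion.", "Processing local ephemeral data cleanup for shutdown domain."). The sketch below is an illustrative reconstruction of that dispatch from the log messages alone, not the actual vm.go logic; the cleanedUp flag is an assumed piece of internal state.

package main

import "fmt"

// Illustrative only: the states virt-handler logs before choosing a branch.
type syncInput struct {
	vmiExists    bool
	vmiFailed    bool // "vmi is in phase: Failed"
	domainExists bool
	cleanedUp    bool // assumed: local ephemeral data already removed
}

// decide mirrors the branch messages visible in the log above.
func decide(in syncInput) string {
	switch {
	case !in.vmiExists && in.domainExists:
		// "Shutting down domain for deleted VirtualMachineInstance object."
		return "Processing deletion."
	case !in.domainExists && (!in.vmiExists || in.vmiFailed) && !in.cleanedUp:
		return "Processing local ephemeral data cleanup for shutdown domain."
	default:
		return "No update processing required"
	}
}

func main() {
	fmt.Println(decide(syncInput{vmiExists: true, vmiFailed: true, cleanedUp: true})) // no-op pass
	fmt.Println(decide(syncInput{vmiExists: false, domainExists: true}))              // deletion
	fmt.Println(decide(syncInput{vmiExists: false}))                                  // cleanup
}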
Pod name: virt-launcher-testvmitrpll-qw68n
Pod phase: Pending

• Failure [30.765 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869
    with grace period greater than 0
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:945
      should run graceful shutdown [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:946

      Expected error:
          <*errors.StatusError | 0xc420ae23f0>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:970
------------------------------
STEP: Setting a VirtualMachineInstance termination grace period to 5
STEP: Creating the VirtualMachineInstance
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 21:59:50 http: TLS handshake error from 10.129.0.1:47118: EOF
level=info timestamp=2018-08-02T21:59:54.843874Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:00 http: TLS handshake error from 10.129.0.1:47126: EOF
level=info timestamp=2018-08-02T22:00:07.815074Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:10 http: TLS handshake error from 10.129.0.1:47134: EOF
2018/08/02 22:00:20 http: TLS handshake error from 10.129.0.1:47142: EOF
2018/08/02 22:00:30 http: TLS handshake error from 10.129.0.1:47150: EOF
2018/08/02 22:00:40 http: TLS handshake error from 10.129.0.1:47158: EOF
2018/08/02 22:00:50 http: TLS handshake error from 10.129.0.1:47166: EOF
2018/08/02 22:01:00 http: TLS handshake error from 10.129.0.1:47174: EOF
2018/08/02 22:01:10 http: TLS handshake error from 10.129.0.1:47182: EOF
2018/08/02 22:01:20 http: TLS handshake error from 10.129.0.1:47190: EOF
2018/08/02 22:01:30 http: TLS handshake error from 10.129.0.1:47198: EOF
2018/08/02 22:01:40 http: TLS handshake error from 10.129.0.1:47206: EOF
2018/08/02 22:01:50 http: TLS handshake error from 10.129.0.1:47214: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 22:00:02 http: TLS handshake error from 10.129.0.1:43998: EOF
2018/08/02 22:00:12 http: TLS handshake error from 10.129.0.1:44006: EOF
2018/08/02 22:00:22 http: TLS handshake error from 10.129.0.1:44014: EOF
2018/08/02 22:00:32 http: TLS handshake error from 10.129.0.1:44022: EOF
level=info timestamp=2018-08-02T22:00:37.889586Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:42 http: TLS handshake error from 10.129.0.1:44030: EOF
2018/08/02 22:00:52 http: TLS handshake error from 10.129.0.1:44038: EOF
2018/08/02 22:01:02 http: TLS handshake error from 10.129.0.1:44046: EOF
level=info timestamp=2018-08-02T22:01:08.002510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:12 http: TLS handshake error from 10.129.0.1:44054: EOF
2018/08/02 22:01:22 http: TLS handshake error from 10.129.0.1:44062: EOF
2018/08/02 22:01:32 http: TLS handshake error from 10.129.0.1:44070: EOF
level=info timestamp=2018-08-02T22:01:37.902355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:42 http: TLS handshake error from 10.129.0.1:44078: EOF
2018/08/02 22:01:52 http: TLS handshake error from 10.129.0.1:44086: EOF
Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T21:59:53.523366Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirzmsr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmirzmsr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5061948a-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirzmsr"
level=info timestamp=2018-08-02T22:00:23.812886Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigqzzp kind= uid=62de7d04-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:23.813235Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigqzzp kind= uid=62de7d04-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:00:24.665640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigqzzp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigqzzp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 62de7d04-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigqzzp"
level=info timestamp=2018-08-02T22:00:55.076715Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:55.077392Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:00:55.328742Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:00:55.751749Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikl2rr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 758096b2-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:01:25.907802Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:25.908101Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:01:26.012167Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.051016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.539778Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitrpll, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87e8369b-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:56.758256Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:56.758870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-launcher-testvmibv4kt-ng8lq
Pod phase: Pending

• Failure [31.194 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:997
    should be in Failed phase [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:998

    Expected
        <*errors.StatusError | 0xc420152900>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
    to be nil

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1001
------------------------------
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 22:00:10 http: TLS handshake error from 10.129.0.1:47134: EOF
2018/08/02 22:00:20 http: TLS handshake error from 10.129.0.1:47142: EOF
2018/08/02 22:00:30 http: TLS handshake error from 10.129.0.1:47150: EOF
2018/08/02 22:00:40 http: TLS handshake error from 10.129.0.1:47158: EOF
2018/08/02 22:00:50 http: TLS handshake error from 10.129.0.1:47166: EOF
2018/08/02 22:01:00 http: TLS handshake error from 10.129.0.1:47174: EOF
2018/08/02 22:01:10 http: TLS handshake error from 10.129.0.1:47182: EOF
2018/08/02 22:01:20 http: TLS handshake error from 10.129.0.1:47190: EOF
2018/08/02 22:01:30 http: TLS handshake error from 10.129.0.1:47198: EOF
2018/08/02 22:01:40 http: TLS handshake error from 10.129.0.1:47206: EOF
2018/08/02 22:01:50 http: TLS handshake error from 10.129.0.1:47214: EOF
2018/08/02 22:02:00 http: TLS handshake error from 10.129.0.1:47222: EOF
level=info timestamp=2018-08-02T22:02:07.872720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:02:10 http: TLS handshake error from 10.129.0.1:47230: EOF
2018/08/02 22:02:20 http: TLS handshake error from 10.129.0.1:47238: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 22:00:32 http: TLS handshake error from 10.129.0.1:44022: EOF
level=info timestamp=2018-08-02T22:00:37.889586Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:00:42 http: TLS handshake error from 10.129.0.1:44030: EOF
2018/08/02 22:00:52 http: TLS handshake error from 10.129.0.1:44038: EOF
2018/08/02 22:01:02 http: TLS handshake error from 10.129.0.1:44046: EOF
level=info timestamp=2018-08-02T22:01:08.002510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:12 http: TLS handshake error from 10.129.0.1:44054: EOF
2018/08/02 22:01:22 http: TLS handshake error from 10.129.0.1:44062: EOF
2018/08/02 22:01:32 http: TLS handshake error from 10.129.0.1:44070: EOF
level=info timestamp=2018-08-02T22:01:37.902355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:42 http: TLS handshake error from 10.129.0.1:44078: EOF
2018/08/02 22:01:52 http: TLS handshake error from 10.129.0.1:44086: EOF
2018/08/02 22:02:02 http: TLS handshake error from 10.129.0.1:44094: EOF
2018/08/02 22:02:12 http: TLS handshake error from 10.129.0.1:44102: EOF
2018/08/02 22:02:22 http: TLS handshake error from 10.129.0.1:44112: EOF
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T22:00:24.665640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigqzzp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigqzzp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 62de7d04-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigqzzp"
level=info timestamp=2018-08-02T22:00:55.076715Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:55.077392Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:00:55.328742Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:00:55.751749Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikl2rr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 758096b2-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:01:25.907802Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:25.908101Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:01:26.012167Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.051016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.539778Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitrpll, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87e8369b-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:56.758256Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:56.758870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.134212Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:02:28.139568Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.441754Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilrtnp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilrtnp"
Pod name: virt-launcher-testvmilrtnp-ksjzf
Pod phase: Pending

• Failure [31.196 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:997
    should be left alone by virt-handler [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1025

    Expected
        <*errors.StatusError | 0xc4206ec6c0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
    to be nil

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1028
------------------------------
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 22:00:50 http: TLS handshake error from 10.129.0.1:47166: EOF
2018/08/02 22:01:00 http: TLS handshake error from 10.129.0.1:47174: EOF
2018/08/02 22:01:10 http: TLS handshake error from 10.129.0.1:47182: EOF
2018/08/02 22:01:20 http: TLS handshake error from 10.129.0.1:47190: EOF
2018/08/02 22:01:30 http: TLS handshake error from 10.129.0.1:47198: EOF
2018/08/02 22:01:40 http: TLS handshake error from 10.129.0.1:47206: EOF
2018/08/02 22:01:50 http: TLS handshake error from 10.129.0.1:47214: EOF
2018/08/02 22:02:00 http: TLS handshake error from 10.129.0.1:47222: EOF
level=info timestamp=2018-08-02T22:02:07.872720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:02:10 http: TLS handshake error from 10.129.0.1:47230: EOF
2018/08/02 22:02:20 http: TLS handshake error from 10.129.0.1:47238: EOF
2018/08/02 22:02:30 http: TLS handshake error from 10.129.0.1:47248: EOF
level=info timestamp=2018-08-02T22:02:37.757368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:02:40 http: TLS handshake error from 10.129.0.1:47258: EOF
2018/08/02 22:02:50 http: TLS handshake error from 10.129.0.1:47266: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 22:00:52 http: TLS handshake error from 10.129.0.1:44038: EOF
2018/08/02 22:01:02 http: TLS handshake error from 10.129.0.1:44046: EOF
level=info timestamp=2018-08-02T22:01:08.002510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:12 http: TLS handshake error from 10.129.0.1:44054: EOF
2018/08/02 22:01:22 http: TLS handshake error from 10.129.0.1:44062: EOF
2018/08/02 22:01:32 http: TLS handshake error from 10.129.0.1:44070: EOF
level=info timestamp=2018-08-02T22:01:37.902355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:42 http: TLS handshake error from 10.129.0.1:44078: EOF
2018/08/02 22:01:52 http: TLS handshake error from 10.129.0.1:44086: EOF
2018/08/02 22:02:02 http: TLS handshake error from 10.129.0.1:44094: EOF
2018/08/02 22:02:12 http: TLS handshake error from 10.129.0.1:44102: EOF
2018/08/02 22:02:22 http: TLS handshake error from 10.129.0.1:44112: EOF
2018/08/02 22:02:32 http: TLS handshake error from 10.129.0.1:44120: EOF
2018/08/02 22:02:42 http: TLS handshake error from 10.129.0.1:44130: EOF
2018/08/02 22:02:52 http: TLS handshake error from 10.129.0.1:44138: EOF
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T22:00:24.665640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigqzzp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigqzzp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 62de7d04-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigqzzp"
level=info timestamp=2018-08-02T22:00:55.076715Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:55.077392Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:00:55.328742Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:00:55.751749Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikl2rr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 758096b2-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:01:25.907802Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:25.908101Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:01:26.012167Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.051016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.539778Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitrpll, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87e8369b-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:56.758256Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:56.758870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.134212Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:02:28.139568Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.441754Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilrtnp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilrtnp"
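Editor's note: the "StorageError: invalid object ... Precondition failed: UID in precondition: <uid>, UID in object meta: " entries scattered through the controller log arise when an operation carries a UID precondition but the object with that UID is already gone (the empty "UID in object meta" shows nothing matched). A hedged sketch of issuing a delete guarded by such a precondition; the UID is copied from the log, while the namespace and pod name are illustrative.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(config)

	uid := types.UID("5061948a-969f-11e8-8cb1-525500d15501")
	// The delete only succeeds if the stored object still has this UID; if
	// the object was deleted (or deleted and recreated), the apiserver
	// answers 409 Conflict with a "Precondition failed: UID in precondition"
	// message like the ones in the log above.
	err = client.CoreV1().Pods("kubevirt-test-default").Delete(
		context.TODO(),
		"example-pod",
		metav1.DeleteOptions{Preconditions: &metav1.Preconditions{UID: &uid}},
	)
	fmt.Println("delete result:", err)
}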
Pod name: subresource-access-testerqs7sn
Pod phase: Running

• Failure [30.861 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48
    with correct permissions
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:51
      should be allowed to access subresource endpoint [It]
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:52

      Timed out after 30.000s.
      Expected
          : Running
      to equal
          : Succeeded

      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:118
------------------------------
• [SLOW TEST:5.040 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48
    Without permissions
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:56
      should not be able to access subresource endpoint
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:57
------------------------------
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
2018/08/02 22:01:40 http: TLS handshake error from 10.129.0.1:47206: EOF
2018/08/02 22:01:50 http: TLS handshake error from 10.129.0.1:47214: EOF
2018/08/02 22:02:00 http: TLS handshake error from 10.129.0.1:47222: EOF
level=info timestamp=2018-08-02T22:02:07.872720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:02:10 http: TLS handshake error from 10.129.0.1:47230: EOF
2018/08/02 22:02:20 http: TLS handshake error from 10.129.0.1:47238: EOF
2018/08/02 22:02:30 http: TLS handshake error from 10.129.0.1:47248: EOF
level=info timestamp=2018-08-02T22:02:37.757368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:02:40 http: TLS handshake error from 10.129.0.1:47258: EOF
2018/08/02 22:02:50 http: TLS handshake error from 10.129.0.1:47266: EOF
2018/08/02 22:03:00 http: TLS handshake error from 10.129.0.1:47274: EOF
level=info timestamp=2018-08-02T22:03:07.729678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:03:10 http: TLS handshake error from 10.129.0.1:47286: EOF
2018/08/02 22:03:20 http: TLS handshake error from 10.129.0.1:47294: EOF
2018/08/02 22:03:30 http: TLS handshake error from 10.129.0.1:47302: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 22:01:22 http: TLS handshake error from 10.129.0.1:44062: EOF
2018/08/02 22:01:32 http: TLS handshake error from 10.129.0.1:44070: EOF
level=info timestamp=2018-08-02T22:01:37.902355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:01:42 http: TLS handshake error from 10.129.0.1:44078: EOF
2018/08/02 22:01:52 http: TLS handshake error from 10.129.0.1:44086: EOF
2018/08/02 22:02:02 http: TLS handshake error from 10.129.0.1:44094: EOF
2018/08/02 22:02:12 http: TLS handshake error from 10.129.0.1:44102: EOF
2018/08/02 22:02:22 http: TLS handshake error from 10.129.0.1:44112: EOF
2018/08/02 22:02:32 http: TLS handshake error from 10.129.0.1:44120: EOF
2018/08/02 22:02:42 http: TLS handshake error from 10.129.0.1:44130: EOF
2018/08/02 22:02:52 http: TLS handshake error from 10.129.0.1:44138: EOF
2018/08/02 22:03:02 http: TLS handshake error from 10.129.0.1:44146: EOF
2018/08/02 22:03:12 http: TLS handshake error from 10.129.0.1:44158: EOF
2018/08/02 22:03:22 http: TLS handshake error from 10.129.0.1:44166: EOF
2018/08/02 22:03:32 http: TLS handshake error from 10.129.0.1:44174: EOF
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T22:00:24.665640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigqzzp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigqzzp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 62de7d04-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigqzzp"
level=info timestamp=2018-08-02T22:00:55.076715Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:55.077392Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:00:55.328742Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:00:55.751749Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikl2rr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 758096b2-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:01:25.907802Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:25.908101Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:01:26.012167Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.051016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.539778Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitrpll, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87e8369b-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:56.758256Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:56.758870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.134212Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:02:28.139568Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.441754Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilrtnp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilrtnp"
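Editor's note: "Timed out after 30.000s. Expected : Running to equal : Succeeded" in the subresource failure above is Gomega's Eventually giving up: the subresource-access-tester pod never left Running within the 30-second poll window. A stripped-down sketch of that assertion shape follows; getPodPhase is a hypothetical stand-in for the test's real pod lookup, not a helper from the KubeVirt test suite.

package tests_test

import (
	"testing"
	"time"

	. "github.com/onsi/gomega"
)

// getPodPhase stands in for a real lookup of the tester pod's phase.
// If it kept returning "Running", Eventually would fail with exactly the
// "Timed out after 30.000s" message seen in the log above.
func getPodPhase() string { return "Succeeded" }

func TestSubresourceAccessPodSucceeds(t *testing.T) {
	g := NewWithT(t)
	// Poll once per second for up to 30s until the matcher is satisfied.
	g.Eventually(getPodPhase, 30*time.Second, 1*time.Second).Should(Equal("Succeeded"))
}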
Pod name: virt-handler-b8bjc
Pod phase: Running
level=info timestamp=2018-08-02T21:50:25.375394Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:50:25.375484Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:50:25.375505Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:50:25.375533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:50:25.375594Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="No update processing required"
level=info timestamp=2018-08-02T21:50:25.375643Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.301740Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: true\n"
level=info timestamp=2018-08-02T21:51:08.302426Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n"
level=info timestamp=2018-08-02T21:51:08.302513Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.302783Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.303560Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind= uid=e8ca6fa3-969d-11e8-8cb1-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:51:08.423795Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvminbztq, existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424016Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:51:08.424287Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:51:08.424689Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvminbztq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-vxhhj
Pod phase: Running
level=info timestamp=2018-08-02T21:48:30.928940Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED"
level=info timestamp=2018-08-02T21:48:30.929129Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid=58fc775d-969d-11e8-8cb1-525500d15501 msg="Domain is in state Shutoff reason Destroyed"
level=info timestamp=2018-08-02T21:48:30.929194Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.929223Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n"
level=info timestamp=2018-08-02T21:48:30.929252Z pos=vm.go:331 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n"
level=info timestamp=2018-08-02T21:48:30.930479Z pos=vm.go:358 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object."
level=info timestamp=2018-08-02T21:48:30.930574Z pos=vm.go:410 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing deletion."
level=info timestamp=2018-08-02T21:48:30.930910Z pos=vm.go:585 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmitsspz"
level=info timestamp=2018-08-02T21:48:30.933498Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T21:48:30.988023Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
level=info timestamp=2018-08-02T21:48:30.988214Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-08-02T21:48:30.988292Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitsspz, existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998055Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T21:48:30.998171Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T21:48:30.998358Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitsspz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: subresource-access-testerv64fk
Pod phase: Running
• Failure [30.916 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization For Version Command
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:63
    with authenticated user
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:66
      should be allowed to access subresource version endpoint [It]
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:67

      Timed out after 30.000s.
      Expected
          : Running
      to equal
          : Succeeded

      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:118
------------------------------
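Each "Timed out after 30.000s. Expected : Running to equal : Succeeded" failure in this run is Gomega reporting the last value it polled before the deadline: the tester pod was still Running when the 30-second budget ran out. Roughly what such a polling assertion looks like (a sketch; the helper name, client variable, and arguments are illustrative, not the suite's real code):

// Sketch of the Gomega Eventually pattern behind the failure above.
package tests

import (
	"context"
	"time"

	. "github.com/onsi/gomega"
	k8sv1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

func waitForPodSucceeded(client kubernetes.Interface, namespace, name string) {
	// Eventually re-evaluates the function until it returns PodSucceeded or
	// 30s elapse; on timeout Gomega prints the last observed value ("Running").
	Eventually(func() k8sv1.PodPhase {
		pod, err := client.CoreV1().Pods(namespace).Get(context.TODO(), name, metav1.GetOptions{})
		Expect(err).ToNot(HaveOccurred())
		return pod.Status.Phase
	}, 30*time.Second, 1*time.Second).Should(Equal(k8sv1.PodSucceeded))
}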
Pod name: disks-images-provider-hrc9c
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-lrcb2
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
level=info timestamp=2018-08-02T22:02:07.872720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:02:10 http: TLS handshake error from 10.129.0.1:47230: EOF
2018/08/02 22:02:20 http: TLS handshake error from 10.129.0.1:47238: EOF
2018/08/02 22:02:30 http: TLS handshake error from 10.129.0.1:47248: EOF
level=info timestamp=2018-08-02T22:02:37.757368Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:02:40 http: TLS handshake error from 10.129.0.1:47258: EOF
2018/08/02 22:02:50 http: TLS handshake error from 10.129.0.1:47266: EOF
2018/08/02 22:03:00 http: TLS handshake error from 10.129.0.1:47274: EOF
level=info timestamp=2018-08-02T22:03:07.729678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:03:10 http: TLS handshake error from 10.129.0.1:47286: EOF
2018/08/02 22:03:20 http: TLS handshake error from 10.129.0.1:47294: EOF
2018/08/02 22:03:30 http: TLS handshake error from 10.129.0.1:47302: EOF
2018/08/02 22:03:40 http: TLS handshake error from 10.129.0.1:47312: EOF
2018/08/02 22:03:50 http: TLS handshake error from 10.129.0.1:47320: EOF
2018/08/02 22:04:00 http: TLS handshake error from 10.129.0.1:47328: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
2018/08/02 22:01:52 http: TLS handshake error from 10.129.0.1:44086: EOF
2018/08/02 22:02:02 http: TLS handshake error from 10.129.0.1:44094: EOF
2018/08/02 22:02:12 http: TLS handshake error from 10.129.0.1:44102: EOF
2018/08/02 22:02:22 http: TLS handshake error from 10.129.0.1:44112: EOF
2018/08/02 22:02:32 http: TLS handshake error from 10.129.0.1:44120: EOF
2018/08/02 22:02:42 http: TLS handshake error from 10.129.0.1:44130: EOF
2018/08/02 22:02:52 http: TLS handshake error from 10.129.0.1:44138: EOF
2018/08/02 22:03:02 http: TLS handshake error from 10.129.0.1:44146: EOF
2018/08/02 22:03:12 http: TLS handshake error from 10.129.0.1:44158: EOF
2018/08/02 22:03:22 http: TLS handshake error from 10.129.0.1:44166: EOF
2018/08/02 22:03:32 http: TLS handshake error from 10.129.0.1:44174: EOF
level=info timestamp=2018-08-02T22:03:37.890887Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:03:42 http: TLS handshake error from 10.129.0.1:44184: EOF
2018/08/02 22:03:52 http: TLS handshake error from 10.129.0.1:44192: EOF
2018/08/02 22:04:02 http: TLS handshake error from 10.129.0.1:44200: EOF
Pod name: virt-controller-7d57d96b65-frjqz
Pod phase: Running
level=info timestamp=2018-08-02T21:50:21.398063Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
E0802 21:50:23.532515 1 leaderelection.go:224] error retrieving resource lock kube-system/virt-controller: Get https://172.30.0.1:443/api/v1/namespaces/kube-system/endpoints/virt-controller: dial tcp 172.30.0.1:443: connect: no route to host
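The frjqz replica's only output is a failed attempt to fetch its leader-election lock (the endpoints/virt-controller object in kube-system named in the URL), so it stayed a standby while the other replica held leadership. A sketch of the client-go leader-election flow behind that message; names and timings are illustrative, and modern client-go is shown with a Lease lock where the 3.10-era binary used an Endpoints lock:

// Sketch of client-go leader election (not virt-controller's actual code).
package main

import (
	"context"
	"log"
	"os"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/leaderelection"
	"k8s.io/client-go/tools/leaderelection/resourcelock"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		log.Fatal(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	lock := &resourcelock.LeaseLock{
		LeaseMeta:  metav1.ObjectMeta{Namespace: "kube-system", Name: "virt-controller"},
		Client:     client.CoordinationV1(),
		LockConfig: resourcelock.ResourceLockConfig{Identity: os.Getenv("POD_NAME")},
	}
	// RunOrDie polls and updates the lock object; a failed GET inside that
	// loop is what produces the "error retrieving resource lock" line above.
	leaderelection.RunOrDie(context.Background(), leaderelection.LeaderElectionConfig{
		Lock:          lock,
		LeaseDuration: 15 * time.Second,
		RenewDeadline: 10 * time.Second,
		RetryPeriod:   2 * time.Second,
		Callbacks: leaderelection.LeaderCallbacks{
			// Only the lock holder runs the reconcile loops; a standby
			// replica just keeps retrying the lock, as frjqz does here.
			OnStartedLeading: func(ctx context.Context) { /* start controllers */ },
			OnStoppedLeading: func() { log.Fatal("lost leadership, exiting") },
		},
	})
}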
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T22:00:24.665640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigqzzp\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigqzzp, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 62de7d04-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigqzzp"
level=info timestamp=2018-08-02T22:00:55.076715Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:00:55.077392Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikl2rr kind= uid=758096b2-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:00:55.328742Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:00:55.751749Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikl2rr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikl2rr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 758096b2-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr"
level=info timestamp=2018-08-02T22:01:25.907802Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:25.908101Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:01:26.012167Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.051016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:26.539778Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitrpll, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87e8369b-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll"
level=info timestamp=2018-08-02T22:01:56.758256Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:01:56.758870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.134212Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T22:02:28.139568Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T22:02:28.441754Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilrtnp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilrtnp"
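The "StorageError: invalid object ... Precondition failed: UID in precondition: ..., UID in object meta:" entries in this dump are not corruption: a write was scoped to one specific UID, and by the time it reached etcd the object with that UID no longer existed (the empty "UID in object meta" is the giveaway, typical of a just-deleted VMI). A sketch of how a UID-preconditioned call is issued with client-go; the pod name and resource are illustrative, with the UID borrowed from the log:

// Sketch: a delete that only applies to one specific incarnation of an object.
package main

import (
	"context"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// If the object is already gone (empty UID in meta, as in the log) or was
	// replaced by a new instance with a different UID, the API server rejects
	// the call with a "Precondition failed: UID in precondition" error rather
	// than deleting the wrong object.
	err = client.CoreV1().Pods("kubevirt-test-default").Delete(
		context.TODO(),
		"example",
		metav1.DeleteOptions{Preconditions: metav1.NewUIDPreconditions("758096b2-969f-11e8-8cb1-525500d15501")},
	)
	if err != nil {
		log.Printf("delete rejected: %v", err)
	}
}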
Pod name: subresource-access-testerx52kw
Pod phase: Running
• Failure [30.672 seconds]
Subresource Api
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37
  Rbac Authorization For Version Command
  /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:63
    Without permissions
    /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:71
      should be able to access subresource version endpoint [It]
      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:72

      Timed out after 30.000s.
      Expected
          : Running
      to equal
          : Succeeded

      /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:118
------------------------------
• [SLOW TEST:90.690 seconds]
Templates
/root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42
  Launching VMI from VM Template
  /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60
    with given Fedora Template
    /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193
      should succeed to generate a VM JSON file using oc-process command
      /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:150
------------------------------
Pod name: virt-api-7d79764579-r89xz
Pod phase: Running
level=info timestamp=2018-08-02T22:04:07.831094Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:04:10 http: TLS handshake error from 10.129.0.1:47336: EOF
2018/08/02 22:04:20 http: TLS handshake error from 10.129.0.1:47344: EOF
2018/08/02 22:04:30 http: TLS handshake error from 10.129.0.1:47352: EOF
level=info timestamp=2018-08-02T22:04:37.950355Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:04:40 http: TLS handshake error from 10.129.0.1:47360: EOF
2018/08/02 22:04:50 http: TLS handshake error from 10.129.0.1:47368: EOF
2018/08/02 22:05:00 http: TLS handshake error from 10.129.0.1:47376: EOF
2018/08/02 22:05:10 http: TLS handshake error from 10.129.0.1:47384: EOF
2018/08/02 22:05:20 http: TLS handshake error from 10.129.0.1:47392: EOF
2018/08/02 22:05:30 http: TLS handshake error from 10.129.0.1:47400: EOF
level=info timestamp=2018-08-02T22:05:37.900240Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 22:05:40 http: TLS handshake error from 10.129.0.1:47408: EOF
2018/08/02 22:05:50 http: TLS handshake error from 10.129.0.1:47416: EOF
2018/08/02 22:06:00 http: TLS handshake error from 10.129.0.1:47426: EOF
Pod name: virt-api-7d79764579-wcv25
Pod phase: Running
level=info timestamp=2018-08-02T22:05:37.577969Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:05:37.600659Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 22:05:42 http: TLS handshake error from 10.129.0.1:44280: EOF
level=info timestamp=2018-08-02T22:05:46.236398Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 22:05:52 http: TLS handshake error from 10.129.0.1:44290: EOF
level=info timestamp=2018-08-02T22:05:52.188356Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:05:52.211745Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:05:52.234537Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:05:56.480589Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 22:06:02 http: TLS handshake error from 10.129.0.1:44298: EOF
level=info timestamp=2018-08-02T22:06:06.366881Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:06:06.422120Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:06:06.453657Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:06:06.593841Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T22:06:07.753564Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
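The steady drip of "http: TLS handshake error from 10.129.0.1:...: EOF" roughly every ten seconds is what Go's http.Server logs whenever a client opens a TCP connection to the TLS port and hangs up without sending a ClientHello; it is consistent with a TCP-level health check rather than a real API client, and by itself indicates nothing broken. It can be reproduced with nothing more than the following (address illustrative):

// Reproducer sketch: dial the TLS port and close without a handshake.
package main

import "net"

func main() {
	if conn, err := net.Dial("tcp", "virt-api.kube-system.svc:443"); err == nil {
		conn.Close() // the server side logs "http: TLS handshake error ...: EOF"
	}
}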
virtualmachineinstances.kubevirt.io \"testvmikl2rr\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmikl2rr, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 758096b2-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikl2rr" level=info timestamp=2018-08-02T22:01:25.907802Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T22:01:25.908101Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitrpll kind= uid=87e8369b-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T22:01:26.012167Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll" level=info timestamp=2018-08-02T22:01:26.051016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll" level=info timestamp=2018-08-02T22:01:26.539778Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitrpll\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmitrpll, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 87e8369b-969f-11e8-8cb1-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitrpll" level=info timestamp=2018-08-02T22:01:56.758256Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T22:01:56.758870Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibv4kt kind= uid=9a459856-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T22:02:28.134212Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T22:02:28.139568Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilrtnp kind= uid=acf6dd71-969f-11e8-8cb1-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T22:02:28.441754Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilrtnp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilrtnp" 
Pod name: virt-controller-7d57d96b65-vpqqb
Pod phase: Running
level=info timestamp=2018-08-02T22:06:08.580296Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=3052c311-96a0-11e8-8cb1-525500d15501 msg="Started processing VM"
level=info timestamp=2018-08-02T22:06:08.581205Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=3052c311-96a0-11e8-8cb1-525500d15501 msg="Creating or the VirtualMachineInstance: false"
level=info timestamp=2018-08-02T22:06:08.582932Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil"
• Failure [33.253 seconds]
Templates
/root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42
  Launching VMI from VM Template
  /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60
    with given Fedora Template
    /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193
      with given VM JSON from the Template
      /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152
        should succeed to create a VM using oc-create command [It]
        /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:156

        Expected error:
            <*exec.ExitError | 0xc420634180>: {
                ProcessState: {
                    pid: 11644,
                    status: 256,
                    rusage: {
                        Utime: {Sec: 0, Usec: 827427},
                        Stime: {Sec: 0, Usec: 134394},
                        Maxrss: 44288,
                        Ixrss: 0,
                        Idrss: 0,
                        Isrss: 0,
                        Minflt: 11280,
                        Majflt: 0,
                        Nswap: 0,
                        Inblock: 0,
                        Oublock: 0,
                        Msgsnd: 0,
                        Msgrcv: 0,
                        Nsignals: 0,
                        Nvcsw: 1641,
                        Nivcsw: 30,
                    },
                },
                Stderr: nil,
            }
            exit status 1
        not to have occurred

        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113
------------------------------
STEP: Generating VM JSON from the Template via oc-process command
STEP: Creating VM via oc-create command
level=error timestamp=2018-08-02T22:06:08.816145Z pos=utils.go:1448 component=tests output="Error from server (Timeout): Timeout: request did not complete within allowed duration\n" reason="exit status 1" msg="oc command failed: /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl create -f testvm.json,"
STEP: Deleting the VM via oc-delete command
STEP: Checking if the VM does not exist anymore via oc-get command.
• [SLOW TEST:32.457 seconds]
Templates
/root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42
  Launching VMI from VM Template
  /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60
    with given Fedora Template
    /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193
      with given VM JSON from the Template
      /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152
        with given VM from the VM JSON
        /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158
          should succeed to launch a VMI using oc-patch command
          /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:161
------------------------------
• [SLOW TEST:5.932 seconds]
Templates
/root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42
  Launching VMI from VM Template
  /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60
    with given Fedora Template
    /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193
      with given VM JSON from the Template
      /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152
        with given VM from the VM JSON
        /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158
          with given VMI from the VM
          /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163
            should succeed to terminate the VMI using oc-patch command
            /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166
------------------------------
• [SLOW TEST:38.251 seconds]
LeaderElection
/root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43
  Start a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53
    when the controller pod is not running
    /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54
      should success
      /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55
------------------------------
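The Templates oc-create failure above boils down to the test shelling out to the cluster's kubectl/oc wrapper and asserting a zero exit status, so the server-side "Error from server (Timeout)" surfaces as the *exec.ExitError that Gomega dumped. A sketch of that shell-out pattern; the binary name and file path are simplified relative to the real wrapper invocation shown in the log:

// Sketch of running a CLI from a Go test and surfacing its output on failure.
package main

import (
	"log"
	"os/exec"
)

func main() {
	cmd := exec.Command("oc", "create", "-f", "testvm.json")
	out, err := cmd.CombinedOutput()
	if err != nil {
		// A non-zero exit comes back as *exec.ExitError -- the value Gomega
		// printed above -- while out carries the server's Timeout message.
		log.Fatalf("oc create failed: %v\n%s", err, out)
	}
}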
S [SKIPPING] in Spec Setup (BeforeEach) [0.011 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to start a vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133

  Skip Windows tests that requires PVC disk-windows

  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.024 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to stop a running vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139

  Skip Windows tests that requires PVC disk-windows

  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.010 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have correct UUID
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192

  Skip Windows tests that requires PVC disk-windows

  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.013 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have pod IP
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208

  Skip Windows tests that requires PVC disk-windows

  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.016 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to start a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242

  Skip Windows tests that requires PVC disk-windows

  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.022 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to stop a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250

  Skip Windows tests that requires PVC disk-windows

  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
2018/08/02 18:08:08 read closing down: EOF
• [SLOW TEST:41.106 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      with a cirros image
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67
        should return that we are running cirros
        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68
------------------------------
2018/08/02 18:08:48 read closing down: EOF
• [SLOW TEST:39.991 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      with a fedora image
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77
        should return that we are running fedora
        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78
------------------------------
2018/08/02 18:09:20 read closing down: EOF
2018/08/02 18:09:21 read closing down: EOF
2018/08/02 18:09:22 read closing down: EOF
2018/08/02 18:09:22 read closing down: EOF
2018/08/02 18:09:23 read closing down: EOF
• [SLOW TEST:35.163 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should be able to reconnect to console multiple times
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87
------------------------------
• [SLOW TEST:18.430 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should wait until the virtual machine is in running state and return a stream interface
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103
------------------------------
• [SLOW TEST:30.321 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should fail waiting for the virtual machine instance to be running
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111
------------------------------
• [SLOW TEST:30.657 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should fail waiting for the expecter
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134
------------------------------
• [SLOW TEST:42.485 seconds]
Health Monitoring
/root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37
  A VirtualMachineInstance with a watchdog device
2018/08/02 18:11:25 read closing down: EOF
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56
    should be shut down when the watchdog expires
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57
------------------------------
volumedisk0 compute
• [SLOW TEST:37.727 seconds]
Configurations
2018/08/02 18:12:03 read closing down: EOF
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with 3 CPU cores
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56
      should report 3 cpu cores under guest OS
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62
------------------------------
•
------------------------------
• [SLOW TEST:19.942 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-2Mi
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
S [SKIPPING] [0.572 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-1Gi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

  No node with hugepages hugepages-1Gi capacity

  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:216
------------------------------
•
2018/08/02 18:14:07 read closing down: EOF
------------------------------
• [SLOW TEST:100.888 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model defined
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:340
      should report defined CPU model
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:341
------------------------------
2018/08/02 18:15:54 read closing down: EOF
• [SLOW TEST:107.020 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model equals to passthrough
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:368
      should report exactly the same model as node CPU
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:369
------------------------------
• [SLOW TEST:105.226 seconds]
2018/08/02 18:17:39 read closing down: EOF
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model not defined
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:392
      should report CPU model from libvirt capabilities
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:393
------------------------------
• [SLOW TEST:38.820 seconds]
2018/08/02 18:18:18 read closing down: EOF
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  New VirtualMachineInstance with all supported drives
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:413
    should have all the device nodes
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:436
------------------------------
•
2018/08/02 18:19:02 read closing down: EOF
2018/08/02 18:19:43 read closing down: EOF
2018/08/02 18:19:46 read closing down: EOF
------------------------------
• [SLOW TEST:87.421 seconds]
Slirp
/root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39
  should be able to
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    VirtualMachineInstance with slirp interface
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
•
2018/08/02 18:19:49 read closing down: EOF
------------------------------
• [SLOW TEST:19.992 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should successfully start with hook sidecar annotation
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60
------------------------------
• [SLOW TEST:20.352 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should call Collect and OnDefineDomain on the hook sidecar
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67
------------------------------
• [SLOW TEST:22.066 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should update domain XML with SM BIOS properties
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83
------------------------------
Waiting for namespace kubevirt-test-default to be removed, this can take a while ...
Waiting for namespace kubevirt-test-alternative to be removed, this can take a while ...

Summarizing 29 Failures:

[Fail] Expose Expose service on a VMI replica set [BeforeEach] Expose ClusterIP service Should create a ClusterIP service on VMRS and connect to it
/root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:36
[Fail] Storage Starting a VirtualMachineInstance with Alpine PVC should be successfully started and stopped multiple times [It] with CDRom PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:99
[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:135
[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance with pod network connectivity explicitly set
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:135
[Fail] Networking [BeforeEach] should be able to reach the Inbound VirtualMachineInstance with custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:136
[Fail] VMIlifecycle Creating a VirtualMachineInstance with boot order should be able to boot from selected disk [It] Alpine as first boot
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:198
[Fail] VMIlifecycle Creating a VirtualMachineInstance with boot order should be able to boot from selected disk [It] Cirros as first boot
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:187
[Fail] VMIlifecycle Creating a VirtualMachineInstance with user-data without k8s secret [It] should retry starting the VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:221
[Fail] VMIlifecycle Creating a VirtualMachineInstance with user-data without k8s secret [It] should log warning and proceed once the secret is there
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:253
[Fail] VMIlifecycle Creating a VirtualMachineInstance when virt-launcher crashes [It] should be stopped and have Failed phase
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:288
[Fail] VMIlifecycle Creating a VirtualMachineInstance when virt-handler crashes [It] should recover and continue management
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:313
[Fail] VMIlifecycle Creating a VirtualMachineInstance when virt-handler is not responsive [BeforeEach] the node controller should react
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:383
[Fail] VMIlifecycle Creating a VirtualMachineInstance with node tainted [It] the vmi with tolerations should be scheduled
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:502
[Fail] VMIlifecycle Creating a VirtualMachineInstance with node tainted [It] the vmi without tolerations should not be scheduled
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:522
[Fail] VMIlifecycle Creating a VirtualMachineInstance with non default namespace should log libvirt start and stop lifecycle events of the domain [It] kubevirt-test-default
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:558
[Fail] VMIlifecycle Creating a VirtualMachineInstance with non default namespace should log libvirt start and stop lifecycle events of the domain [It] kubevirt-test-alternative
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:558
[Fail] VMIlifecycle Creating a VirtualMachineInstance VM Accelerated Mode [It] should request a KVM and TUN device
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:735
[Fail] VMIlifecycle Creating a VirtualMachineInstance VM Accelerated Mode [It] should not enable emulation in virt-launcher
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:770
[Fail] VMIlifecycle Delete a VirtualMachineInstance's Pod [It] should result in the VirtualMachineInstance moving to a finalized state
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:841
[Fail] VMIlifecycle Delete a VirtualMachineInstance with an active pod. [It] should result in pod being terminated
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:875
[Fail] VMIlifecycle Delete a VirtualMachineInstance with ACPI and 0 grace period seconds [It] should result in vmi status failed
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:904
[Fail] VMIlifecycle Delete a VirtualMachineInstance with ACPI and some grace period seconds [It] should result in vmi status succeeded
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:929
[Fail] VMIlifecycle Delete a VirtualMachineInstance with grace period greater than 0 [It] should run graceful shutdown
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:970
[Fail] VMIlifecycle Killed VirtualMachineInstance [It] should be in Failed phase
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1001
[Fail] VMIlifecycle Killed VirtualMachineInstance [It] should be left alone by virt-handler
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:1028
[Fail] Subresource Api Rbac Authorization with correct permissions [It] should be allowed to access subresource endpoint
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:118
[Fail] Subresource Api Rbac Authorization For Version Command with authenticated user [It] should be allowed to access subresource version endpoint
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:118
[Fail] Subresource Api Rbac Authorization For Version Command Without permissions [It] should be able to access subresource version endpoint
/root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:118
[Fail] Templates Launching VMI from VM Template with given Fedora Template with given VM JSON from the Template [It] should succeed to create a VM using oc-create command
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113

Ran 141 of 151 Specs in 4962.788 seconds
FAIL! -- 112 Passed | 29 Failed | 0 Pending | 10 Skipped
--- FAIL: TestTests (4962.79s)
FAIL
make: *** [functest] Error 1
+ make cluster-down
./cluster/down.sh