+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
Downloading .......
2018/07/17 02:57:14 Waiting for host: 192.168.66.102:22
2018/07/17 02:57:17 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/17 02:57:25 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/17 02:57:33 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/17 02:57:41 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/17 02:57:49 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: connection refused. Sleeping 5s
2018/07/17 02:57:54 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='380578eb6b01 04838495ca18 aef4dea680cc 46bea3472251 6251077554f3 fe2a0d7ec163 970722bcd3fd 1eb051c1b629 1052914cea3e e039c376e094 84eb0c833d7c 4e08778d9b1d 0a9a2fcf5f77 50aebcf65f7b 9faedfc6ec82 f59bf1b05c0b 2ec313cb7901 5a782498db8c fef39cf40a53 c8ca0d879d8b aabbef87da66 0ac6cd26ae28 d021d99166a4 ed496b2ca295 e69e08bd00a8 0ddad7779bb0 3ca535d6f49f 2fc9538787a8'
+ '[' -n '380578eb6b01 04838495ca18 aef4dea680cc 46bea3472251 6251077554f3 fe2a0d7ec163 970722bcd3fd 1eb051c1b629 1052914cea3e e039c376e094 84eb0c833d7c 4e08778d9b1d 0a9a2fcf5f77 50aebcf65f7b 9faedfc6ec82 f59bf1b05c0b 2ec313cb7901 5a782498db8c fef39cf40a53 c8ca0d879d8b aabbef87da66 0ac6cd26ae28 d021d99166a4 ed496b2ca295 e69e08bd00a8 0ddad7779bb0 3ca535d6f49f 2fc9538787a8' ']'
+ docker rm -f 380578eb6b01 04838495ca18 aef4dea680cc 46bea3472251 6251077554f3 fe2a0d7ec163 970722bcd3fd 1eb051c1b629 1052914cea3e e039c376e094 84eb0c833d7c 4e08778d9b1d 0a9a2fcf5f77 50aebcf65f7b 9faedfc6ec82 f59bf1b05c0b 2ec313cb7901 5a782498db8c fef39cf40a53 c8ca0d879d8b aabbef87da66 0ac6cd26ae28 d021d99166a4 ed496b2ca295 e69e08bd00a8 0ddad7779bb0 3ca535d6f49f 2fc9538787a8
380578eb6b01
04838495ca18
aef4dea680cc
46bea3472251
6251077554f3
fe2a0d7ec163
970722bcd3fd
1eb051c1b629
1052914cea3e
e039c376e094
84eb0c833d7c
4e08778d9b1d
0a9a2fcf5f77
50aebcf65f7b
9faedfc6ec82
f59bf1b05c0b
2ec313cb7901
5a782498db8c
fef39cf40a53
c8ca0d879d8b
aabbef87da66
0ac6cd26ae28
d021d99166a4
ed496b2ca295
e69e08bd00a8
0ddad7779bb0
3ca535d6f49f
2fc9538787a8
2018/07/17 02:57:57 Waiting for host: 192.168.66.101:22
2018/07/17 02:58:00 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/17 02:58:08 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/17 02:58:16 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/17 02:58:22 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused. Sleeping 5s
2018/07/17 02:58:27 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=1.66 ms
--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 1.669/1.669/1.669/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable
--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms
+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml
PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap]
****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) PLAY [Ensure there are new_nodes] ********************************************** TASK [fail] ******************************************************************** skipping: [localhost] TASK [fail] ******************************************************************** skipping: [localhost] PLAY [Initialization Checkpoint Start] ***************************************** TASK [Set install initialization 'In Progress'] ******************************** ok: [node01] PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] ****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** 
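The host groups evaluated above (oo_nodes_to_config, oo_masters_to_config, and so on) come from the inventory that the discovery loop near the top of this log assembled before invoking ansible-playbook. Pieced together from the set -x trace, that loop amounts to roughly the sketch below; the actual script is not shown in this log and the redirect targets of the echo calls are not visible in the trace, so treat it as illustrative:

    #!/bin/bash
    # Probe node02..node100 by IP, add every reachable node to the [new_nodes]
    # group of /root/inventory, and stop at the first unreachable address.
    inventory_file=/root/inventory
    openshift_ansible=/root/openshift-ansible
    echo '[new_nodes]' >> "$inventory_file"
    sed -i '/\[OSEv3:children\]/a new_nodes' "$inventory_file"
    nodes_found=false
    for i in $(seq 2 100); do
        node=$(printf node%02d "$i")
        node_ip=$(printf 192.168.66.1%02d "$i")
        set +e
        ping "$node_ip" -c 1
        if [ $? -ne 0 ]; then
            break                 # node03 did not answer, so this run stopped here
        fi
        nodes_found=true
        set -e
        echo "$node_ip $node"     # host entry; destination file not visible in the trace
        echo "Found $node. Adding it to the inventory."
        echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> "$inventory_file"
    done
    if [ "$nodes_found" = true ]; then
        ansible-playbook -i "$inventory_file" "$openshift_ansible/playbooks/openshift-node/scaleup.yml"
    fi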
TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config PLAY [Ensure that all non-node hosts are accessible] *************************** TASK [Gathering Facts] ********************************************************* ok: [node01] PLAY [Initialize basic host facts] ********************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02 TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] *** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : debug] ************************************ skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_stats] ******************************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] *** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02 included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02 TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Standardize on latest variable names] ***** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Normalize openshift_release] ************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02 TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* skipping: [node01] => (item=openshift_hosted_etcd_storage_kind) skipping: [node02] => (item=openshift_hosted_etcd_storage_kind) TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK 
[openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : At least one master is schedulable] ******* skipping: [node01] skipping: [node02] TASK [Detecting Operating System from ostree_booted] *************************** ok: [node01] ok: [node02] TASK [set openshift_deployment_type if unset] ********************************** skipping: [node01] skipping: [node02] TASK [check for node already bootstrapped] ************************************* ok: [node02] ok: [node01] TASK [initialize_facts set fact openshift_is_bootstrapped] ********************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] *** ok: [node01] ok: [node02] TASK [Determine Atomic Host Docker Version] ************************************ skipping: [node01] skipping: [node02] TASK [assert atomic host docker version is 1.12 or later] ********************** skipping: [node01] skipping: [node02] PLAY [Retrieve existing master configs and validate] *************************** TASK [openshift_control_plane : stat] ****************************************** ok: [node01] TASK [openshift_control_plane : slurp] ***************************************** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] *** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Initialize special first-master variables] ******************************* TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Disable web console if required] ***************************************** TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Setup yum repositories for all hosts] ************************************ TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Install Red Hat Subscription manager] ******************* skipping: [node02] TASK [rhel_subscribe : Is host already registered?] 
**************************** skipping: [node02] TASK [rhel_subscribe : Register host] ****************************************** skipping: [node02] TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] *********** skipping: [node02] TASK [rhel_subscribe : Attach to OpenShift Pool] ******************************* skipping: [node02] TASK [rhel_subscribe : Satellite preparation] ********************************** skipping: [node02] TASK [openshift_repos : openshift_repos detect ostree] ************************* ok: [node02] TASK [openshift_repos : Ensure libselinux-python is installed] ***************** ok: [node02] TASK [openshift_repos : Remove openshift_additional.repo file] ***************** ok: [node02] TASK [openshift_repos : Create any additional repos that are defined] ********** TASK [openshift_repos : include_tasks] ***************************************** skipping: [node02] TASK [openshift_repos : include_tasks] ***************************************** included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02 TASK [openshift_repos : Configure origin gpg keys] ***************************** ok: [node02] TASK [openshift_repos : Configure correct origin release repository] *********** ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2) TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] *** changed: [node02] => { "msg": "First run of openshift_repos" } TASK [openshift_repos : Record that openshift_repos already ran] *************** ok: [node02] RUNNING HANDLER [openshift_repos : refresh cache] ****************************** changed: [node02] PLAY [Install packages necessary for installer] ******************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Determine if chrony is installed] **************************************** changed: [node02] [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message. 
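The [WARNING] just above is Ansible's generic notice that the chrony check was run with a raw rpm command rather than the yum/dnf module; it is informational only and does not affect this run. As the message itself says, it can be silenced either with warn=False on that command task or by disabling command warnings in ansible.cfg. One way to do the latter without touching the checkout, shown only as a hedged sketch (the config path is illustrative; ANSIBLE_CONFIG simply points Ansible at an alternate config file, at the cost of ignoring the repo's own ansible.cfg):

    cat > /tmp/ansible-nowarn.cfg <<'EOF'
    [defaults]
    command_warnings = False
    EOF
    ANSIBLE_CONFIG=/tmp/ansible-nowarn.cfg ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml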
TASK [Install ntp package] ***************************************************** skipping: [node02] TASK [Start and enable ntpd/chronyd] ******************************************* changed: [node02] TASK [Ensure openshift-ansible installer package deps are installed] *********** ok: [node02] => (item=iproute) ok: [node02] => (item=dbus-python) ok: [node02] => (item=PyYAML) ok: [node02] => (item=python-ipaddress) ok: [node02] => (item=libsemanage-python) ok: [node02] => (item=yum-utils) ok: [node02] => (item=python-docker) PLAY [Initialize cluster facts] ************************************************ TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [get openshift_current_version] ******************************************* ok: [node02] ok: [node01] TASK [set_fact openshift_portal_net if present on masters] ********************* ok: [node02] ok: [node01] TASK [Gather Cluster facts] **************************************************** changed: [node02] changed: [node01] TASK [Set fact of no_proxy_internal_hostnames] ********************************* skipping: [node01] skipping: [node02] TASK [Initialize openshift.node.sdn_mtu] *************************************** changed: [node02] ok: [node01] PLAY [Initialize etcd host variables] ****************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role] ************************************************************ TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** ok: [node01] TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** ok: [node01] => { "msg": "openshift_pkg_version was not defined. 
Falling back to -3.10.0" } TASK [openshift_version : set_fact] ******************************************** ok: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Verify Requirements] ***************************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [Run variable sanity checks] ********************************************** ok: [node01] TASK [Validate openshift_node_groups and openshift_node_group_name] ************ ok: [node01] PLAY [Initialization Checkpoint End] ******************************************* TASK [Set install initialization 'Complete'] *********************************** ok: [node01] PLAY [Validate node hostnames] ************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Query DNS for IP address of node02] ************************************** ok: [node02] TASK [Validate openshift_hostname when defined] ******************************** skipping: [node02] TASK [Validate openshift_ip exists on node when defined] *********************** skipping: [node02] PLAY [Configure os_firewall] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [os_firewall : Detecting Atomic Host Operating System] ******************** ok: [node02] TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** ok: [node02] TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** skipping: [node02] TASK [os_firewall : Install firewalld packages] ******************************** skipping: [node02] TASK [os_firewall : Ensure iptables services are not enabled] ****************** skipping: [node02] => (item=iptables) skipping: [node02] => (item=ip6tables) TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** skipping: [node02] TASK [os_firewall : Start and enable firewalld service] ************************ skipping: [node02] TASK 
[os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** skipping: [node02] TASK [os_firewall : Restart polkitd] ******************************************* skipping: [node02] TASK [os_firewall : Wait for polkit action to have been created] *************** skipping: [node02] TASK [os_firewall : Ensure firewalld service is not enabled] ******************* ok: [node02] TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** skipping: [node02] TASK [os_firewall : Install iptables packages] ********************************* ok: [node02] => (item=iptables) ok: [node02] => (item=iptables-services) TASK [os_firewall : Start and enable iptables service] ************************* ok: [node02 -> node02] => (item=node02) TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** skipping: [node02] PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [container_runtime : Setup the docker-storage for overlay] **************** skipping: [node02] TASK [container_runtime : Create file system on extra volume device] *********** TASK [container_runtime : Create mount entry for extra volume] ***************** PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** ok: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* ok: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ****************** skipping: [node02] TASK [container_runtime : Add https_proxy to /etc/atomic.conf] ***************** skipping: [node02] TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ******************** skipping: [node02] TASK [container_runtime : Get current installed Docker version] **************** ok: [node02] TASK [container_runtime : Error out if Docker pre-installed but too old] ******* skipping: [node02] TASK [container_runtime : Error out if requested Docker is too old] ************ skipping: [node02] TASK [container_runtime : Install Docker] ************************************** skipping: [node02] TASK [container_runtime : Ensure 
docker.service.d directory exists] ************ ok: [node02] TASK [container_runtime : Configure Docker service unit file] ****************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Set registry params] ********************************* skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** skipping: [node02] TASK [container_runtime : Set Proxy Settings] ********************************** skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) TASK [container_runtime : Set various Docker options] ************************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Configure Docker Network OPTIONS] ******************** ok: [node02] TASK [container_runtime : Detect if docker is already started] ***************** ok: [node02] TASK [container_runtime : Start the Docker service] **************************** ok: [node02] TASK [container_runtime : set_fact] ******************************************** ok: [node02] TASK [container_runtime : Check for docker_storage_path/overlay2] ************** ok: [node02] TASK [container_runtime : Fixup SELinux permissions for docker] **************** changed: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* ok: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** ok: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** ok: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] TASK [container_runtime : Fail if Atomic Host since this is an rpm request] **** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create 
systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Check that overlay is in the kernel] ***************** skipping: [node02] TASK [container_runtime : Add overlay to modprobe.d] *************************** skipping: [node02] TASK [container_runtime : Manually modprobe overlay into the kernel] *********** skipping: [node02] TASK [container_runtime : Enable and start systemd-modules-load] *************** skipping: [node02] TASK [container_runtime : Install cri-o] *************************************** skipping: [node02] TASK [container_runtime : Remove CRI-O default configuration files] ************ skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf) skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf) TASK [container_runtime : Create the CRI-O configuration] ********************** skipping: [node02] TASK [container_runtime : Ensure CNI configuration directory exists] *********** skipping: [node02] TASK [container_runtime : Add iptables allow rules] **************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove iptables rules] ******************************* TASK [container_runtime : Add firewalld allow rules] *************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove firewalld allow rules] ************************ TASK [container_runtime : Configure the CNI network] *************************** skipping: [node02] TASK [container_runtime : Create /etc/sysconfig/crio-network] ****************** skipping: [node02] TASK [container_runtime : Start the CRI-O service] ***************************** skipping: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* skipping: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** skipping: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** skipping: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role] ************************************************************ TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** skipping: [node01] 
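All of the container_runtime CRI-O tasks above (Install cri-o, Create the CRI-O configuration, Start the CRI-O service, and so on) are skipped because this job runs the plain Docker provider: the first lines of the log show the job name being tested against a -crio- pattern before KUBEVIRT_PROVIDER is chosen. A rough sketch of that selection follows; the CRI-O provider name is an assumption, since this run never prints it:

    TARGET=openshift-3.10-release
    if [[ "$TARGET" =~ openshift-.* ]]; then
        if [[ "$TARGET" =~ .*-crio-.* ]]; then
            export KUBEVIRT_PROVIDER=os-3.10.0-crio   # assumed name; not shown in this log
        else
            export KUBEVIRT_PROVIDER=os-3.10.0        # what this run selected
        fi
    fi
    export KUBEVIRT_NUM_NODES=2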
TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Node Preparation Checkpoint Start] *************************************** TASK [Set Node preparation 'In Progress'] ************************************** ok: [node01] PLAY [Only target nodes that have not yet been bootstrapped] ******************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [add_host] **************************************************************** skipping: [localhost] => (item=node02) ok: [localhost] => (item=node01) PLAY [Disable excluders] ******************************************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02 TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-docker-excluder version detected] 
************ skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-excluder version detected] ******************* skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** skipping: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] PLAY [Configure nodes] ********************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_cloud_provider : Set cloud provider facts] ********************* skipping: [node02] TASK [openshift_cloud_provider : Create cloudprovider config dir] ************** skipping: [node02] TASK [openshift_cloud_provider : include the defined cloud provider files] ***** skipping: [node02] TASK [openshift_node : fail] *************************************************** skipping: [node02] TASK [openshift_node : Check for NetworkManager service] *********************** ok: [node02] TASK [openshift_node : Set fact using_network_manager] ************************* ok: [node02] TASK [openshift_node : Install dnsmasq] **************************************** ok: [node02] TASK [openshift_node : ensure origin/node directory exists] ******************** changed: [node02] => (item=/etc/origin) 
changed: [node02] => (item=/etc/origin/node) TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] *** skipping: [node02] TASK [openshift_node : Install network manager dispatch script] **************** skipping: [node02] TASK [openshift_node : Install dnsmasq configuration] ************************** ok: [node02] TASK [openshift_node : Deploy additional dnsmasq.conf] ************************* skipping: [node02] TASK [openshift_node : Enable dnsmasq] ***************************************** ok: [node02] TASK [openshift_node : Install network manager dispatch script] **************** ok: [node02] TASK [openshift_node : Add iptables allow rules] ******************************* ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove iptables rules] ********************************** TASK [openshift_node : Add firewalld allow rules] ****************************** skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove firewalld allow rules] *************************** TASK [openshift_node : Checking for journald.conf] ***************************** ok: [node02] TASK [openshift_node : Create journald persistence directories] **************** ok: [node02] TASK [openshift_node : Update journald setup] ********************************** ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'}) ok: [node02] => (item={u'var': u'Compress', u'val': True}) ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000}) ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'}) ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'}) ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'}) ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'}) ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'}) ok: [node02] => (item={u'var': u'ForwardToSyslog', 
u'val': False}) ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False}) TASK [openshift_node : Restart journald] *************************************** skipping: [node02] TASK [openshift_node : Disable swap] ******************************************* ok: [node02] TASK [openshift_node : Install node, clients, and conntrack packages] ********** ok: [node02] => (item={u'name': u'origin-node-3.10.0*'}) ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'}) ok: [node02] => (item={u'name': u'conntrack-tools'}) TASK [openshift_node : Restart cri-o] ****************************************** skipping: [node02] TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] *** changed: [node02] TASK [openshift_node : sysctl] ************************************************* ok: [node02] TASK [openshift_node : Check for credentials file for registry auth] *********** skipping: [node02] TASK [openshift_node : Create credentials for registry auth] ******************* skipping: [node02] TASK [openshift_node : Create credentials for registry auth (alternative)] ***** skipping: [node02] TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] *** skipping: [node02] TASK [openshift_node : Check that node image is present] *********************** changed: [node02] TASK [openshift_node : Pre-pull node image] ************************************ skipping: [node02] TASK [openshift_node : Copy node script to the node] *************************** ok: [node02] TASK [openshift_node : Install Node service file] ****************************** ok: [node02] TASK [openshift_node : Ensure old system path is set] ************************** skipping: [node02] => (item=/etc/origin/openvswitch) skipping: [node02] => (item=/var/lib/kubelet) skipping: [node02] => (item=/opt/cni/bin) TASK [openshift_node : Check status of node image pre-pull] ******************** skipping: [node02] TASK [openshift_node : Copy node container image to ostree storage] ************ skipping: [node02] TASK [openshift_node : Install or Update node system container] **************** skipping: [node02] TASK [openshift_node : Restart network manager to ensure networking configuration is in place] *** skipping: [node02] TASK [openshift_node : Configure Node settings] ******************************** ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='}) ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'}) ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'}) TASK [openshift_node : Configure Proxy Settings] ******************************* skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='}) skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='}) skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'}) TASK [openshift_node : file] *************************************************** skipping: [node02] TASK [openshift_node : Create the Node config] ********************************* changed: [node02] TASK [openshift_node : Configure Node Environment Variables] ******************* TASK [openshift_node : Ensure the node static pod directory exists] ************ changed: [node02] TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : Check status of node image pre-pull] 
******************** skipping: [node02] TASK [openshift_node : Install NFS storage plugin dependencies] **************** ok: [node02] TASK [openshift_node : Check for existence of nfs sebooleans] ****************** ok: [node02] => (item=virt_use_nfs) ok: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:14.913448', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.013580', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:14.899868', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:16.155474', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.011147', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:16.144327', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:14.913448', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.013580', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:14.899868', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:16.155474', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.011147', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:16.144327', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install GlusterFS storage plugin dependencies] ********** ok: [node02] TASK [openshift_node : Check for existence of fusefs sebooleans] *************** 
ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:24.129246', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.009081', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:24.120165', '_ansible_ignore_errors': None, 'failed': False}) ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:25.616609', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.008387', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:25.608222', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:24.129246', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.009081', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:24.120165', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-17 03:06:25.616609', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.008387', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-17 03:06:25.608222', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : 
restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : 
Ensure files are populated from templates] *********************** skipping: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use the recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] 
********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] ********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting 
Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node01] changed: [node02] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] 
********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node01 -> node01] ok: [node02 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:03:36) Node Preparation : Complete (0:04:52) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5645 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 25d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 25d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
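For readers skimming the trace, the node-readiness gate that just ran before make cluster-sync reduces to roughly the following shell pattern. This is a minimal sketch, assuming a plain kubectl on the PATH; in the job itself the calls go through the repo's cluster/kubectl.sh wrapper.

    # Sketch of the readiness gate traced above (illustrative, not the actual script).
    kubectl get nodes --no-headers
    kubectl_rc=$?
    if [ "$kubectl_rc" -ne 0 ]; then
        echo "listing nodes failed" >&2
        exit 1
    fi
    # Any node still reporting NotReady fails the gate.
    if kubectl get nodes --no-headers | grep -q NotReady; then
        echo "some nodes are NotReady" >&2
        exit 1
    fi
    echo 'Nodes are ready:'
    kubectl get nodes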
Untagged: localhost:32889/kubevirt/virt-controller:devel Untagged: localhost:32889/kubevirt/virt-controller@sha256:39ca7ffc97ca4b845db91b568ddf53f68242467fddd805cd0a975b0c371be673 Deleted: sha256:ce91c0342b4d31d1a927a6b8dcc8ed11a6707f9e194f8afb1c165272bf01d8fc Deleted: sha256:bcb74ccf96a8e58de43ac63bb0b5e5e43137c366c29451fde3fd782a3004f2f5 Deleted: sha256:d582a12dd511c1f88bc9886bd76893405284f9604cc1ce68fff97dfbc42620ed Deleted: sha256:a8f4594d34720e8e2bba640517154016408987671b0a383853190cac2b011c20 Untagged: localhost:32889/kubevirt/virt-launcher:devel Untagged: localhost:32889/kubevirt/virt-launcher@sha256:622da327e22405d282a958fe47de51e536bd93400697e14113a717b8cb3774c5 Deleted: sha256:f8a1032638c57040104237574797379c014b68abe0d3ed10ba62a672bd2afca8 Deleted: sha256:e0d89b6ae3b7e5beb50515a838e34056fdf31cd5e319b357e18996599bb93009 Deleted: sha256:36f144cee9ff5e789e0a3811c84d26ff1ababcfdeceebe03b3f70514f681381b Deleted: sha256:829a41486e5f7e39ffbadbb5bac7e15ed33cfd0321db942e5618a751ff727771 Deleted: sha256:3c5e1e15e7447602f7c48ae49424574e4bbaa676258f0106ff08f8fec0b4fbaf Deleted: sha256:c70d5315cef0df781caa80ca016961826fdc7735c7d44d9384c5f5e257da1e7f Deleted: sha256:8789744cea11c8e02462230a3ec0c0a0a19e52251aeaf77581e130b9e22411a2 Deleted: sha256:f16b3781eebe9b00e495642799e74d58f011fdb7e392f5d453a4d675fccc47bc Deleted: sha256:75b9c274c6c137cb6bf989b7823fbfc18da1eac5f071f33358ba3f75a42b0e48 Deleted: sha256:09d593ddb8642795e5170019b2a90831ff8c5477d63df2c80626c8d34a3deb97 Deleted: sha256:896e179cb5f2ef0a9791f4a966f58667557cd813224c17e12484b87cd3b38de8 Deleted: sha256:10ef371bdabd25d0b76a8665709f5e7891822965bbf428d1d73e7af9d4168400 Untagged: localhost:32889/kubevirt/virt-handler:devel Untagged: localhost:32889/kubevirt/virt-handler@sha256:4babb74203769a5350cf1f3284ee3c9c8438169e822c41f125ee1bf4136f0187 Deleted: sha256:68945ca6070c127e07a9a985eb3051641c8f603909e2bd9b5b6bd72076a55edf Deleted: sha256:75759e2853272e891639f22d2e54781783196635304dc471b38faf5591943145 Deleted: sha256:f8792cffa6d51a9090adc803aa37a1ab2ba148137d9d558d19d6fd3c74f182d8 Deleted: sha256:0c886da2d9905965f03d693b477d6c59b6e365d6d4fb77fa907a6389d31732a8 Untagged: localhost:32889/kubevirt/virt-api:devel Untagged: localhost:32889/kubevirt/virt-api@sha256:a22d568d13060befa18f421ac40db1e628b99a813f2f88dae0f8b4575a7e057a Deleted: sha256:3f0b07039fcc5c6238d9649fb95f153ada9ee852838df8d338a1a79f9e8cc25c Deleted: sha256:9b4db48ec02c7fe903280543296ffca2ef665672fa2adb7114ddbc8a1eb235ed Deleted: sha256:3c79063f87415d2f4b372695b92fb1f81217d6dd19b942f5f9a8ef33de352daf Deleted: sha256:737dd32ce378f750b0b6568a4c41ac4944b62ebd69e0f4404e740a0faa659bc3 Untagged: localhost:32889/kubevirt/subresource-access-test:devel Untagged: localhost:32889/kubevirt/subresource-access-test@sha256:58fbb6b7ed8978018fcc27a53b1a3377d71cfeafb33a94b13dd0985e92993d74 Deleted: sha256:0ca13cc3423a03e645fdce575378ba8a3b3e4f7a784c98396ea12ca412f1b107 Deleted: sha256:91e33afaa8ba620f64c7be94811ee9960c4856c54178f8b02578ce77d703eb5b Deleted: sha256:6a4382a0f9041fcb11d45be6ded72c3dabb94014a62ae50fb41841fdea9ef2b2 Deleted: sha256:f531bd017ee2e59ea79ac63db8f266c9ac9e5f86dee239baca490e369427f6a9 Untagged: localhost:32889/kubevirt/example-hook-sidecar:devel Untagged: localhost:32889/kubevirt/example-hook-sidecar@sha256:4830bd0788ce76c8ddc6f1e3bc38cfbc6b9534f0cce2a9b59d182e91d8c11a1f Deleted: sha256:24bf19720b458f3d49193ab24b49765e7d04697f5f0d27ca89e8898111dcb149 Deleted: sha256:6520c143e14ed138f97b8aa7f4e9cd253a3c071add871ac7808fda108e5321a5 Deleted: 
sha256:f6b3fcd20112f20fefef985e3db7e665fb3efc328a042248bef4da486c55781a Deleted: sha256:5c8941e8f91dbec67237523ace18b470c2b48babd5288ebe2ddf486007c4aa1d sha256:7d0ff7c8c318d908872cede18cbe0463a1f4e1df0e2ad9fe22f899e0e0bf4104 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:7d0ff7c8c318d908872cede18cbe0463a1f4e1df0e2ad9fe22f899e0e0bf4104 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 38.1 MB Step 1/8 : FROM fedora:27 ---> 9110ae7f579f Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> 0241ca8295a7 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> 348afd6e30e7 Step 5/8 : USER 1001 ---> Using cache ---> a56bbefef027 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> 545d2692fe52 Removing intermediate container ea671f748258 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in 94f0d14a2921 ---> 6676f3d0b4aa Removing intermediate container 94f0d14a2921 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-controller" '' ---> Running in 1c5dbdd46a00 ---> afe5e222d269 Removing intermediate container 1c5dbdd46a00 Successfully built afe5e222d269 Sending build context to Docker daemon 40.43 MB Step 1/10 : FROM kubevirt/libvirt:3.7.0 ---> c4e262d2dc3c Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 52c17d330685 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> c09056350983 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> ba923b8c6314 Removing intermediate container 6c61152775f8 Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> 9a614b48d124 Removing intermediate container 6aed9bad70d8 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in c6ee8b7532f4  ---> 4cc896dd6d49 Removing intermediate container c6ee8b7532f4 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in 96631ad73a1b  ---> a2d0455b155b Removing intermediate container 96631ad73a1b Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> 8a6e07bbf54d Removing intermediate container 469545329d46 Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Running in 58fd59c302bd ---> 2b221806aae5 Removing intermediate container 58fd59c302bd Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-launcher" '' ---> Running in e586e573a804 ---> a201043790c2 Removing intermediate container e586e573a804 Successfully built a201043790c2 Sending build context to Docker daemon 39.56 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> b24788147d28 Removing intermediate container e9f98844c8e6 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Running in 
ac8a2f66bd59 ---> 56064a69d507 Removing intermediate container ac8a2f66bd59 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-handler" '' ---> Running in 20bc98239460 ---> a972cb26ea52 Removing intermediate container 20bc98239460 Successfully built a972cb26ea52 Sending build context to Docker daemon 37.02 MB Step 1/8 : FROM fedora:27 ---> 9110ae7f579f Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> 5ad04faf42c8 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 686855fdf261 Step 5/8 : USER 1001 ---> Using cache ---> c633d341fb6f Step 6/8 : COPY virt-api /usr/bin/virt-api ---> 203ef11d4277 Removing intermediate container 84bb112b990a Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in 2198eafc641f ---> 067e9e323209 Removing intermediate container 2198eafc641f Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-api" '' ---> Running in b5a6b58682b8 ---> cc997a275029 Removing intermediate container b5a6b58682b8 Successfully built cc997a275029 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:27 ---> 9110ae7f579f Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/7 : ENV container docker ---> Using cache ---> fa9b94755746 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> f44e689d5514 Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> 898a453e2948 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 5aad873fd4d8 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 3ad188cde34a Successfully built 3ad188cde34a Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/5 : ENV container docker ---> Using cache ---> fa9b94755746 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> e4b4ed9ff163 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "vm-killer" '' ---> Using cache ---> e6700cd0dd16 Successfully built e6700cd0dd16 Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 496290160351 Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 081acc82039b Step 3/7 : ENV container docker ---> Using cache ---> 87a43203841c Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> bbc83781e0a9 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> c588d7a778a6 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> e28b44b64988 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "registry-disk-v1alpha" '' ---> Using cache ---> e11bb84f4d97 Successfully built e11bb84f4d97 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33095/kubevirt/registry-disk-v1alpha:devel ---> e11bb84f4d97 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 203987dfb356 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> e89a7b9e613a Step 4/4 : LABEL "cirros-registry-disk-demo" '' 
"kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> d8b4b036c834 Successfully built d8b4b036c834 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33095/kubevirt/registry-disk-v1alpha:devel ---> e11bb84f4d97 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 203b9a9a9b6f Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> 5c3ab03ad56f Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> f16803ed7c89 Successfully built f16803ed7c89 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33095/kubevirt/registry-disk-v1alpha:devel ---> e11bb84f4d97 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 203b9a9a9b6f Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 94806a94f275 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> d9a071ce84a3 Successfully built d9a071ce84a3 Sending build context to Docker daemon 34.04 MB Step 1/8 : FROM fedora:27 ---> 9110ae7f579f Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> d310698d4b36 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> e8b8a9c6ab20 Step 5/8 : USER 1001 ---> Using cache ---> d741836b0961 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> 9f05dcffa242 Removing intermediate container b961c0238280 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in 3159dcb8a62d ---> 5c90e1c29d75 Removing intermediate container 3159dcb8a62d Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "subresource-access-test" '' ---> Running in d9b2d12eaa3f ---> e98f273e0a20 Removing intermediate container d9b2d12eaa3f Successfully built e98f273e0a20 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:27 ---> 9110ae7f579f Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/9 : ENV container docker ---> Using cache ---> fa9b94755746 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> 67d950d251fa Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 63dee67c4bde Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> f9f0a3f8320f Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 15d5af487ed6 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 62349a577761 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "winrmcli" '' ---> Using cache ---> 8b65ea264095 Successfully built 8b65ea264095 Sending build context to Docker daemon 34.48 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> bb0274aff839 Removing intermediate container 1f665e99284a Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in 3af2f130736e ---> 02175614ccca Removing intermediate container 3af2f130736e 
Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 3b860c70476d ---> 5a6a789e8084 Removing intermediate container 3b860c70476d Successfully built 5a6a789e8084 hack/build-docker.sh push The push refers to a repository [localhost:33095/kubevirt/virt-controller] 6825ae6c42ee: Preparing a5b36bcc540d: Preparing 39bae602f753: Preparing a5b36bcc540d: Pushed 6825ae6c42ee: Pushed 39bae602f753: Pushed devel: digest: sha256:c1d352863447c29879dfdbd0166ace33058c1dc467650b52ea5693a0ecd6195a size: 948 The push refers to a repository [localhost:33095/kubevirt/virt-launcher] 6f702b2e9634: Preparing 0550f837842f: Preparing 36cb90cbf54f: Preparing ffd4960bc91d: Preparing fc4c1d03149f: Preparing d56a24c0b6b9: Preparing 9e20b26113ea: Preparing a1a99db27cd1: Preparing ec5be2616f4d: Preparing ffcfbc9458ac: Preparing 68e0ce966da1: Preparing 39bae602f753: Preparing 9e20b26113ea: Waiting a1a99db27cd1: Waiting ec5be2616f4d: Waiting 39bae602f753: Waiting d56a24c0b6b9: Waiting 68e0ce966da1: Waiting ffcfbc9458ac: Waiting 0550f837842f: Pushed 6f702b2e9634: Pushed ffd4960bc91d: Pushed a1a99db27cd1: Pushed 9e20b26113ea: Pushed ffcfbc9458ac: Pushed ec5be2616f4d: Pushed 39bae602f753: Mounted from kubevirt/virt-controller 36cb90cbf54f: Pushed d56a24c0b6b9: Pushed fc4c1d03149f: Pushed 68e0ce966da1: Pushed devel: digest: sha256:f1b646d1dae61f2b471ece1b48194c3cd9a3e770df827453ca51ae310404e67a size: 2828 The push refers to a repository [localhost:33095/kubevirt/virt-handler] d4ce31fcdc41: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/virt-launcher d4ce31fcdc41: Pushed devel: digest: sha256:63027e4a749d9caf76b505f275061db82cd0fe59578e34aa9b010eb6da66394d size: 741 The push refers to a repository [localhost:33095/kubevirt/virt-api] 7afc437eff79: Preparing 75ab133441d4: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/virt-handler 75ab133441d4: Pushed 7afc437eff79: Pushed devel: digest: sha256:84447b4077fd943183544ba2199e00ca82f81cc1f59b75feb11b03fa4945c2a8 size: 948 The push refers to a repository [localhost:33095/kubevirt/disks-images-provider] ee9ad4a84d8d: Preparing 70611d11de64: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/virt-api ee9ad4a84d8d: Pushed 70611d11de64: Pushed devel: digest: sha256:bb91c6463e766d53e4dfe01b4643f230023ba404e45fc336c7539b1faa0c4a76 size: 948 The push refers to a repository [localhost:33095/kubevirt/vm-killer] 4b816de0d5fd: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/disks-images-provider 4b816de0d5fd: Pushed devel: digest: sha256:44b8c6b6505bf6829ac05b6c202bb8ff84937fc61e8b4e7f8c4fcd69fa0ec54f size: 740 The push refers to a repository [localhost:33095/kubevirt/registry-disk-v1alpha] cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing cb3d1019d03e: Pushed 626899eeec02: Pushed 132d61a890c5: Pushed devel: digest: sha256:72fd0c47c0c44c7977693a12cc05d64421f46faa0c1aba8b8056baf15d72f83f size: 948 The push refers to a repository [localhost:33095/kubevirt/cirros-registry-disk-demo] d06cf28520bd: Preparing cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 626899eeec02: Mounted from kubevirt/registry-disk-v1alpha cb3d1019d03e: Mounted from kubevirt/registry-disk-v1alpha 132d61a890c5: Mounted from kubevirt/registry-disk-v1alpha d06cf28520bd: Pushed devel: digest: sha256:43c576c72c81f5011589375db8d7bb158f25124bd6c537b981b36a4502fb8b36 size: 1160 The push refers to a repository 
[localhost:33095/kubevirt/fedora-cloud-registry-disk-demo] ebb5671f5e29: Preparing cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing cb3d1019d03e: Mounted from kubevirt/cirros-registry-disk-demo 626899eeec02: Mounted from kubevirt/cirros-registry-disk-demo 132d61a890c5: Mounted from kubevirt/cirros-registry-disk-demo ebb5671f5e29: Pushed devel: digest: sha256:fb088d5bfda6184fc3900f0386f933f62443246dc3b43949ba6f1654ad2fe700 size: 1161 The push refers to a repository [localhost:33095/kubevirt/alpine-registry-disk-demo] f266c6313b5c: Preparing cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 132d61a890c5: Mounted from kubevirt/fedora-cloud-registry-disk-demo 626899eeec02: Mounted from kubevirt/fedora-cloud-registry-disk-demo cb3d1019d03e: Mounted from kubevirt/fedora-cloud-registry-disk-demo f266c6313b5c: Pushed devel: digest: sha256:3678d344eb613bcd9c858ae918591c50bbdb7bb86a94ff84e8c4ca1a4ad9081a size: 1160 The push refers to a repository [localhost:33095/kubevirt/subresource-access-test] 2393e70ab101: Preparing 6368c0536c64: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/vm-killer 6368c0536c64: Pushed 2393e70ab101: Pushed devel: digest: sha256:5fe36b7b75880c083afab24e85a7977b5c28e871bd5a04fd59db2ac21417a64f size: 948 The push refers to a repository [localhost:33095/kubevirt/winrmcli] 00e232679558: Preparing b7685068dac4: Preparing 0d5d88e46b0e: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/subresource-access-test 00e232679558: Pushed 0d5d88e46b0e: Pushed b7685068dac4: Pushed devel: digest: sha256:cb4df228553453167c8c4ee43793cb793eb82d391efb42fa075bce5f3ddf1008 size: 1165 The push refers to a repository [localhost:33095/kubevirt/example-hook-sidecar] 902150db4a71: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/winrmcli 902150db4a71: Pushed devel: digest: sha256:30fb7aa4074ed7fc0f8d71cfd098bad5c913ab3fa3c58c4b8e179d429d956c84 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ 
provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-45-g8e006bf ++ KUBEVIRT_VERSION=v0.7.0-45-g8e006bf + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:0977f1c716862710c8324798b95802e11a149f4532e33be20dd70877fe8f5332 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33095/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... 
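The cleanup pass that follows never deletes objects by name; everything KubeVirt-owned is removed by the kubevirt.io label, namespace by namespace. Condensed, the pattern looks like this — a sketch only, with an abbreviated list of resource kinds; the actual script goes through the _kubectl / cluster/kubectl.sh wrappers and also performs the vmis finalizer check seen below.

    # Condensed sketch of the label-based cleanup traced below (illustrative).
    export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
    for ns in default kube-system; do
        for kind in apiservices deployment rs services secrets pv pvc ds \
                    customresourcedefinitions pods clusterrolebinding rolebinding \
                    roles clusterroles serviceaccounts; do
            cluster/os-3.10.0/.kubectl -n "$ns" delete "$kind" -l kubevirt.io
        done
    done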
+ cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete 
clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n 
kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ 
APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-45-g8e006bf ++ KUBEVIRT_VERSION=v0.7.0-45-g8e006bf + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:0977f1c716862710c8324798b95802e11a149f4532e33be20dd70877fe8f5332 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33095/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... 
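Deployment mirrors the cleanup: the release manifests are applied as-is (demo content is skipped), the testing manifests are applied recursively, and because the provider is an os-* (OpenShift) one, the KubeVirt service accounts are additionally granted the privileged SCC. A sketch of that sequence, with wrapper calls and the long workspace paths abbreviated:

    # Sketch of the deploy step traced below (paths and wrappers abbreviated).
    export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
    cluster/os-3.10.0/.kubectl create -f "${MANIFESTS_OUT_DIR}/release/kubevirt.yaml"
    cluster/os-3.10.0/.kubectl create -f "${MANIFESTS_OUT_DIR}/testing" -R
    # On OpenShift the KubeVirt pods run under service accounts that need the privileged SCC.
    for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
        cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z "$sa" -n kube-system
    done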
+ [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n 
kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-gmdx6 0/1 ContainerCreating 0 5s disks-images-provider-mnlpj 0/1 ContainerCreating 0 5s virt-api-7d79764579-95r5c 0/1 ContainerCreating 0 8s virt-api-7d79764579-lrr44 0/1 ContainerCreating 0 8s virt-controller-7d57d96b65-t5fjs 0/1 ContainerCreating 0 8s virt-handler-zrw7w 0/1 ContainerCreating 0 8s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... + kubectl get pods -n kube-system --no-headers + cluster/kubectl.sh get pods -n kube-system --no-headers + grep -v Running disks-images-provider-gmdx6 0/1 ContainerCreating 0 6s disks-images-provider-mnlpj 0/1 ContainerCreating 0 6s virt-api-7d79764579-95r5c 0/1 ContainerCreating 0 9s virt-api-7d79764579-lrr44 0/1 ContainerCreating 0 9s virt-handler-zrw7w 0/1 ContainerCreating 0 9s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n false ']' + echo 'Waiting for KubeVirt containers to become ready ...' Waiting for KubeVirt containers to become ready ... 
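The readiness check traced here polls each namespace until no pod is outside the Running phase and no container reports ready=false, giving up after a fixed timeout. A minimal sketch of that polling loop, assembled from the commands visible in the trace (the exit-on-timeout handling is an assumption):

    #!/bin/bash
    namespaces=(kube-system default)
    timeout=300
    sample=30
    for ns in "${namespaces[@]}"; do
        current_time=0
        # First wait: every pod in the namespace must report the Running phase.
        while [ -n "$(kubectl get pods -n "$ns" --no-headers | grep -v Running)" ]; do
            echo "Waiting for kubevirt pods to enter the Running state ..."
            kubectl get pods -n "$ns" --no-headers | grep -v Running
            sleep "$sample"
            current_time=$((current_time + sample))
            if [ "$current_time" -gt "$timeout" ]; then exit 1; fi   # assumed failure path
        done
        current_time=0
        # Second wait: no container in the namespace may report ready=false.
        while [ -n "$(kubectl get pods -n "$ns" '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers | grep false)" ]; do
            echo "Waiting for KubeVirt containers to become ready ..."
            sleep "$sample"
            current_time=$((current_time + sample))
            if [ "$current_time" -gt "$timeout" ]; then exit 1; fi   # assumed failure path
        done
    done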
+ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + grep false + cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers false + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n kube-system + cluster/kubectl.sh get pods -n kube-system NAME READY STATUS RESTARTS AGE disks-images-provider-gmdx6 1/1 Running 0 1m disks-images-provider-mnlpj 1/1 Running 0 1m master-api-node01 1/1 Running 1 25d master-controllers-node01 1/1 Running 1 25d master-etcd-node01 1/1 Running 1 25d virt-api-7d79764579-95r5c 1/1 Running 0 1m virt-api-7d79764579-lrr44 1/1 Running 0 1m virt-controller-7d57d96b65-mr5d9 1/1 Running 0 1m virt-controller-7d57d96b65-t5fjs 1/1 Running 0 1m virt-handler-p5l6g 1/1 Running 0 1m virt-handler-zrw7w 1/1 Running 0 1m + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n default --no-headers ++ cluster/kubectl.sh get pods -n default --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n default + cluster/kubectl.sh get pods -n default NAME READY STATUS RESTARTS AGE docker-registry-1-tqlsm 1/1 Running 1 25d registry-console-1-bhtqz 1/1 Running 1 25d router-1-r2xxq 1/1 Running 1 25d + kubectl version + cluster/kubectl.sh version oc v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 features: Basic-Auth GSSAPI Kerberos SPNEGO Server https://127.0.0.1:33092 openshift v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 + ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + [[ -d /home/nfs/images/windows2016 ]] + [[ openshift-3.10-release =~ windows.* ]] + FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + make functest hack/dockerized "hack/build-func-tests.sh" sha256:7d0ff7c8c318d908872cede18cbe0463a1f4e1df0e2ad9fe22f899e0e0bf4104 go version go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... 
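The functional-test stage compiles the Ginkgo suite inside the build container and then runs it against the freshly deployed cluster. A minimal sketch of the equivalent invocation, using only what the trace shows (the internals of the make functest target are assumed):

    #!/bin/bash
    # Ginkgo arguments as assembled above; --junit-output points at the Jenkins workspace.
    export FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
    hack/dockerized "hack/build-func-tests.sh"   # compile tests.test inside the builder image
    hack/functests.sh                            # run the compiled suite against the cluster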
compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1531797837 Will run 140 of 140 specs Service cluster-ip-vm successfully exposed for virtualmachineinstance testvmi8rbvx • [SLOW TEST:49.164 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:68 Should expose a Cluster IP service on a VM and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:71 ------------------------------ Service node-port-vm successfully exposed for virtualmachineinstance testvmi8rbvx • [SLOW TEST:9.703 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose NodePort service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:98 Should expose a NodePort service on a VM and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:103 ------------------------------ Service cluster-ip-udp-vm successfully exposed for virtualmachineinstance testvmi9wp8q • [SLOW TEST:50.366 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:140 Expose ClusterIP UDP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:147 Should expose a ClusterIP service on a VM and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:151 ------------------------------ Service node-port-udp-vm successfully exposed for virtualmachineinstance testvmi9wp8q • [SLOW TEST:10.673 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:140 Expose NodePort UDP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:179 Should expose a NodePort service on a VM and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:184 ------------------------------ Service cluster-ip-vmrs successfully exposed for vmirs replicasetk7m48 • [SLOW TEST:61.334 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM replica set /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:227 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:260 Should create a ClusterIP service on VMRS and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:264 ------------------------------ Service cluster-ip-ovm successfully exposed for virtualmachine testvmif59p9 • [SLOW TEST:49.898 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on an Offline VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:292 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:336 Connect to ClusterIP services that was set when VM was offline /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:337 ------------------------------ • ------------------------------ • [SLOW TEST:17.396 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 should start it /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:76 ------------------------------ • [SLOW TEST:18.822 seconds] VMIlifecycle 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 should attach virt-launcher to it /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:82 ------------------------------ •••• ------------------------------ • [SLOW TEST:34.393 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:170 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Alpine as first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:25.202 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:170 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Cirros as first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:17.022 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:201 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:202 should retry starting the VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:203 ------------------------------ • [SLOW TEST:19.093 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:201 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:202 should log warning and proceed once the secret is there /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:233 ------------------------------ • [SLOW TEST:43.166 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-launcher crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:281 should be stopped and have Failed phase /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:282 ------------------------------ • [SLOW TEST:65.289 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-handler crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:304 should recover and continue management /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:305 ------------------------------ • [SLOW TEST:6.727 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-handler is responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:335 should indicate that a node is ready for vmis /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:336 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running 2018/07/17 03:32:02 http: TLS handshake error from 10.129.0.1:39162: EOF 2018/07/17 03:32:12 http: TLS handshake error from 10.129.0.1:39168: EOF level=info timestamp=2018-07-17T03:32:16.445668Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:22 http: TLS handshake error from 10.129.0.1:39174: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:31:23 http: TLS handshake error from 10.128.0.1:55050: EOF level=info timestamp=2018-07-17T03:31:25.596817Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:31:25.612403Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:31:25.630634Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:31:25.680401Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:31:33 http: TLS handshake error from 10.128.0.1:55108: EOF level=info timestamp=2018-07-17T03:31:40.153797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:31:43 http: TLS handshake error from 10.128.0.1:55164: EOF level=info timestamp=2018-07-17T03:31:45.499595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:31:50.726874Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:31:53 http: TLS handshake error from 10.128.0.1:55220: EOF level=info timestamp=2018-07-17T03:32:02.002880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:03 http: TLS handshake error from 10.128.0.1:55278: EOF 2018/07/17 03:32:13 http: TLS handshake error from 10.128.0.1:55328: EOF 2018/07/17 03:32:23 http: TLS handshake error from 10.128.0.1:55382: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:22:17.298552Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 level=info timestamp=2018-07-17T03:31:26.501069Z pos=virtinformers.go:104 component=virt-controller service=http 
msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-17T03:31:26.501247Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-17T03:31:26.501271Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-17T03:31:26.503390Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-17T03:31:26.503463Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-17T03:31:26.503573Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-17T03:31:26.503603Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-17T03:31:26.503787Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-17T03:31:26.517687Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-17T03:31:26.517843Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-17T03:31:26.517888Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-17T03:31:26.517974Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-17T03:32:29.814030Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:32:29.814737Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmib9kg2-r5qww Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.584 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 when virt-handler is not responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:366 the node controller should react [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:405 Expected error: <*errors.StatusError | 0xc4208f6900>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:378 ------------------------------ S [SKIPPING] [0.474 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:458 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-default [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Skip log query tests for JENKINS ci test environment /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:463 ------------------------------ S [SKIPPING] [0.298 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:458 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-alternative [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Skip log query tests for JENKINS ci test environment /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:463 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.217 seconds] VMIlifecycle 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:519 should enable emulation in virt-launcher [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:531 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:527 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.152 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:519 should be reflected in domain XML [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:568 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:527 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.192 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:519 should request a TUN device but not KVM [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:612 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:527 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running 2018/07/17 03:32:02 http: TLS handshake error from 10.129.0.1:39162: EOF 2018/07/17 03:32:12 http: TLS handshake error from 10.129.0.1:39168: EOF level=info timestamp=2018-07-17T03:32:16.445668Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:22 http: TLS handshake error from 10.129.0.1:39174: EOF 2018/07/17 03:32:32 http: TLS handshake error from 10.129.0.1:39180: EOF 2018/07/17 03:32:42 http: TLS handshake error from 10.129.0.1:39186: EOF level=info timestamp=2018-07-17T03:32:46.528148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:52 http: TLS handshake error from 10.129.0.1:39192: EOF 2018/07/17 03:33:02 http: TLS handshake error from 10.129.0.1:39198: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running level=info timestamp=2018-07-17T03:31:25.630634Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:31:25.680401Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:31:33 http: TLS handshake error from 10.128.0.1:55108: EOF level=info timestamp=2018-07-17T03:31:40.153797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 
2018/07/17 03:31:43 http: TLS handshake error from 10.128.0.1:55164: EOF level=info timestamp=2018-07-17T03:31:45.499595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:31:50.726874Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:31:53 http: TLS handshake error from 10.128.0.1:55220: EOF level=info timestamp=2018-07-17T03:32:02.002880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:03 http: TLS handshake error from 10.128.0.1:55278: EOF 2018/07/17 03:32:13 http: TLS handshake error from 10.128.0.1:55328: EOF 2018/07/17 03:32:23 http: TLS handshake error from 10.128.0.1:55382: EOF 2018/07/17 03:32:33 http: TLS handshake error from 10.128.0.1:55436: EOF 2018/07/17 03:32:43 http: TLS handshake error from 10.128.0.1:55486: EOF 2018/07/17 03:32:53 http: TLS handshake error from 10.128.0.1:55540: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:31:26.501247Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-17T03:31:26.501271Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-17T03:31:26.503390Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-17T03:31:26.503463Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-17T03:31:26.503573Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-17T03:31:26.503603Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-17T03:31:26.503787Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-17T03:31:26.517687Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-17T03:31:26.517843Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-17T03:31:26.517888Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-17T03:31:26.517974Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
level=info timestamp=2018-07-17T03:32:29.814030Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:32:29.814737Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:01.860173Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:01.861544Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmiv7lc4-6q52z Pod phase: Pending • Failure [31.060 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VM Accelerated Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:648 should request a KVM and TUN device [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:660 Expected <*errors.StatusError | 0xc42075f4d0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:662 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running 2018/07/17 03:32:02 http: TLS handshake error from 10.129.0.1:39162: EOF 2018/07/17 03:32:12 http: TLS handshake error from 10.129.0.1:39168: EOF level=info timestamp=2018-07-17T03:32:16.445668Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:22 http: TLS handshake error from 10.129.0.1:39174: EOF 2018/07/17 03:32:32 http: TLS handshake error from 10.129.0.1:39180: EOF 2018/07/17 03:32:42 http: TLS handshake error from 10.129.0.1:39186: EOF level=info timestamp=2018-07-17T03:32:46.528148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 
contentLength=19 2018/07/17 03:32:52 http: TLS handshake error from 10.129.0.1:39192: EOF 2018/07/17 03:33:02 http: TLS handshake error from 10.129.0.1:39198: EOF 2018/07/17 03:33:12 http: TLS handshake error from 10.129.0.1:39204: EOF level=info timestamp=2018-07-17T03:33:16.534337Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:33:22 http: TLS handshake error from 10.129.0.1:39210: EOF 2018/07/17 03:33:32 http: TLS handshake error from 10.129.0.1:39216: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running level=info timestamp=2018-07-17T03:31:40.153797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:31:43 http: TLS handshake error from 10.128.0.1:55164: EOF level=info timestamp=2018-07-17T03:31:45.499595Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:31:50.726874Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:31:53 http: TLS handshake error from 10.128.0.1:55220: EOF level=info timestamp=2018-07-17T03:32:02.002880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:03 http: TLS handshake error from 10.128.0.1:55278: EOF 2018/07/17 03:32:13 http: TLS handshake error from 10.128.0.1:55328: EOF 2018/07/17 03:32:23 http: TLS handshake error from 10.128.0.1:55382: EOF 2018/07/17 03:32:33 http: TLS handshake error from 10.128.0.1:55436: EOF 2018/07/17 03:32:43 http: TLS handshake error from 10.128.0.1:55486: EOF 2018/07/17 03:32:53 http: TLS handshake error from 10.128.0.1:55540: EOF 2018/07/17 03:33:03 http: TLS handshake error from 10.128.0.1:55592: EOF 2018/07/17 03:33:13 http: TLS handshake error from 10.128.0.1:55642: EOF 2018/07/17 03:33:23 http: TLS handshake error from 10.128.0.1:55696: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:31:26.503463Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-17T03:31:26.503573Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-17T03:31:26.503603Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-17T03:31:26.503787Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-17T03:31:26.517687Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-17T03:31:26.517843Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-17T03:31:26.517888Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." 
level=info timestamp=2018-07-17T03:31:26.517974Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-17T03:32:29.814030Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:32:29.814737Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:01.860173Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:01.861544Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:02.982352Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv7lc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv7lc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 09bf175e-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv7lc4" level=info timestamp=2018-07-17T03:33:33.225098Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:33.226486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmiwtn4t-b9h4q Pod phase: Pending • Failure [31.022 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:70 VM Accelerated Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:648 should not enable emulation in virt-launcher [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:695 Expected <*errors.StatusError | 0xc420934f30>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:697 ------------------------------ •• Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T03:32:16.445668Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:22 http: TLS handshake error from 10.129.0.1:39174: EOF 2018/07/17 03:32:32 http: TLS handshake error from 10.129.0.1:39180: EOF 2018/07/17 03:32:42 http: TLS handshake error from 10.129.0.1:39186: EOF level=info timestamp=2018-07-17T03:32:46.528148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:52 http: TLS handshake error from 10.129.0.1:39192: EOF 2018/07/17 03:33:02 http: TLS handshake error from 10.129.0.1:39198: EOF 2018/07/17 03:33:12 http: TLS handshake error from 10.129.0.1:39204: EOF level=info timestamp=2018-07-17T03:33:16.534337Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:33:22 http: TLS handshake error from 10.129.0.1:39210: EOF 2018/07/17 03:33:32 http: TLS handshake error from 10.129.0.1:39216: EOF 2018/07/17 03:33:42 http: TLS handshake error from 10.129.0.1:39222: EOF level=info timestamp=2018-07-17T03:33:46.584923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:33:52 http: TLS handshake error 
from 10.129.0.1:39228: EOF 2018/07/17 03:34:02 http: TLS handshake error from 10.129.0.1:39234: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:31:53 http: TLS handshake error from 10.128.0.1:55220: EOF level=info timestamp=2018-07-17T03:32:02.002880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:32:03 http: TLS handshake error from 10.128.0.1:55278: EOF 2018/07/17 03:32:13 http: TLS handshake error from 10.128.0.1:55328: EOF 2018/07/17 03:32:23 http: TLS handshake error from 10.128.0.1:55382: EOF 2018/07/17 03:32:33 http: TLS handshake error from 10.128.0.1:55436: EOF 2018/07/17 03:32:43 http: TLS handshake error from 10.128.0.1:55486: EOF 2018/07/17 03:32:53 http: TLS handshake error from 10.128.0.1:55540: EOF 2018/07/17 03:33:03 http: TLS handshake error from 10.128.0.1:55592: EOF 2018/07/17 03:33:13 http: TLS handshake error from 10.128.0.1:55642: EOF 2018/07/17 03:33:23 http: TLS handshake error from 10.128.0.1:55696: EOF 2018/07/17 03:33:33 http: TLS handshake error from 10.128.0.1:55746: EOF 2018/07/17 03:33:43 http: TLS handshake error from 10.128.0.1:55796: EOF 2018/07/17 03:33:53 http: TLS handshake error from 10.128.0.1:55850: EOF 2018/07/17 03:34:03 http: TLS handshake error from 10.128.0.1:55900: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:31:26.517843Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-17T03:31:26.517888Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-17T03:31:26.517974Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
level=info timestamp=2018-07-17T03:32:29.814030Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:32:29.814737Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:01.860173Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:01.861544Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:02.982352Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv7lc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv7lc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 09bf175e-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv7lc4" level=info timestamp=2018-07-17T03:33:33.225098Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:33.226486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:33.890811Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwtn4t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwtn4t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1c73e19a-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwtn4t" level=info timestamp=2018-07-17T03:33:33.932505Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwtn4t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwtn4t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1c73e19a-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwtn4t" level=info timestamp=2018-07-17T03:34:04.705933Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= uid=2f37283f-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:04.706152Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= 
uid=2f37283f-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:04.888039Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-zrw7w
Pod phase: Running
level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
Pod name: virt-launcher-testvmi49fd5-6d2m2
Pod phase: Pending

------------------------------
• Failure [30.896 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Delete a VirtualMachineInstance's Pod
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:764
    should result in the VirtualMachineInstance moving to a finalized state [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:765

    Expected error:
        <*errors.StatusError | 0xc4201553b0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:768
------------------------------
STEP: Creating the VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:32:42 http: TLS handshake error from 10.129.0.1:39186: EOF
level=info timestamp=2018-07-17T03:32:46.528148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:32:52 http: TLS handshake error from 10.129.0.1:39192: EOF
2018/07/17 03:33:02 http: TLS handshake error from 10.129.0.1:39198: EOF
2018/07/17 03:33:12 http: TLS handshake error from 10.129.0.1:39204: EOF
level=info timestamp=2018-07-17T03:33:16.534337Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:33:22 http: TLS handshake error from 10.129.0.1:39210: EOF
2018/07/17 03:33:32 http: TLS handshake error from 10.129.0.1:39216: EOF
2018/07/17 03:33:42 http: TLS handshake error from 10.129.0.1:39222: EOF
level=info timestamp=2018-07-17T03:33:46.584923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:33:52 http: TLS handshake error from 10.129.0.1:39228: EOF 2018/07/17 03:34:02 http: TLS handshake error from 10.129.0.1:39234: EOF 2018/07/17 03:34:12 http: TLS handshake error from 10.129.0.1:39240: EOF 2018/07/17 03:34:22 http: TLS handshake error from 10.129.0.1:39246: EOF 2018/07/17 03:34:32 http: TLS handshake error from 10.129.0.1:39252: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:32:23 http: TLS handshake error from 10.128.0.1:55382: EOF 2018/07/17 03:32:33 http: TLS handshake error from 10.128.0.1:55436: EOF 2018/07/17 03:32:43 http: TLS handshake error from 10.128.0.1:55486: EOF 2018/07/17 03:32:53 http: TLS handshake error from 10.128.0.1:55540: EOF 2018/07/17 03:33:03 http: TLS handshake error from 10.128.0.1:55592: EOF 2018/07/17 03:33:13 http: TLS handshake error from 10.128.0.1:55642: EOF 2018/07/17 03:33:23 http: TLS handshake error from 10.128.0.1:55696: EOF 2018/07/17 03:33:33 http: TLS handshake error from 10.128.0.1:55746: EOF 2018/07/17 03:33:43 http: TLS handshake error from 10.128.0.1:55796: EOF 2018/07/17 03:33:53 http: TLS handshake error from 10.128.0.1:55850: EOF 2018/07/17 03:34:03 http: TLS handshake error from 10.128.0.1:55900: EOF 2018/07/17 03:34:13 http: TLS handshake error from 10.128.0.1:55950: EOF level=info timestamp=2018-07-17T03:34:15.471849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:23 http: TLS handshake error from 10.128.0.1:56004: EOF 2018/07/17 03:34:33 http: TLS handshake error from 10.128.0.1:56054: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:32:29.814737Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib9kg2 kind= uid=f6b07d90-8971-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:01.860173Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:01.861544Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv7lc4 kind= uid=09bf175e-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:02.982352Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv7lc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv7lc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 09bf175e-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv7lc4" level=info timestamp=2018-07-17T03:33:33.225098Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:33.226486Z pos=preset.go:165 component=virt-controller 
service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:33.890811Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwtn4t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwtn4t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1c73e19a-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwtn4t" level=info timestamp=2018-07-17T03:33:33.932505Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwtn4t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwtn4t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1c73e19a-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwtn4t" level=info timestamp=2018-07-17T03:34:04.705933Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= uid=2f37283f-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:04.706152Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= uid=2f37283f-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:04.888039Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" level=info timestamp=2018-07-17T03:34:05.379073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi49fd5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2f37283f-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" level=info timestamp=2018-07-17T03:34:35.557579Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:35.560750Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:35.673921Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq6pjr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq6pjr" Pod name: 
virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
Pod name: virt-launcher-testvmiq6pjr-p575q
Pod phase: Pending

• Failure [30.774 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:796
    with an active pod.
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:797
      should result in pod being terminated [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:798

      Expected error:
          <*errors.StatusError | 0xc420154b40>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:802
------------------------------
STEP: Creating the VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:33:02 http: TLS handshake error from 10.129.0.1:39198: EOF
2018/07/17 03:33:12 http: TLS handshake error from 10.129.0.1:39204: EOF
level=info timestamp=2018-07-17T03:33:16.534337Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:33:22 http: TLS handshake error from 10.129.0.1:39210: EOF
2018/07/17 03:33:32 http: TLS handshake error from 10.129.0.1:39216: EOF
2018/07/17 03:33:42 http: TLS handshake error from 10.129.0.1:39222: EOF
level=info timestamp=2018-07-17T03:33:46.584923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:33:52 http: TLS handshake error from 10.129.0.1:39228: EOF
2018/07/17 03:34:02 http: TLS handshake error from 10.129.0.1:39234: EOF
2018/07/17 03:34:12 http: TLS handshake error from 10.129.0.1:39240: EOF
2018/07/17 03:34:22 http: TLS handshake error from 10.129.0.1:39246: EOF
2018/07/17 03:34:32 http: TLS handshake error from 10.129.0.1:39252: EOF
2018/07/17 03:34:42 http: TLS handshake error from 10.129.0.1:39258: EOF
2018/07/17 03:34:52 http: TLS handshake error from 10.129.0.1:39264: EOF
2018/07/17 03:35:02 http: TLS handshake error from 10.129.0.1:39270: EOF
Pod name:
virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:33:03 http: TLS handshake error from 10.128.0.1:55592: EOF 2018/07/17 03:33:13 http: TLS handshake error from 10.128.0.1:55642: EOF 2018/07/17 03:33:23 http: TLS handshake error from 10.128.0.1:55696: EOF 2018/07/17 03:33:33 http: TLS handshake error from 10.128.0.1:55746: EOF 2018/07/17 03:33:43 http: TLS handshake error from 10.128.0.1:55796: EOF 2018/07/17 03:33:53 http: TLS handshake error from 10.128.0.1:55850: EOF 2018/07/17 03:34:03 http: TLS handshake error from 10.128.0.1:55900: EOF 2018/07/17 03:34:13 http: TLS handshake error from 10.128.0.1:55950: EOF level=info timestamp=2018-07-17T03:34:15.471849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:23 http: TLS handshake error from 10.128.0.1:56004: EOF 2018/07/17 03:34:33 http: TLS handshake error from 10.128.0.1:56054: EOF 2018/07/17 03:34:43 http: TLS handshake error from 10.128.0.1:56104: EOF level=info timestamp=2018-07-17T03:34:45.368015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:53 http: TLS handshake error from 10.128.0.1:56158: EOF 2018/07/17 03:35:03 http: TLS handshake error from 10.128.0.1:56208: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:33:02.982352Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv7lc4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv7lc4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 09bf175e-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv7lc4" level=info timestamp=2018-07-17T03:33:33.225098Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:33:33.226486Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwtn4t kind= uid=1c73e19a-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:33:33.890811Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwtn4t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwtn4t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1c73e19a-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwtn4t" level=info timestamp=2018-07-17T03:33:33.932505Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwtn4t\": StorageError: invalid object, Code: 4, Key: 
/kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwtn4t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1c73e19a-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwtn4t" level=info timestamp=2018-07-17T03:34:04.705933Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= uid=2f37283f-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:04.706152Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= uid=2f37283f-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:04.888039Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" level=info timestamp=2018-07-17T03:34:05.379073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi49fd5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2f37283f-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" level=info timestamp=2018-07-17T03:34:35.557579Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:35.560750Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:35.673921Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq6pjr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq6pjr" level=info timestamp=2018-07-17T03:35:06.303110Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:06.308712Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:06.527585Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirxmpq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirxmpq" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z 
pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
Pod name: virt-launcher-testvmirxmpq-zn5fp
Pod phase: Pending

• Failure [30.734 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:796
    with grace period greater than 0
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:821
      should run graceful shutdown [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:822

      Expected error:
          <*errors.StatusError | 0xc420935170>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "Timeout: request did not complete within allowed duration",
                  Reason: "Timeout",
                  Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                  Code: 504,
              },
          }
          Timeout: request did not complete within allowed duration
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:846
------------------------------
STEP: Setting a VirtualMachineInstance termination grace period to 5
STEP: Creating the VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:33:32 http: TLS handshake error from 10.129.0.1:39216: EOF
2018/07/17 03:33:42 http: TLS handshake error from 10.129.0.1:39222: EOF
level=info timestamp=2018-07-17T03:33:46.584923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:33:52 http: TLS handshake error from 10.129.0.1:39228: EOF
2018/07/17 03:34:02 http: TLS handshake error from 10.129.0.1:39234: EOF
2018/07/17 03:34:12 http: TLS handshake error from 10.129.0.1:39240: EOF
2018/07/17 03:34:22 http: TLS handshake error from 10.129.0.1:39246: EOF
2018/07/17 03:34:32 http: TLS handshake error from 10.129.0.1:39252: EOF
2018/07/17 03:34:42 http: TLS handshake error from 10.129.0.1:39258: EOF
2018/07/17 03:34:52 http: TLS handshake error from 10.129.0.1:39264: EOF
2018/07/17 03:35:02 http: TLS handshake error from 10.129.0.1:39270: EOF
2018/07/17 03:35:12 http: TLS handshake error from 10.129.0.1:39276: EOF
level=info timestamp=2018-07-17T03:35:16.542706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:35:22 http: TLS handshake error from 10.129.0.1:39284: EOF
2018/07/17
03:35:32 http: TLS handshake error from 10.129.0.1:39290: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:33:33 http: TLS handshake error from 10.128.0.1:55746: EOF 2018/07/17 03:33:43 http: TLS handshake error from 10.128.0.1:55796: EOF 2018/07/17 03:33:53 http: TLS handshake error from 10.128.0.1:55850: EOF 2018/07/17 03:34:03 http: TLS handshake error from 10.128.0.1:55900: EOF 2018/07/17 03:34:13 http: TLS handshake error from 10.128.0.1:55950: EOF level=info timestamp=2018-07-17T03:34:15.471849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:23 http: TLS handshake error from 10.128.0.1:56004: EOF 2018/07/17 03:34:33 http: TLS handshake error from 10.128.0.1:56054: EOF 2018/07/17 03:34:43 http: TLS handshake error from 10.128.0.1:56104: EOF level=info timestamp=2018-07-17T03:34:45.368015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:53 http: TLS handshake error from 10.128.0.1:56158: EOF 2018/07/17 03:35:03 http: TLS handshake error from 10.128.0.1:56208: EOF 2018/07/17 03:35:13 http: TLS handshake error from 10.128.0.1:56258: EOF 2018/07/17 03:35:23 http: TLS handshake error from 10.128.0.1:56312: EOF 2018/07/17 03:35:33 http: TLS handshake error from 10.128.0.1:56362: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:33:33.932505Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwtn4t\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwtn4t, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1c73e19a-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwtn4t" level=info timestamp=2018-07-17T03:34:04.705933Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= uid=2f37283f-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:04.706152Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi49fd5 kind= uid=2f37283f-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:04.888039Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" level=info timestamp=2018-07-17T03:34:05.379073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi49fd5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 
2f37283f-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" level=info timestamp=2018-07-17T03:34:35.557579Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:35.560750Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:35.673921Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq6pjr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq6pjr" level=info timestamp=2018-07-17T03:35:06.303110Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:06.308712Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:06.527585Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirxmpq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirxmpq" level=info timestamp=2018-07-17T03:35:37.136261Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:37.145829Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:37.392434Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:35:37.491942Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
Pod name: virt-launcher-testvmiq9vcc-sdnlc
Pod phase: Pending

• Failure [30.916 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48
  Killed VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:873
    should be in Failed phase [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:874

    Expected
        <*errors.StatusError | 0xc420934510>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
    to be nil

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:877
------------------------------
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:34:02 http: TLS handshake error from 10.129.0.1:39234: EOF
2018/07/17 03:34:12 http: TLS handshake error from 10.129.0.1:39240: EOF
2018/07/17 03:34:22 http: TLS handshake error from 10.129.0.1:39246: EOF
2018/07/17 03:34:32 http: TLS handshake error from 10.129.0.1:39252: EOF
2018/07/17 03:34:42 http: TLS handshake error from 10.129.0.1:39258: EOF
2018/07/17 03:34:52 http: TLS handshake error from 10.129.0.1:39264: EOF
2018/07/17 03:35:02 http: TLS handshake error from 10.129.0.1:39270: EOF
2018/07/17 03:35:12 http: TLS handshake error from 10.129.0.1:39276: EOF
level=info timestamp=2018-07-17T03:35:16.542706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:35:22 http: TLS handshake error from 10.129.0.1:39284: EOF
2018/07/17 03:35:32 http: TLS handshake error from 10.129.0.1:39290: EOF
2018/07/17 03:35:42 http: TLS handshake error from 10.129.0.1:39296: EOF
level=info timestamp=2018-07-17T03:35:46.461374Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:35:52 http: TLS handshake error from 10.129.0.1:39302: EOF
2018/07/17 03:36:02 http: TLS handshake error from 10.129.0.1:39308: EOF
Pod name: virt-api-7d79764579-lrr44
Pod phase: Running
2018/07/17 03:34:03 http: TLS handshake error from 10.128.0.1:55900: EOF
2018/07/17 03:34:13 http: TLS handshake error from
10.128.0.1:55950: EOF level=info timestamp=2018-07-17T03:34:15.471849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:23 http: TLS handshake error from 10.128.0.1:56004: EOF 2018/07/17 03:34:33 http: TLS handshake error from 10.128.0.1:56054: EOF 2018/07/17 03:34:43 http: TLS handshake error from 10.128.0.1:56104: EOF level=info timestamp=2018-07-17T03:34:45.368015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:53 http: TLS handshake error from 10.128.0.1:56158: EOF 2018/07/17 03:35:03 http: TLS handshake error from 10.128.0.1:56208: EOF 2018/07/17 03:35:13 http: TLS handshake error from 10.128.0.1:56258: EOF 2018/07/17 03:35:23 http: TLS handshake error from 10.128.0.1:56312: EOF 2018/07/17 03:35:33 http: TLS handshake error from 10.128.0.1:56362: EOF 2018/07/17 03:35:43 http: TLS handshake error from 10.128.0.1:56412: EOF 2018/07/17 03:35:53 http: TLS handshake error from 10.128.0.1:56466: EOF 2018/07/17 03:36:03 http: TLS handshake error from 10.128.0.1:56516: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:34:05.379073Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi49fd5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi49fd5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 2f37283f-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi49fd5" level=info timestamp=2018-07-17T03:34:35.557579Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:34:35.560750Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq6pjr kind= uid=41a42656-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:34:35.673921Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq6pjr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq6pjr" level=info timestamp=2018-07-17T03:35:06.303110Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:06.308712Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:06.527585Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvmirxmpq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirxmpq" level=info timestamp=2018-07-17T03:35:37.136261Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:37.145829Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:37.392434Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:35:37.491942Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:35:38.127263Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiq9vcc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66541f43-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:36:08.484215Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:08.487639Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:36:08.714014Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmillzvt-wmwsb Pod phase: Pending • Failure [31.300 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:48 Killed VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:873 should be left alone by virt-handler [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:901 Expected <*errors.StatusError | 0xc42075ec60>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } to be nil /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:904 ------------------------------ STEP: Starting a VirtualMachineInstance Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running 2018/07/17 03:34:42 http: TLS handshake error from 10.129.0.1:39258: EOF 2018/07/17 03:34:52 http: TLS handshake error from 10.129.0.1:39264: EOF 2018/07/17 03:35:02 http: TLS handshake error from 10.129.0.1:39270: EOF 2018/07/17 03:35:12 http: TLS handshake error from 10.129.0.1:39276: EOF level=info timestamp=2018-07-17T03:35:16.542706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:35:22 http: TLS handshake error from 10.129.0.1:39284: EOF 2018/07/17 03:35:32 http: TLS handshake error from 10.129.0.1:39290: EOF 2018/07/17 03:35:42 http: TLS handshake error from 10.129.0.1:39296: EOF level=info timestamp=2018-07-17T03:35:46.461374Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:35:52 http: TLS handshake error from 10.129.0.1:39302: EOF 2018/07/17 03:36:02 http: TLS handshake error from 10.129.0.1:39308: EOF 2018/07/17 03:36:12 http: TLS handshake error from 10.129.0.1:39314: EOF level=info timestamp=2018-07-17T03:36:16.407878Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:22 http: TLS handshake error from 10.129.0.1:39320: EOF 2018/07/17 03:36:32 http: TLS handshake error from 10.129.0.1:39326: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 
03:34:23 http: TLS handshake error from 10.128.0.1:56004: EOF 2018/07/17 03:34:33 http: TLS handshake error from 10.128.0.1:56054: EOF 2018/07/17 03:34:43 http: TLS handshake error from 10.128.0.1:56104: EOF level=info timestamp=2018-07-17T03:34:45.368015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:53 http: TLS handshake error from 10.128.0.1:56158: EOF 2018/07/17 03:35:03 http: TLS handshake error from 10.128.0.1:56208: EOF 2018/07/17 03:35:13 http: TLS handshake error from 10.128.0.1:56258: EOF 2018/07/17 03:35:23 http: TLS handshake error from 10.128.0.1:56312: EOF 2018/07/17 03:35:33 http: TLS handshake error from 10.128.0.1:56362: EOF 2018/07/17 03:35:43 http: TLS handshake error from 10.128.0.1:56412: EOF 2018/07/17 03:35:53 http: TLS handshake error from 10.128.0.1:56466: EOF 2018/07/17 03:36:03 http: TLS handshake error from 10.128.0.1:56516: EOF 2018/07/17 03:36:13 http: TLS handshake error from 10.128.0.1:56566: EOF 2018/07/17 03:36:23 http: TLS handshake error from 10.128.0.1:56620: EOF 2018/07/17 03:36:33 http: TLS handshake error from 10.128.0.1:56672: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:34:35.673921Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq6pjr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq6pjr" level=info timestamp=2018-07-17T03:35:06.303110Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:06.308712Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxmpq kind= uid=53f3de73-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:06.527585Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirxmpq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirxmpq" level=info timestamp=2018-07-17T03:35:37.136261Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:37.145829Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:37.392434Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:35:37.491942Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:35:38.127263Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiq9vcc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66541f43-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:36:08.484215Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:08.487639Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:36:08.714014Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" level=info timestamp=2018-07-17T03:36:09.050770Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmillzvt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 78fedfa0-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" level=info timestamp=2018-07-17T03:36:39.214440Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= uid=8b552389-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:39.219729Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= uid=8b552389-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmiqq54q-6bzpx Pod phase: Pending • Failure [30.782 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with 3 CPU cores /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56 should report 3 cpu cores under guest OS [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62 Expected error: <*errors.StatusError | 0xc4201550e0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:74 ------------------------------ STEP: Starting a VirtualMachineInstance S [SKIPPING] [0.423 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-2Mi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-2Mi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:160 ------------------------------ S [SKIPPING] [0.143 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-1Gi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-1Gi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:160 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod 
name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T03:35:16.542706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:35:22 http: TLS handshake error from 10.129.0.1:39284: EOF 2018/07/17 03:35:32 http: TLS handshake error from 10.129.0.1:39290: EOF 2018/07/17 03:35:42 http: TLS handshake error from 10.129.0.1:39296: EOF level=info timestamp=2018-07-17T03:35:46.461374Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:35:52 http: TLS handshake error from 10.129.0.1:39302: EOF 2018/07/17 03:36:02 http: TLS handshake error from 10.129.0.1:39308: EOF 2018/07/17 03:36:12 http: TLS handshake error from 10.129.0.1:39314: EOF level=info timestamp=2018-07-17T03:36:16.407878Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:22 http: TLS handshake error from 10.129.0.1:39320: EOF 2018/07/17 03:36:32 http: TLS handshake error from 10.129.0.1:39326: EOF 2018/07/17 03:36:42 http: TLS handshake error from 10.129.0.1:39332: EOF level=info timestamp=2018-07-17T03:36:46.471377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:52 http: TLS handshake error from 10.129.0.1:39338: EOF 2018/07/17 03:37:02 http: TLS handshake error from 10.129.0.1:39344: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running level=info timestamp=2018-07-17T03:34:45.368015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:34:53 http: TLS handshake error from 10.128.0.1:56158: EOF 2018/07/17 03:35:03 http: TLS handshake error from 10.128.0.1:56208: EOF 2018/07/17 03:35:13 http: TLS handshake error from 10.128.0.1:56258: EOF 2018/07/17 03:35:23 http: TLS handshake error from 10.128.0.1:56312: EOF 2018/07/17 03:35:33 http: TLS handshake error from 10.128.0.1:56362: EOF 2018/07/17 03:35:43 http: TLS handshake error from 10.128.0.1:56412: EOF 2018/07/17 03:35:53 http: TLS handshake error from 10.128.0.1:56466: EOF 2018/07/17 03:36:03 http: TLS handshake error from 10.128.0.1:56516: EOF 2018/07/17 03:36:13 http: TLS handshake error from 10.128.0.1:56566: EOF 2018/07/17 03:36:23 http: TLS handshake error from 10.128.0.1:56620: EOF 2018/07/17 03:36:33 http: TLS handshake error from 10.128.0.1:56672: EOF 2018/07/17 03:36:43 http: TLS handshake error from 10.128.0.1:56722: EOF 2018/07/17 03:36:53 http: TLS handshake error from 10.128.0.1:56776: EOF 2018/07/17 03:37:03 http: TLS handshake error from 10.128.0.1:56826: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:35:06.527585Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirxmpq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirxmpq" level=info timestamp=2018-07-17T03:35:37.136261Z 
pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:35:37.145829Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiq9vcc kind= uid=66541f43-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:35:37.392434Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:35:37.491942Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:35:38.127263Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiq9vcc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66541f43-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:36:08.484215Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:08.487639Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:36:08.714014Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" level=info timestamp=2018-07-17T03:36:09.050770Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmillzvt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 78fedfa0-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" level=info timestamp=2018-07-17T03:36:39.214440Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= uid=8b552389-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:39.219729Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= 
uid=8b552389-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:36:39.961789Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqq54q\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqq54q, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 8b552389-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqq54q" level=info timestamp=2018-07-17T03:37:10.444879Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:37:10.451355Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." 
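The recurring grpc "Server.Serve failed to create ServerTransport ... write: broken pipe" lines come from virt-handler's device-plugin servers, which listen on unix sockets under /var/lib/kubelet/device-plugins/. A peer (typically the kubelet) that dials the socket and hangs up before the HTTP/2 handshake finishes produces exactly this message, and the plugin keeps serving. A rough, hypothetical sketch of such a unix-socket gRPC server follows; the socket path is copied from the log and service registration is omitted.

// Rough sketch of a device-plugin-style gRPC server on a kubelet unix socket.
// Illustrative only; a real plugin registers the DevicePlugin service before
// serving and then registers itself with the kubelet.
package main

import (
	"log"
	"net"
	"os"

	"google.golang.org/grpc"
)

func main() {
	const sock = "/var/lib/kubelet/device-plugins/kubevirt-kvm.sock"
	_ = os.Remove(sock) // drop a stale socket left by a previous run

	lis, err := net.Listen("unix", sock)
	if err != nil {
		log.Fatalf("listen on %s: %v", sock, err)
	}

	srv := grpc.NewServer()
	// If a client dials the socket and closes it mid-handshake, Serve logs
	// "failed to create ServerTransport ... broken pipe" and carries on;
	// that is the benign-looking noise seen in the virt-handler log above.
	if err := srv.Serve(lis); err != nil {
		log.Fatalf("serve: %v", err)
	}
}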
level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmis98xg-t4f8l Pod phase: Pending • Failure [30.696 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108 with usupported page size /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:194 should failed to schedule the pod [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:195 Expected error: <*errors.StatusError | 0xc42075efc0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:214 ------------------------------ STEP: Starting a VM Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T03:35:46.461374Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:35:52 http: TLS handshake error from 10.129.0.1:39302: EOF 2018/07/17 03:36:02 http: TLS handshake 
error from 10.129.0.1:39308: EOF 2018/07/17 03:36:12 http: TLS handshake error from 10.129.0.1:39314: EOF level=info timestamp=2018-07-17T03:36:16.407878Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:22 http: TLS handshake error from 10.129.0.1:39320: EOF 2018/07/17 03:36:32 http: TLS handshake error from 10.129.0.1:39326: EOF 2018/07/17 03:36:42 http: TLS handshake error from 10.129.0.1:39332: EOF level=info timestamp=2018-07-17T03:36:46.471377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:52 http: TLS handshake error from 10.129.0.1:39338: EOF 2018/07/17 03:37:02 http: TLS handshake error from 10.129.0.1:39344: EOF 2018/07/17 03:37:12 http: TLS handshake error from 10.129.0.1:39350: EOF 2018/07/17 03:37:22 http: TLS handshake error from 10.129.0.1:39356: EOF 2018/07/17 03:37:32 http: TLS handshake error from 10.129.0.1:39362: EOF 2018/07/17 03:37:42 http: TLS handshake error from 10.129.0.1:39368: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:35:23 http: TLS handshake error from 10.128.0.1:56312: EOF 2018/07/17 03:35:33 http: TLS handshake error from 10.128.0.1:56362: EOF 2018/07/17 03:35:43 http: TLS handshake error from 10.128.0.1:56412: EOF 2018/07/17 03:35:53 http: TLS handshake error from 10.128.0.1:56466: EOF 2018/07/17 03:36:03 http: TLS handshake error from 10.128.0.1:56516: EOF 2018/07/17 03:36:13 http: TLS handshake error from 10.128.0.1:56566: EOF 2018/07/17 03:36:23 http: TLS handshake error from 10.128.0.1:56620: EOF 2018/07/17 03:36:33 http: TLS handshake error from 10.128.0.1:56672: EOF 2018/07/17 03:36:43 http: TLS handshake error from 10.128.0.1:56722: EOF 2018/07/17 03:36:53 http: TLS handshake error from 10.128.0.1:56776: EOF 2018/07/17 03:37:03 http: TLS handshake error from 10.128.0.1:56826: EOF 2018/07/17 03:37:13 http: TLS handshake error from 10.128.0.1:56876: EOF level=info timestamp=2018-07-17T03:37:15.535583Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:37:23 http: TLS handshake error from 10.128.0.1:56930: EOF 2018/07/17 03:37:33 http: TLS handshake error from 10.128.0.1:56980: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:35:38.127263Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiq9vcc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiq9vcc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66541f43-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiq9vcc" level=info timestamp=2018-07-17T03:36:08.484215Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:08.487639Z pos=preset.go:165 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmillzvt kind= uid=78fedfa0-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:36:08.714014Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" level=info timestamp=2018-07-17T03:36:09.050770Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmillzvt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 78fedfa0-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" level=info timestamp=2018-07-17T03:36:39.214440Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= uid=8b552389-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:39.219729Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= uid=8b552389-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:36:39.961789Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqq54q\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqq54q, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 8b552389-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqq54q" level=info timestamp=2018-07-17T03:37:10.444879Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:37:10.451355Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:37:11.097902Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis98xg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis98xg" level=info timestamp=2018-07-17T03:37:11.111081Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis98xg\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmis98xg, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 9df225e4-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmis98xg" level=info timestamp=2018-07-17T03:37:41.291073Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9gbqn kind= uid=b052bce5-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:37:41.292066Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9gbqn kind= uid=b052bce5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:37:41.551719Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9gbqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9gbqn" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmi9gbqn-mzg2h Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.808 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238 when CPU model defined [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:277 should report defined CPU model /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:278 Expected error: <*errors.StatusError | 0xc42075e5a0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1552 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T03:36:16.407878Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:22 http: TLS handshake error from 10.129.0.1:39320: EOF 2018/07/17 03:36:32 http: TLS handshake error from 10.129.0.1:39326: EOF 2018/07/17 03:36:42 http: TLS handshake error from 10.129.0.1:39332: EOF level=info timestamp=2018-07-17T03:36:46.471377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:52 http: TLS handshake 
error from 10.129.0.1:39338: EOF 2018/07/17 03:37:02 http: TLS handshake error from 10.129.0.1:39344: EOF 2018/07/17 03:37:12 http: TLS handshake error from 10.129.0.1:39350: EOF 2018/07/17 03:37:22 http: TLS handshake error from 10.129.0.1:39356: EOF 2018/07/17 03:37:32 http: TLS handshake error from 10.129.0.1:39362: EOF 2018/07/17 03:37:42 http: TLS handshake error from 10.129.0.1:39368: EOF level=info timestamp=2018-07-17T03:37:46.365687Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:37:52 http: TLS handshake error from 10.129.0.1:39374: EOF 2018/07/17 03:38:02 http: TLS handshake error from 10.129.0.1:39380: EOF 2018/07/17 03:38:12 http: TLS handshake error from 10.129.0.1:39386: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:35:53 http: TLS handshake error from 10.128.0.1:56466: EOF 2018/07/17 03:36:03 http: TLS handshake error from 10.128.0.1:56516: EOF 2018/07/17 03:36:13 http: TLS handshake error from 10.128.0.1:56566: EOF 2018/07/17 03:36:23 http: TLS handshake error from 10.128.0.1:56620: EOF 2018/07/17 03:36:33 http: TLS handshake error from 10.128.0.1:56672: EOF 2018/07/17 03:36:43 http: TLS handshake error from 10.128.0.1:56722: EOF 2018/07/17 03:36:53 http: TLS handshake error from 10.128.0.1:56776: EOF 2018/07/17 03:37:03 http: TLS handshake error from 10.128.0.1:56826: EOF 2018/07/17 03:37:13 http: TLS handshake error from 10.128.0.1:56876: EOF level=info timestamp=2018-07-17T03:37:15.535583Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:37:23 http: TLS handshake error from 10.128.0.1:56930: EOF 2018/07/17 03:37:33 http: TLS handshake error from 10.128.0.1:56980: EOF 2018/07/17 03:37:43 http: TLS handshake error from 10.128.0.1:57030: EOF 2018/07/17 03:37:53 http: TLS handshake error from 10.128.0.1:57084: EOF 2018/07/17 03:38:03 http: TLS handshake error from 10.128.0.1:57134: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:36:09.050770Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmillzvt\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmillzvt, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 78fedfa0-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmillzvt" level=info timestamp=2018-07-17T03:36:39.214440Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= uid=8b552389-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:36:39.219729Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqq54q kind= uid=8b552389-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:36:39.961789Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvmiqq54q\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiqq54q, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 8b552389-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqq54q" level=info timestamp=2018-07-17T03:37:10.444879Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:37:10.451355Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:37:11.097902Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis98xg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis98xg" level=info timestamp=2018-07-17T03:37:11.111081Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis98xg\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmis98xg, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 9df225e4-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis98xg" level=info timestamp=2018-07-17T03:37:41.291073Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9gbqn kind= uid=b052bce5-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:37:41.292066Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9gbqn kind= uid=b052bce5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:37:41.551719Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9gbqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9gbqn" level=info timestamp=2018-07-17T03:37:41.920678Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9gbqn\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9gbqn, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b052bce5-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9gbqn" level=info timestamp=2018-07-17T03:38:12.106731Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:12.114878Z pos=preset.go:165 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:12.447262Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
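Every spec failure in this stretch of the run reports the same *errors.StatusError: Reason "Timeout", Code 504, "Timeout: request did not complete within allowed duration". The request made during the "Starting a VirtualMachineInstance" step does not complete within the apiserver's allowed window, so the expectation that the returned error be nil fails while the launcher pod is still Pending. A small, hypothetical sketch of recognising that error shape in Go follows; createVMI is a stand-in that fabricates the same error the log shows.

// Hypothetical sketch: recognising the 504 "Timeout" StatusError reported by
// the failing specs above. createVMI only fabricates the error for the demo.
package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func createVMI() error {
	return &apierrors.StatusError{ErrStatus: metav1.Status{
		Status:  metav1.StatusFailure,
		Message: "Timeout: request did not complete within allowed duration",
		Reason:  metav1.StatusReasonTimeout,
		Code:    504,
	}}
}

func main() {
	err := createVMI()
	// apierrors.IsTimeout matches the "Timeout" reason, so a caller could
	// choose to retry the request instead of failing outright.
	fmt.Println("timeout?", apierrors.IsTimeout(err))
}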
Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmihqqgz-9nbsz Pod phase: Pending • Failure in Spec Setup (BeforeEach) [30.789 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238 when CPU model not defined [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:305 should report CPU model from libvirt capabilities /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:306 Expected error: <*errors.StatusError | 0xc420154c60>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1552 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T03:36:46.471377Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:36:52 http: TLS handshake error from 10.129.0.1:39338: EOF 2018/07/17 03:37:02 http: TLS handshake error from 10.129.0.1:39344: EOF 2018/07/17 03:37:12 http: TLS handshake error from 10.129.0.1:39350: EOF 2018/07/17 03:37:22 http: TLS handshake error from 10.129.0.1:39356: EOF 2018/07/17 03:37:32 http: TLS handshake error from 10.129.0.1:39362: EOF 2018/07/17 03:37:42 http: TLS handshake error from 10.129.0.1:39368: EOF level=info timestamp=2018-07-17T03:37:46.365687Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:37:52 http: TLS handshake error from 10.129.0.1:39374: EOF 2018/07/17 03:38:02 http: TLS handshake error from 10.129.0.1:39380: EOF 2018/07/17 03:38:12 http: TLS handshake error from 10.129.0.1:39386: EOF level=info 
timestamp=2018-07-17T03:38:16.498018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:38:22 http: TLS handshake error from 10.129.0.1:39392: EOF 2018/07/17 03:38:32 http: TLS handshake error from 10.129.0.1:39398: EOF 2018/07/17 03:38:42 http: TLS handshake error from 10.129.0.1:39404: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:36:23 http: TLS handshake error from 10.128.0.1:56620: EOF 2018/07/17 03:36:33 http: TLS handshake error from 10.128.0.1:56672: EOF 2018/07/17 03:36:43 http: TLS handshake error from 10.128.0.1:56722: EOF 2018/07/17 03:36:53 http: TLS handshake error from 10.128.0.1:56776: EOF 2018/07/17 03:37:03 http: TLS handshake error from 10.128.0.1:56826: EOF 2018/07/17 03:37:13 http: TLS handshake error from 10.128.0.1:56876: EOF level=info timestamp=2018-07-17T03:37:15.535583Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:37:23 http: TLS handshake error from 10.128.0.1:56930: EOF 2018/07/17 03:37:33 http: TLS handshake error from 10.128.0.1:56980: EOF 2018/07/17 03:37:43 http: TLS handshake error from 10.128.0.1:57030: EOF 2018/07/17 03:37:53 http: TLS handshake error from 10.128.0.1:57084: EOF 2018/07/17 03:38:03 http: TLS handshake error from 10.128.0.1:57134: EOF 2018/07/17 03:38:13 http: TLS handshake error from 10.128.0.1:57184: EOF 2018/07/17 03:38:23 http: TLS handshake error from 10.128.0.1:57238: EOF 2018/07/17 03:38:33 http: TLS handshake error from 10.128.0.1:57288: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:37:10.444879Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:37:10.451355Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis98xg kind= uid=9df225e4-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:37:11.097902Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis98xg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis98xg" level=info timestamp=2018-07-17T03:37:11.111081Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis98xg\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmis98xg, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 9df225e4-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis98xg" level=info timestamp=2018-07-17T03:37:41.291073Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9gbqn kind= uid=b052bce5-8972-11e8-a112-525500d15501 
msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:37:41.292066Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9gbqn kind= uid=b052bce5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:37:41.551719Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9gbqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9gbqn" level=info timestamp=2018-07-17T03:37:41.920678Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9gbqn\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9gbqn, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b052bce5-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9gbqn" level=info timestamp=2018-07-17T03:38:12.106731Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:12.114878Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:12.447262Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:12.850200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:12.956444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:43.301773Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:43.303881Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmij4tcz-t6hsn Pod phase: Pending • Failure [31.158 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 New VirtualMachineInstance with all supported drives /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:326 should have all the device nodes [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:349 Expected error: <*errors.StatusError | 0xc4208f63f0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:351 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running 2018/07/17 03:37:12 http: TLS handshake error from 10.129.0.1:39350: EOF 2018/07/17 03:37:22 http: TLS handshake error from 10.129.0.1:39356: EOF 2018/07/17 03:37:32 http: TLS handshake error from 10.129.0.1:39362: EOF 2018/07/17 03:37:42 http: TLS handshake error from 10.129.0.1:39368: EOF level=info timestamp=2018-07-17T03:37:46.365687Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:37:52 http: TLS handshake error from 10.129.0.1:39374: EOF 2018/07/17 03:38:02 http: TLS handshake error from 10.129.0.1:39380: EOF 2018/07/17 03:38:12 http: TLS handshake error from 10.129.0.1:39386: EOF level=info timestamp=2018-07-17T03:38:16.498018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:38:22 http: TLS handshake error from 10.129.0.1:39392: EOF 2018/07/17 03:38:32 http: TLS handshake error from 10.129.0.1:39398: EOF 2018/07/17 03:38:42 http: TLS handshake error from 10.129.0.1:39404: EOF 2018/07/17 03:38:52 http: TLS handshake error from 10.129.0.1:39412: EOF 2018/07/17 03:39:02 http: TLS handshake error from 10.129.0.1:39418: EOF 2018/07/17 03:39:12 http: TLS handshake error from 10.129.0.1:39424: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:37:23 http: TLS handshake error from 
10.128.0.1:56930: EOF 2018/07/17 03:37:33 http: TLS handshake error from 10.128.0.1:56980: EOF 2018/07/17 03:37:43 http: TLS handshake error from 10.128.0.1:57030: EOF 2018/07/17 03:37:53 http: TLS handshake error from 10.128.0.1:57084: EOF 2018/07/17 03:38:03 http: TLS handshake error from 10.128.0.1:57134: EOF 2018/07/17 03:38:13 http: TLS handshake error from 10.128.0.1:57184: EOF 2018/07/17 03:38:23 http: TLS handshake error from 10.128.0.1:57238: EOF 2018/07/17 03:38:33 http: TLS handshake error from 10.128.0.1:57288: EOF 2018/07/17 03:38:43 http: TLS handshake error from 10.128.0.1:57342: EOF level=info timestamp=2018-07-17T03:38:45.515888Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:38:53 http: TLS handshake error from 10.128.0.1:57396: EOF 2018/07/17 03:39:03 http: TLS handshake error from 10.128.0.1:57446: EOF level=info timestamp=2018-07-17T03:39:11.732799Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:39:11.801105Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:39:13 http: TLS handshake error from 10.128.0.1:57504: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:37:41.292066Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9gbqn kind= uid=b052bce5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:37:41.551719Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9gbqn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9gbqn" level=info timestamp=2018-07-17T03:37:41.920678Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9gbqn\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9gbqn, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b052bce5-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9gbqn" level=info timestamp=2018-07-17T03:38:12.106731Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:12.114878Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:12.447262Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": the object has been 
modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:12.850200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:12.956444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:43.301773Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:43.303881Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:43.910069Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmij4tcz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmij4tcz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d53bface-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmij4tcz" level=info timestamp=2018-07-17T03:39:14.112692Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:14.113143Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:14.396095Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:14.523096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" Pod name: virt-handler-p5l6g 
Pod phase: Running level=info timestamp=2018-07-17T03:31:51.210231Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:31:51.211284Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:31:51.314852Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:31:51.333001Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.373902Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:31:51 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:31:51.515844Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646334Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:51.646538Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:31:51.646608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
Pod name: virt-launcher-testvmifwzrs-d654f
Pod phase: Pending

• Failure [30.924 seconds]
Health Monitoring
/root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37
  A VirtualMachineInstance with a watchdog device
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56
    should be shut down when the watchdog expires [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57

    Expected
        <*errors.StatusError | 0xc42075f680>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
    to be nil

    /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:47
------------------------------
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:38:42 http: TLS handshake error from 10.129.0.1:39404: EOF
2018/07/17 03:38:52 http: TLS handshake error from 10.129.0.1:39412: EOF
2018/07/17 03:39:02 http: TLS handshake error from 10.129.0.1:39418: EOF
2018/07/17 03:39:12 http: TLS handshake error from 10.129.0.1:39424: EOF
level=info timestamp=2018-07-17T03:39:16.554689Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:39:22 http: TLS handshake error from 10.129.0.1:39430: EOF
2018/07/17 03:39:32 http: TLS handshake error from 10.129.0.1:39436: EOF
2018/07/17 03:39:42 http: TLS handshake error from 10.129.0.1:39442: EOF
2018/07/17 03:39:52 http: TLS handshake error from 10.129.0.1:39448: EOF
2018/07/17 03:40:02 http: TLS handshake error from 10.129.0.1:39454: EOF
2018/07/17 03:40:12 http: TLS handshake error from 10.129.0.1:39460: EOF
2018/07/17 03:40:22 http: TLS handshake error from 10.129.0.1:39466: EOF
2018/07/17 03:40:32 http: TLS handshake error from 10.129.0.1:39472: EOF
2018/07/17 03:40:42 http: TLS handshake error from 10.129.0.1:39478: EOF
level=info timestamp=2018-07-17T03:40:46.538449Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-api-7d79764579-lrr44
Pod phase: Running
2018/07/17 03:39:03 http: TLS handshake error from 10.128.0.1:57446: EOF
level=info
timestamp=2018-07-17T03:39:11.732799Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:39:11.801105Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:39:13 http: TLS handshake error from 10.128.0.1:57504: EOF 2018/07/17 03:39:23 http: TLS handshake error from 10.128.0.1:57558: EOF 2018/07/17 03:39:33 http: TLS handshake error from 10.128.0.1:57608: EOF 2018/07/17 03:39:43 http: TLS handshake error from 10.128.0.1:57658: EOF level=info timestamp=2018-07-17T03:39:45.326418Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:39:53 http: TLS handshake error from 10.128.0.1:57712: EOF 2018/07/17 03:40:03 http: TLS handshake error from 10.128.0.1:57762: EOF 2018/07/17 03:40:13 http: TLS handshake error from 10.128.0.1:57814: EOF level=info timestamp=2018-07-17T03:40:15.442207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:40:23 http: TLS handshake error from 10.128.0.1:57868: EOF 2018/07/17 03:40:33 http: TLS handshake error from 10.128.0.1:57918: EOF 2018/07/17 03:40:43 http: TLS handshake error from 10.128.0.1:57968: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:38:12.106731Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:12.114878Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihqqgz kind= uid=c2b12fcd-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:12.447262Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:12.850200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:12.956444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in 
precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:43.301773Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:43.303881Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:43.910069Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmij4tcz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmij4tcz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d53bface-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmij4tcz" level=info timestamp=2018-07-17T03:39:14.112692Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:14.113143Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:14.396095Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:14.523096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:44.985884Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:44.986793Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:45.189753Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5nl6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5nl6h" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:31:54.210407Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Processing local 
ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.210589Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind= uid=cbde5837-8971-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:31:54.280162Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:31:54.280290Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2r2bl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:40:01.854869Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:40:02.825323Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-17T03:40:02.826478Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind=Domain uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Domain is in state Paused reason Unknown" level=info timestamp=2018-07-17T03:40:03.597119Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:40:03.597650Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind=Domain uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:40:03.677216Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:40:03.714886Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:40:03.715382Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:40:03.804561Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:40:03.827885Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:40:03.844463Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmi5nl6h-g97xk Pod phase: Running level=info timestamp=2018-07-17T03:40:02.022114Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-17T03:40:02.795380Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T03:40:02.801720Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 631939b2-81fb-49ca-86a6-b23659e8baea" level=info timestamp=2018-07-17T03:40:02.808418Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T03:40:02.828355Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:40:03.412423Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T03:40:03.509333Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T03:40:03.514187Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-17T03:40:03.515292Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T03:40:03.606865Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:40:03.607219Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-17T03:40:03.654882Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T03:40:03.684857Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:40:03.826194Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 631939b2-81fb-49ca-86a6-b23659e8baea: 149" level=info timestamp=2018-07-17T03:40:03.842648Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Synced vmi" • Failure [92.816 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 92.050s. Expected error: <*errors.StatusError | 0xc4209345a0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64 ------------------------------ STEP: Starting a VirtualMachineInstance Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running 2018/07/17 03:40:12 http: TLS handshake error from 10.129.0.1:39460: EOF 2018/07/17 03:40:22 http: TLS handshake error from 10.129.0.1:39466: EOF 2018/07/17 03:40:32 http: TLS handshake error from 10.129.0.1:39472: EOF 2018/07/17 03:40:42 http: TLS handshake error from 10.129.0.1:39478: EOF level=info timestamp=2018-07-17T03:40:46.538449Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:40:52 http: TLS handshake error from 10.129.0.1:39484: EOF 2018/07/17 03:41:02 http: TLS handshake error from 10.129.0.1:39490: EOF 2018/07/17 03:41:12 http: TLS handshake error from 10.129.0.1:39496: EOF 2018/07/17 03:41:22 http: TLS handshake error from 10.129.0.1:39502: EOF 2018/07/17 03:41:32 http: TLS handshake error from 10.129.0.1:39508: EOF 2018/07/17 03:41:42 http: TLS handshake error from 10.129.0.1:39514: EOF 2018/07/17 03:41:52 http: TLS handshake error from 10.129.0.1:39520: EOF 2018/07/17 03:42:02 http: TLS handshake error 
from 10.129.0.1:39526: EOF 2018/07/17 03:42:12 http: TLS handshake error from 10.129.0.1:39532: EOF level=info timestamp=2018-07-17T03:42:16.350705Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-lrr44 Pod phase: Running level=info timestamp=2018-07-17T03:40:15.442207Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:40:23 http: TLS handshake error from 10.128.0.1:57868: EOF 2018/07/17 03:40:33 http: TLS handshake error from 10.128.0.1:57918: EOF 2018/07/17 03:40:43 http: TLS handshake error from 10.128.0.1:57968: EOF 2018/07/17 03:40:53 http: TLS handshake error from 10.128.0.1:58024: EOF 2018/07/17 03:41:03 http: TLS handshake error from 10.128.0.1:58080: EOF 2018/07/17 03:41:13 http: TLS handshake error from 10.128.0.1:58130: EOF level=info timestamp=2018-07-17T03:41:15.487691Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:41:23 http: TLS handshake error from 10.128.0.1:58184: EOF 2018/07/17 03:41:33 http: TLS handshake error from 10.128.0.1:58234: EOF 2018/07/17 03:41:43 http: TLS handshake error from 10.128.0.1:58284: EOF level=info timestamp=2018-07-17T03:41:45.497019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:41:53 http: TLS handshake error from 10.128.0.1:58338: EOF 2018/07/17 03:42:03 http: TLS handshake error from 10.128.0.1:58388: EOF 2018/07/17 03:42:13 http: TLS handshake error from 10.128.0.1:58438: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:38:12.850200Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:12.956444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihqqgz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihqqgz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c2b12fcd-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihqqgz" level=info timestamp=2018-07-17T03:38:43.301773Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:43.303881Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= 
uid=d53bface-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:43.910069Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmij4tcz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmij4tcz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d53bface-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmij4tcz" level=info timestamp=2018-07-17T03:39:14.112692Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:14.113143Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:14.396095Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:14.523096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:44.985884Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:44.986793Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:45.189753Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5nl6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5nl6h" level=info timestamp=2018-07-17T03:41:17.911129Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:41:17.911988Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:41:18.188954Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih2l4v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing 
VirtualMachineInstance kubevirt-test-default/testvmih2l4v" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:40:48.899029Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:40:48.899589Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:40:48.917438Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:40:48.917723Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi5nl6h kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:41:35.150343Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:41:35.839229Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-17T03:41:35.841457Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind=Domain uid=3174422e-8973-11e8-a112-525500d15501 msg="Domain is in state Paused reason Unknown" level=info timestamp=2018-07-17T03:41:36.543533Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:41:36.552362Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:41:36.554106Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind=Domain uid=3174422e-8973-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:41:36.557903Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:41:36.578034Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:41:36.620055Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:41:36.624087Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:41:36.633993Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmih2l4v-55b24 Pod phase: Running level=info timestamp=2018-07-17T03:41:35.284322Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Domain defined." level=info timestamp=2018-07-17T03:41:35.814460Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T03:41:35.840471Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:41:36.168036Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 6ce37463-2a8a-4cc4-af24-0ce433515eea" level=info timestamp=2018-07-17T03:41:36.223920Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T03:41:36.489002Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T03:41:36.506066Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T03:41:36.528642Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:41:36.533246Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:41:36.555091Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:41:36.555652Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-17T03:41:36.574902Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:41:36.586268Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:41:36.632890Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:41:37.230625Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6ce37463-2a8a-4cc4-af24-0ce433515eea: 148"

• Failure [93.025 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    with Alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
      should be successfully started
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        with CDRom PVC [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Timed out after 92.120s.
        Expected error:
            <*errors.StatusError | 0xc4208f61b0>: {
                ErrStatus: {
                    TypeMeta: {Kind: "", APIVersion: ""},
                    ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                    Status: "Failure",
                    Message: "Timeout: request did not complete within allowed duration",
                    Reason: "Timeout",
                    Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                    Code: 504,
                },
            }
            Timeout: request did not complete within allowed duration
        not to have occurred

        /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
------------------------------
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:42:02 http: TLS handshake error from 10.129.0.1:39526: EOF
2018/07/17 03:42:12 http: TLS handshake error from 10.129.0.1:39532: EOF
level=info timestamp=2018-07-17T03:42:16.350705Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:42:22 http: TLS handshake error from 10.129.0.1:39540: EOF
2018/07/17 03:42:32 http: TLS handshake error from 10.129.0.1:39546: EOF
2018/07/17 03:42:42 http: TLS handshake error from 10.129.0.1:39552: EOF
level=info timestamp=2018-07-17T03:42:46.424469Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:42:52 http: TLS handshake error from 10.129.0.1:39558: EOF
2018/07/17 03:43:02 http: TLS handshake error from 10.129.0.1:39564: EOF
2018/07/17 03:43:12 http: TLS handshake error from 10.129.0.1:39570: EOF
level=info timestamp=2018-07-17T03:43:16.575287Z pos=filter.go:46
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:43:22 http: TLS handshake error from 10.129.0.1:39576: EOF 2018/07/17 03:43:32 http: TLS handshake error from 10.129.0.1:39582: EOF 2018/07/17 03:43:42 http: TLS handshake error from 10.129.0.1:39588: EOF 2018/07/17 03:43:52 http: TLS handshake error from 10.129.0.1:39594: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:41:43 http: TLS handshake error from 10.128.0.1:58284: EOF level=info timestamp=2018-07-17T03:41:45.497019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:41:53 http: TLS handshake error from 10.128.0.1:58338: EOF 2018/07/17 03:42:03 http: TLS handshake error from 10.128.0.1:58388: EOF 2018/07/17 03:42:13 http: TLS handshake error from 10.128.0.1:58438: EOF 2018/07/17 03:42:23 http: TLS handshake error from 10.128.0.1:58492: EOF 2018/07/17 03:42:33 http: TLS handshake error from 10.128.0.1:58542: EOF 2018/07/17 03:42:43 http: TLS handshake error from 10.128.0.1:58592: EOF 2018/07/17 03:42:53 http: TLS handshake error from 10.128.0.1:58646: EOF 2018/07/17 03:43:03 http: TLS handshake error from 10.128.0.1:58696: EOF 2018/07/17 03:43:13 http: TLS handshake error from 10.128.0.1:58746: EOF 2018/07/17 03:43:23 http: TLS handshake error from 10.128.0.1:58800: EOF 2018/07/17 03:43:33 http: TLS handshake error from 10.128.0.1:58850: EOF 2018/07/17 03:43:43 http: TLS handshake error from 10.128.0.1:58902: EOF level=info timestamp=2018-07-17T03:43:45.453367Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:38:43.301773Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:38:43.303881Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmij4tcz kind= uid=d53bface-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:38:43.910069Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmij4tcz\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmij4tcz, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d53bface-8972-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmij4tcz" level=info timestamp=2018-07-17T03:39:14.112692Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:14.113143Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= 
uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:14.396095Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:14.523096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:44.985884Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:44.986793Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:45.189753Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5nl6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5nl6h" level=info timestamp=2018-07-17T03:41:17.911129Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:41:17.911988Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:41:18.188954Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih2l4v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih2l4v" level=info timestamp=2018-07-17T03:42:50.921800Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:42:50.927567Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:42:21.905599Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:42:21.915384Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-17T03:42:21.915672Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmih2l4v kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:42:21.926329Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:43:07.172395Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:43:08.023258Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-17T03:43:08.023725Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind=Domain uid=68df81d5-8973-11e8-a112-525500d15501 msg="Domain is in state Paused reason Unknown" level=info timestamp=2018-07-17T03:43:09.188587Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:43:09.189235Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind=Domain uid=68df81d5-8973-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:43:09.237059Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:43:09.239098Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:43:09.253276Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:43:09.275310Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:43:09.275671Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:43:09.281277Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmi6rscj-l9lc2 Pod phase: Running level=info timestamp=2018-07-17T03:43:07.342053Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Domain defined." level=info timestamp=2018-07-17T03:43:08.003048Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T03:43:08.020792Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID b725442b-a7e9-4a6c-8715-e2abd8018c56" level=info timestamp=2018-07-17T03:43:08.023704Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T03:43:08.026469Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:43:09.038006Z pos=monitor.go:222 component=virt-launcher msg="Found PID for b725442b-a7e9-4a6c-8715-e2abd8018c56: 147" level=info timestamp=2018-07-17T03:43:09.110662Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T03:43:09.162241Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T03:43:09.168133Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:43:09.177885Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:43:09.198450Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:43:09.198968Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-17T03:43:09.245153Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:43:09.255493Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:43:09.280633Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Synced vmi"

• Failure [92.944 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
Starting a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
with Alpine PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
should be successfully started and stopped multiple times
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
with Disk PVC [It]
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

Timed out after 92.038s.
Expected error:
    <*errors.StatusError | 0xc420934990>: {
        ErrStatus: {
            TypeMeta: {Kind: "", APIVersion: ""},
            ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
            Status: "Failure",
            Message: "Timeout: request did not complete within allowed duration",
            Reason: "Timeout",
            Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
            Code: 504,
        },
    }
Timeout: request did not complete within allowed duration
not to have occurred

/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
------------------------------
STEP: Starting and stopping the VirtualMachineInstance number of times
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:43:22 http: TLS handshake error from 10.129.0.1:39576: EOF
2018/07/17 03:43:32 http: TLS handshake error from 10.129.0.1:39582: EOF
2018/07/17 03:43:42 http: TLS handshake error from 10.129.0.1:39588: EOF
2018/07/17 03:43:52 http: TLS handshake error from 10.129.0.1:39594: EOF
2018/07/17 03:44:02 http: TLS handshake error from 10.129.0.1:39600: EOF
2018/07/17 03:44:12 http: TLS handshake error from 10.129.0.1:39606: EOF
level=info timestamp=2018-07-17T03:44:16.468140Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:44:22 http: TLS handshake error from 10.129.0.1:39612: EOF
2018/07/17 03:44:32 http: TLS handshake error from 10.129.0.1:39618: EOF
2018/07/17 03:44:42 http: TLS handshake error from 10.129.0.1:39624: EOF
level=info timestamp=2018-07-17T03:44:46.447424Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:44:52 http: TLS handshake error from
10.129.0.1:39630: EOF 2018/07/17 03:45:02 http: TLS handshake error from 10.129.0.1:39636: EOF 2018/07/17 03:45:12 http: TLS handshake error from 10.129.0.1:39642: EOF 2018/07/17 03:45:22 http: TLS handshake error from 10.129.0.1:39648: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:43:23 http: TLS handshake error from 10.128.0.1:58800: EOF 2018/07/17 03:43:33 http: TLS handshake error from 10.128.0.1:58850: EOF 2018/07/17 03:43:43 http: TLS handshake error from 10.128.0.1:58902: EOF level=info timestamp=2018-07-17T03:43:45.453367Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:43:53 http: TLS handshake error from 10.128.0.1:58956: EOF 2018/07/17 03:44:03 http: TLS handshake error from 10.128.0.1:59006: EOF 2018/07/17 03:44:13 http: TLS handshake error from 10.128.0.1:59056: EOF 2018/07/17 03:44:23 http: TLS handshake error from 10.128.0.1:59110: EOF 2018/07/17 03:44:33 http: TLS handshake error from 10.128.0.1:59160: EOF 2018/07/17 03:44:43 http: TLS handshake error from 10.128.0.1:59210: EOF 2018/07/17 03:44:53 http: TLS handshake error from 10.128.0.1:59264: EOF 2018/07/17 03:45:03 http: TLS handshake error from 10.128.0.1:59314: EOF 2018/07/17 03:45:13 http: TLS handshake error from 10.128.0.1:59364: EOF level=info timestamp=2018-07-17T03:45:15.586529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:45:23 http: TLS handshake error from 10.128.0.1:59418: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:39:14.113143Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifwzrs kind= uid=e7a747f5-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:14.396095Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:14.523096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:44.985884Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:44.986793Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:45.189753Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvmi5nl6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5nl6h" level=info timestamp=2018-07-17T03:41:17.911129Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:41:17.911988Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:41:18.188954Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih2l4v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih2l4v" level=info timestamp=2018-07-17T03:42:50.921800Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:42:50.927567Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:44:23.749797Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:44:23.759381Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:44:24.014600Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqqlg" level=info timestamp=2018-07-17T03:44:24.053491Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqqlg" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:44:42.110640Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-17T03:44:42.119502Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind=Domain uid=a03908b9-8973-11e8-a112-525500d15501 msg="Domain is in state Paused reason Unknown" level=info timestamp=2018-07-17T03:44:42.897230Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:44:42.903959Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:44:42.914632Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:44:42.915723Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind=Domain uid=a03908b9-8973-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:44:42.937536Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:44:42.994563Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:44:42.996912Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="No update processing required" level=error timestamp=2018-07-17T03:44:43.064892Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-17T03:44:43.065241Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvminqqlg" level=info timestamp=2018-07-17T03:44:43.065508Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:44:43.073853Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:44:43.074356Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:44:43.085089Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvminqqlg-x9wwq Pod phase: Running level=info timestamp=2018-07-17T03:44:42.089971Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T03:44:42.117671Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:44:42.300703Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 42c4751f-532f-497a-8222-4f8985d9cc3f" level=info timestamp=2018-07-17T03:44:42.325568Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T03:44:42.866222Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T03:44:42.883705Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T03:44:42.889041Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:44:42.890030Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:44:42.917482Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:44:42.918014Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-17T03:44:42.933518Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:44:42.944408Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:44:43.072709Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:44:43.081371Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:44:43.339263Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 42c4751f-532f-497a-8222-4f8985d9cc3f: 151"

• Failure [92.719 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
Starting a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
with Alpine PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71
should be successfully started and stopped multiple times
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
with CDRom PVC [It]
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

Timed out after 92.064s.
Expected error:
    <*errors.StatusError | 0xc420154750>: {
        ErrStatus: {
            TypeMeta: {Kind: "", APIVersion: ""},
            ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
            Status: "Failure",
            Message: "Timeout: request did not complete within allowed duration",
            Reason: "Timeout",
            Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
            Code: 504,
        },
    }
Timeout: request did not complete within allowed duration
not to have occurred

/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
------------------------------
STEP: Starting and stopping the VirtualMachineInstance number of times
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
2018/07/17 03:44:52 http: TLS handshake error from 10.129.0.1:39630: EOF
2018/07/17 03:45:02 http: TLS handshake error from 10.129.0.1:39636: EOF
2018/07/17 03:45:12 http: TLS handshake error from 10.129.0.1:39642: EOF
2018/07/17 03:45:22 http: TLS handshake error from 10.129.0.1:39648: EOF
2018/07/17 03:45:32 http: TLS handshake error from 10.129.0.1:39654: EOF
2018/07/17 03:45:42 http: TLS handshake error from 10.129.0.1:39660: EOF
2018/07/17 03:45:52 http: TLS handshake error from 10.129.0.1:39668: EOF
2018/07/17 03:46:02 http: TLS handshake error from 10.129.0.1:39674: EOF
2018/07/17 03:46:12 http: TLS handshake error from 10.129.0.1:39680: EOF
level=info timestamp=2018-07-17T03:46:16.560557Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:46:22 http: TLS handshake error from 10.129.0.1:39686: EOF
2018/07/17 03:46:32 http: TLS handshake error from 10.129.0.1:39692: EOF
2018/07/17 03:46:42 http: TLS handshake error from 10.129.0.1:39698: EOF
level=info timestamp=2018-07-17T03:46:46.511651Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:46:52 http: TLS handshake error from 10.129.0.1:39704: EOF
Pod name: virt-api-7d79764579-lrr44
Pod phase: Running
2018/07/17 03:44:53 http: TLS handshake error from 10.128.0.1:59264: EOF
2018/07/17 03:45:03 http: TLS handshake error from 10.128.0.1:59314: EOF
2018/07/17 03:45:13 http: TLS handshake error from 10.128.0.1:59364: EOF
level=info timestamp=2018-07-17T03:45:15.586529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:45:23 http: TLS handshake error from 10.128.0.1:59418: EOF
2018/07/17 03:45:33 http: TLS handshake error from 10.128.0.1:59468: EOF
2018/07/17 03:45:43 http: TLS handshake error from 10.128.0.1:59518: EOF
level=info timestamp=2018-07-17T03:45:45.526082Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:45:53 http: TLS handshake error from 10.128.0.1:59572: EOF
2018/07/17 03:46:03 http: TLS handshake error from 10.128.0.1:59622: EOF
2018/07/17 03:46:13 http: TLS handshake error from 10.128.0.1:59672: EOF
2018/07/17 03:46:23 http: TLS handshake error from 10.128.0.1:59726: EOF
2018/07/17 03:46:33 http: TLS handshake error from 10.128.0.1:59776: EOF
2018/07/17 03:46:43 http: TLS handshake error from 10.128.0.1:59826: EOF
2018/07/17 03:46:53 http:
TLS handshake error from 10.128.0.1:59880: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:39:14.523096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifwzrs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifwzrs" level=info timestamp=2018-07-17T03:39:44.985884Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:39:44.986793Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:39:45.189753Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5nl6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5nl6h" level=info timestamp=2018-07-17T03:41:17.911129Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:41:17.911988Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:41:18.188954Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih2l4v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih2l4v" level=info timestamp=2018-07-17T03:42:50.921800Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:42:50.927567Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:44:23.749797Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:44:23.759381Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:44:24.014600Z pos=vmi.go:157 component=virt-controller service=http reason="Operation 
cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqqlg" level=info timestamp=2018-07-17T03:44:24.053491Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqqlg" level=info timestamp=2018-07-17T03:45:56.540253Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:45:56.541900Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:45:27.989662Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:45:27.990502Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:45:27.990890Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:45:27.991069Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvminqqlg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:46:12.829432Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:46:13.767369Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-17T03:46:13.767924Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind=Domain uid=d78507bf-8973-11e8-a112-525500d15501 msg="Domain is in state Paused reason Unknown" level=info timestamp=2018-07-17T03:46:14.707518Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:46:14.720195Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind=Domain uid=d78507bf-8973-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:46:14.761395Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:46:14.782585Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:46:14.782746Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:46:14.835476Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:46:14.839025Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:46:14.848411Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmi98sxj-h2t5z Pod phase: Running level=info timestamp=2018-07-17T03:46:13.090883Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Domain defined." level=info timestamp=2018-07-17T03:46:13.754435Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T03:46:13.765632Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 4244e948-81cd-41dd-ad04-9ec4ce01d511" level=info timestamp=2018-07-17T03:46:13.766098Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T03:46:13.768401Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:46:14.615968Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T03:46:14.647375Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T03:46:14.652088Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:46:14.654911Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:46:14.721234Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:46:14.721833Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-17T03:46:14.756200Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:46:14.762416Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:46:14.774860Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 4244e948-81cd-41dd-ad04-9ec4ce01d511: 146"
level=info timestamp=2018-07-17T03:46:14.844716Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Synced vmi"

• Failure [92.851 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
Starting a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
With an emptyDisk defined
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113
should create a writeable emptyDisk with the right capacity [It]
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115

Timed out after 92.074s.
Expected error:
    <*errors.StatusError | 0xc4208f61b0>: {
        ErrStatus: {
            TypeMeta: {Kind: "", APIVersion: ""},
            ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
            Status: "Failure",
            Message: "Timeout: request did not complete within allowed duration",
            Reason: "Timeout",
            Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
            Code: 504,
        },
    }
Timeout: request did not complete within allowed duration
not to have occurred

/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
------------------------------
STEP: Starting a VirtualMachineInstance
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
level=info timestamp=2018-07-17T03:47:46.586445Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-17T03:47:47.355600Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/17 03:47:52 http: TLS handshake error from 10.129.0.1:39740: EOF
level=info timestamp=2018-07-17T03:47:54.290427Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:47:54.383440Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:47:57.551009Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:48:02 http: TLS handshake error from 10.129.0.1:39746: EOF level=info timestamp=2018-07-17T03:48:08.038508Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:48:10.779719Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:48:12 http: TLS handshake error from 10.129.0.1:39752: EOF level=info timestamp=2018-07-17T03:48:18.216574Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:48:22 http: TLS handshake error from 10.129.0.1:39758: EOF level=info timestamp=2018-07-17T03:48:24.555880Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:48:24.589223Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:48:28.537677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:46:13 http: TLS handshake error from 10.128.0.1:59672: EOF 2018/07/17 03:46:23 http: TLS handshake error from 10.128.0.1:59726: EOF 2018/07/17 03:46:33 http: TLS handshake error from 10.128.0.1:59776: EOF 2018/07/17 03:46:43 http: TLS handshake error from 10.128.0.1:59826: EOF 2018/07/17 03:46:53 http: TLS handshake error from 10.128.0.1:59880: EOF 2018/07/17 03:47:03 http: TLS handshake error from 10.128.0.1:59930: EOF 2018/07/17 03:47:13 http: TLS handshake error from 10.128.0.1:59984: EOF 2018/07/17 03:47:23 http: TLS handshake error from 10.128.0.1:60038: EOF 2018/07/17 03:47:33 http: TLS handshake error from 10.128.0.1:60090: EOF 2018/07/17 03:47:43 http: TLS handshake error from 10.128.0.1:60140: EOF 2018/07/17 03:47:53 http: TLS handshake error from 10.128.0.1:60194: EOF 2018/07/17 03:48:03 http: TLS handshake error from 10.128.0.1:60244: EOF 2018/07/17 03:48:13 http: TLS handshake error from 10.128.0.1:60294: EOF level=info timestamp=2018-07-17T03:48:15.651895Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:48:23 http: TLS handshake error from 10.128.0.1:60348: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:39:44.986793Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5nl6h kind= uid=fa11d0a1-8972-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" 
level=info timestamp=2018-07-17T03:39:45.189753Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5nl6h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5nl6h" level=info timestamp=2018-07-17T03:41:17.911129Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:41:17.911988Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih2l4v kind= uid=3174422e-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:41:18.188954Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih2l4v\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih2l4v" level=info timestamp=2018-07-17T03:42:50.921800Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:42:50.927567Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6rscj kind= uid=68df81d5-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:44:23.749797Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:44:23.759381Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqqlg kind= uid=a03908b9-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:44:24.014600Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqqlg" level=info timestamp=2018-07-17T03:44:24.053491Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqqlg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqqlg" level=info timestamp=2018-07-17T03:45:56.540253Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:45:56.541900Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi98sxj kind= uid=d78507bf-8973-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:47:29.291492Z pos=preset.go:139 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:47:29.294190Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:47:00.397128Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:47:00.397539Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:47:00.402091Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi98sxj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:47:00.406574Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:47:46.104504Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:47:46.877484Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type ADDED" level=info timestamp=2018-07-17T03:47:46.889165Z pos=vm.go:657 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind=Domain uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Domain is in state Paused reason Unknown" level=info timestamp=2018-07-17T03:47:47.857903Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:47:47.858360Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind=Domain uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:47:48.022524Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:47:48.038734Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:47:48.041831Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T03:47:48.102091Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:47:48.102674Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:47:48.109849Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." 
Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" Pod name: virt-launcher-testvmig25cl-vpstm Pod phase: Running level=info timestamp=2018-07-17T03:47:46.370972Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Domain defined." level=info timestamp=2018-07-17T03:47:46.858441Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T03:47:46.881009Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T03:47:47.080093Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID eae48120-a7d2-4d0e-8c12-3e28935f32f7" level=info timestamp=2018-07-17T03:47:47.081053Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T03:47:47.763099Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T03:47:47.827474Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T03:47:47.832610Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-17T03:47:47.848412Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:47:47.859131Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:47:47.859554Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-17T03:47:47.896685Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-17T03:47:48.031828Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-17T03:47:48.086917Z pos=monitor.go:222 component=virt-launcher msg="Found PID for eae48120-a7d2-4d0e-8c12-3e28935f32f7: 146"
level=info timestamp=2018-07-17T03:47:48.109226Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmig25cl kind= uid=0ed255f6-8974-11e8-a112-525500d15501 msg="Synced vmi"
• Failure [90.736 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With an emptyDisk defined and a specified serial number
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163
      should create a writeable emptyDisk with the specified serial number [It]
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165

      Timed out after 90.001s.
      Expected error:
          <*errors.StatusError | 0xc420154a20>: {
              ErrStatus: {
                  TypeMeta: {Kind: "", APIVersion: ""},
                  ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                  Status: "Failure",
                  Message: "virtualmachineinstances.kubevirt.io \"testvmig25cl\" already exists",
                  Reason: "AlreadyExists",
                  Details: {
                      Name: "testvmig25cl",
                      Group: "kubevirt.io",
                      Kind: "virtualmachineinstances",
                      UID: "",
                      Causes: nil,
                      RetryAfterSeconds: 0,
                  },
                  Code: 409,
              },
          }
          virtualmachineinstances.kubevirt.io "testvmig25cl" already exists
      not to have occurred

      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
------------------------------
STEP: Starting a VirtualMachineInstance
• [SLOW TEST:31.870 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should be successfully started
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207
------------------------------
• [SLOW TEST:72.302 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With ephemeral alpine PVC
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205
      should not persist data
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218
------------------------------
• [SLOW TEST:117.691 seconds]
Storage
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46
  Starting a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70
    With VirtualMachineInstance with two PVCs
    /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266
      should start vmi multiple times
      /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278
------------------------------
•
------------------------------
• [SLOW TEST:20.587 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:20.430 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given an vm
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:20.752 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi preset
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
• [SLOW TEST:20.415 seconds]
User Access
/root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33
  With default kubevirt service accounts
  /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41
    should verify permissions are correct for view, edit, and admin
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      given a vmi replica set
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46
------------------------------
•••••••••••••
------------------------------
• [SLOW TEST:19.417 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should update VirtualMachine once VMIs are up
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195
------------------------------
••
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
level=info timestamp=2018-07-17T03:53:46.379399Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/17 03:53:52 http: TLS handshake error from 10.129.0.1:39962: EOF
level=info timestamp=2018-07-17T03:53:55.843513Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:53:58.131067Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:53:58.140180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/17
03:54:02 http: TLS handshake error from 10.129.0.1:39968: EOF level=info timestamp=2018-07-17T03:54:05.998257Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:54:12 http: TLS handshake error from 10.129.0.1:39974: EOF level=info timestamp=2018-07-17T03:54:13.795966Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:54:16.094264Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:54:22 http: TLS handshake error from 10.129.0.1:39980: EOF level=info timestamp=2018-07-17T03:54:26.191578Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:54:28.378098Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:54:28.380114Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:54:32 http: TLS handshake error from 10.129.0.1:39986: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:52:53 http: TLS handshake error from 10.128.0.1:33548: EOF 2018/07/17 03:53:03 http: TLS handshake error from 10.128.0.1:33598: EOF 2018/07/17 03:53:13 http: TLS handshake error from 10.128.0.1:33650: EOF level=info timestamp=2018-07-17T03:53:15.579774Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:53:23 http: TLS handshake error from 10.128.0.1:33704: EOF 2018/07/17 03:53:33 http: TLS handshake error from 10.128.0.1:33754: EOF level=info timestamp=2018-07-17T03:53:34.538097Z pos=validating-webhook.go:84 component=virt-api msg="rejected vmi admission" level=info timestamp=2018-07-17T03:53:40.019335Z pos=validating-webhook.go:84 component=virt-api msg="rejected vmi admission" 2018/07/17 03:53:43 http: TLS handshake error from 10.128.0.1:33804: EOF 2018/07/17 03:53:53 http: TLS handshake error from 10.128.0.1:33858: EOF 2018/07/17 03:54:03 http: TLS handshake error from 10.128.0.1:1024: EOF 2018/07/17 03:54:13 http: TLS handshake error from 10.128.0.1:33958: EOF level=info timestamp=2018-07-17T03:54:15.636148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:54:23 http: TLS handshake error from 10.128.0.1:34012: EOF 2018/07/17 03:54:33 http: TLS handshake error from 10.128.0.1:34062: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:54:02.206944Z pos=vm.go:186 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:54:02.211930Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:02.212186Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:54:02.369310Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigkwdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigkwdd" level=info timestamp=2018-07-17T03:54:02.372963Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:02.375836Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:54:02.476121Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:02.479711Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:54:03.264204Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:03.264480Z pos=controller_ref_manager.go:291 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad868a8-8975-11e8-a112-525500d15501 msg="patching vmi to remove its controllerRef to kubevirt.io/v1alpha2/VirtualMachine:testvmigkwdd" level=info timestamp=2018-07-17T03:54:03.320353Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:04.585345Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigkwdd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigkwdd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0ad868a8-8975-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigkwdd" level=info timestamp=2018-07-17T03:54:34.688999Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv4tpp kind= uid=0c622799-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info 
timestamp=2018-07-17T03:54:34.689492Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv4tpp kind= uid=0c622799-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:54:34.689532Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:54:00.175103Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.175757Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:54:00.181235Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.607950Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T03:54:00.608178Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.615014Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmimrpft" level=info timestamp=2018-07-17T03:54:00.859521Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.860529Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-17T03:54:00.861223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.862001Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:54:00.862346Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:54:00.861759Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.865243Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:54:00.865481Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:54:00.869413Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
Pod name: virt-handler-zrw7w
Pod phase: Running
level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
------------------------------
• Failure [30.822 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should recreate VirtualMachineInstance if it gets deleted [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245

    Expected error:
        <*errors.StatusError | 0xc42075f170>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
------------------------------
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
level=info timestamp=2018-07-17T03:54:28.378098Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:54:28.380114Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/17 03:54:32 http: TLS handshake error from 10.129.0.1:39986: EOF
level=info timestamp=2018-07-17T03:54:36.314223Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:54:37.437892Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-17T03:54:37.442494Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1
username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/17 03:54:42 http: TLS handshake error from 10.129.0.1:39992: EOF level=info timestamp=2018-07-17T03:54:44.082718Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:54:46.619824Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:54:46.636270Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:54:52 http: TLS handshake error from 10.129.0.1:39998: EOF level=info timestamp=2018-07-17T03:54:56.914463Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:54:58.685608Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:54:58.692891Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:55:02 http: TLS handshake error from 10.129.0.1:40004: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running level=info timestamp=2018-07-17T03:53:15.579774Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:53:23 http: TLS handshake error from 10.128.0.1:33704: EOF 2018/07/17 03:53:33 http: TLS handshake error from 10.128.0.1:33754: EOF level=info timestamp=2018-07-17T03:53:34.538097Z pos=validating-webhook.go:84 component=virt-api msg="rejected vmi admission" level=info timestamp=2018-07-17T03:53:40.019335Z pos=validating-webhook.go:84 component=virt-api msg="rejected vmi admission" 2018/07/17 03:53:43 http: TLS handshake error from 10.128.0.1:33804: EOF 2018/07/17 03:53:53 http: TLS handshake error from 10.128.0.1:33858: EOF 2018/07/17 03:54:03 http: TLS handshake error from 10.128.0.1:1024: EOF 2018/07/17 03:54:13 http: TLS handshake error from 10.128.0.1:33958: EOF level=info timestamp=2018-07-17T03:54:15.636148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:54:23 http: TLS handshake error from 10.128.0.1:34012: EOF 2018/07/17 03:54:33 http: TLS handshake error from 10.128.0.1:34062: EOF 2018/07/17 03:54:43 http: TLS handshake error from 10.128.0.1:34112: EOF 2018/07/17 03:54:53 http: TLS handshake error from 10.128.0.1:34166: EOF 2018/07/17 03:55:03 http: TLS handshake error from 10.128.0.1:34220: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:54:02.369310Z pos=vmi.go:157 component=virt-controller service=http 
reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigkwdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigkwdd" level=info timestamp=2018-07-17T03:54:02.372963Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:02.375836Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:54:02.476121Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:02.479711Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:54:03.264204Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:03.264480Z pos=controller_ref_manager.go:291 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad868a8-8975-11e8-a112-525500d15501 msg="patching vmi to remove its controllerRef to kubevirt.io/v1alpha2/VirtualMachine:testvmigkwdd" level=info timestamp=2018-07-17T03:54:03.320353Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigkwdd kind= uid=0ad550e6-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:04.585345Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigkwdd\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmigkwdd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0ad868a8-8975-11e8-a112-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigkwdd" level=info timestamp=2018-07-17T03:54:34.688999Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv4tpp kind= uid=0c622799-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:54:34.689492Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv4tpp kind= uid=0c622799-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:54:34.689532Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-17T03:55:05.403459Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:55:05.406164Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= 
uid=1ead666e-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:55:05.406775Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Setting stabile UUID 'd5836efb-ef94-5d20-ae3a-7d55da5d0312' (was '')" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:54:00.175103Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.175757Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:54:00.181235Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.607950Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T03:54:00.608178Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.615014Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmimrpft" level=info timestamp=2018-07-17T03:54:00.859521Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.860529Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-17T03:54:00.861223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.862001Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:54:00.862346Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:54:00.861759Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.865243Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:54:00.865481Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:54:00.869413Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED"
Pod name: virt-handler-zrw7w
Pod phase: Running
level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01
level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller."
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
• Failure [30.674 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265

    Expected error:
        <*errors.StatusError | 0xc42075f0e0>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
------------------------------
STEP: Creating a new VMI
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
level=info timestamp=2018-07-17T03:54:58.685608Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:54:58.692891Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/17 03:55:02 http: TLS handshake error from 10.129.0.1:40004: EOF
level=info timestamp=2018-07-17T03:55:07.160542Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/17 03:55:12 http: TLS handshake error from 10.129.0.1:40010: EOF
level=info timestamp=2018-07-17T03:55:14.422638Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0
statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:17.369233Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:55:22 http: TLS handshake error from 10.129.0.1:40016: EOF level=info timestamp=2018-07-17T03:55:23.950034Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:24.000386Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:24.039163Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:27.692139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:29.110058Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:29.111268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:55:32 http: TLS handshake error from 10.129.0.1:40024: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running level=info timestamp=2018-07-17T03:53:40.019335Z pos=validating-webhook.go:84 component=virt-api msg="rejected vmi admission" 2018/07/17 03:53:43 http: TLS handshake error from 10.128.0.1:33804: EOF 2018/07/17 03:53:53 http: TLS handshake error from 10.128.0.1:33858: EOF 2018/07/17 03:54:03 http: TLS handshake error from 10.128.0.1:1024: EOF 2018/07/17 03:54:13 http: TLS handshake error from 10.128.0.1:33958: EOF level=info timestamp=2018-07-17T03:54:15.636148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:54:23 http: TLS handshake error from 10.128.0.1:34012: EOF 2018/07/17 03:54:33 http: TLS handshake error from 10.128.0.1:34062: EOF 2018/07/17 03:54:43 http: TLS handshake error from 10.128.0.1:34112: EOF 2018/07/17 03:54:53 http: TLS handshake error from 10.128.0.1:34166: EOF 2018/07/17 03:55:03 http: TLS handshake error from 10.128.0.1:34220: EOF 2018/07/17 03:55:13 http: TLS handshake error from 10.128.0.1:34270: EOF level=info timestamp=2018-07-17T03:55:15.477148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:55:23 http: TLS handshake error from 10.128.0.1:34324: EOF 2018/07/17 03:55:33 http: TLS handshake error from 10.128.0.1:34374: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running 
level=info timestamp=2018-07-17T03:55:05.406775Z pos=vm.go:377 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Setting stabile UUID 'd5836efb-ef94-5d20-ae3a-7d55da5d0312' (was '')" level=info timestamp=2018-07-17T03:55:35.428773Z pos=vm.go:287 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Failed to create VirtualMachineInstance: /" level=error timestamp=2018-07-17T03:55:35.429404Z pos=vm.go:197 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Creating the VirtualMachine failed." level=info timestamp=2018-07-17T03:55:35.429915Z pos=vm.go:681 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Processing failure status:: shouldRun: true; noErr: true; noVm: false" level=info timestamp=2018-07-17T03:55:35.430026Z pos=vm.go:692 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Reason to fail: FailedCreate" level=error timestamp=2018-07-17T03:55:35.495474Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvminqv7s\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachines/kubevirt-test-default/testvminqv7s, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ead666e-8975-11e8-a112-525500d15501, UID in object meta: " msg="Updating the VirtualMachine status failed." 
level=info timestamp=2018-07-17T03:55:35.495773Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvminqv7s\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachines/kubevirt-test-default/testvminqv7s, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ead666e-8975-11e8-a112-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachine kubevirt-test-default/testvminqv7s" level=info timestamp=2018-07-17T03:55:35.508723Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-17T03:55:35.508951Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvminqv7s" level=info timestamp=2018-07-17T03:55:35.509110Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:55:35.509625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:55:35.741461Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqv7s\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqv7s" level=info timestamp=2018-07-17T03:55:36.157601Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6mbhr kind= uid=31054cba-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:55:36.157829Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6mbhr kind= uid=31054cba-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:55:36.157913Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:54:00.175103Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.175757Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:54:00.181235Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.607950Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." 
level=info timestamp=2018-07-17T03:54:00.608178Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.615014Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmimrpft" level=info timestamp=2018-07-17T03:54:00.859521Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.860529Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-17T03:54:00.861223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.862001Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:54:00.862346Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:54:00.861759Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.865243Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:54:00.865481Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.869413Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started"
2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe"
level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started"
2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe"
Pod name: virt-launcher-testvminqv7s-jzvph
Pod phase: Pending
• Failure [30.744 seconds]
VirtualMachine
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47
  A valid VirtualMachine given
  /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115
    should stop VirtualMachineInstance if running set to false [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325

    Expected error:
        <*errors.StatusError | 0xc420935050>: {
            ErrStatus: {
                TypeMeta: {Kind: "", APIVersion: ""},
                ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""},
                Status: "Failure",
                Message: "Timeout: request did not complete within allowed duration",
                Reason: "Timeout",
                Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0},
                Code: 504,
            },
        }
        Timeout: request did not complete within allowed duration
    not to have occurred

    /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
------------------------------
Pod name: disks-images-provider-gmdx6
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-mnlpj
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-95r5c
Pod phase: Running
level=info timestamp=2018-07-17T03:55:27.692139Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:55:29.110058Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-17T03:55:29.111268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/17 03:55:32 http: TLS handshake error from 10.129.0.1:40024: EOF
level=info timestamp=2018-07-17T03:55:37.452993Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-17T03:55:37.458563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-17T03:55:37.985937Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/17 03:55:42 http: TLS handshake error from 10.129.0.1:40030:
EOF level=info timestamp=2018-07-17T03:55:44.551673Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:48.191704Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:55:52 http: TLS handshake error from 10.129.0.1:40036: EOF level=info timestamp=2018-07-17T03:55:58.398079Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:59.408058Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:55:59.419119Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:02 http: TLS handshake error from 10.129.0.1:40042: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:54:13 http: TLS handshake error from 10.128.0.1:33958: EOF level=info timestamp=2018-07-17T03:54:15.636148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:54:23 http: TLS handshake error from 10.128.0.1:34012: EOF 2018/07/17 03:54:33 http: TLS handshake error from 10.128.0.1:34062: EOF 2018/07/17 03:54:43 http: TLS handshake error from 10.128.0.1:34112: EOF 2018/07/17 03:54:53 http: TLS handshake error from 10.128.0.1:34166: EOF 2018/07/17 03:55:03 http: TLS handshake error from 10.128.0.1:34220: EOF 2018/07/17 03:55:13 http: TLS handshake error from 10.128.0.1:34270: EOF level=info timestamp=2018-07-17T03:55:15.477148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:55:23 http: TLS handshake error from 10.128.0.1:34324: EOF 2018/07/17 03:55:33 http: TLS handshake error from 10.128.0.1:34374: EOF 2018/07/17 03:55:43 http: TLS handshake error from 10.128.0.1:34424: EOF level=info timestamp=2018-07-17T03:55:45.473067Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:55:53 http: TLS handshake error from 10.128.0.1:34478: EOF 2018/07/17 03:56:03 http: TLS handshake error from 10.128.0.1:34528: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:55:35.429915Z pos=vm.go:681 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 msg="Processing failure status:: shouldRun: true; noErr: true; noVm: false" level=info timestamp=2018-07-17T03:55:35.430026Z pos=vm.go:692 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= 
uid=1ead666e-8975-11e8-a112-525500d15501 msg="Reason to fail: FailedCreate" level=error timestamp=2018-07-17T03:55:35.495474Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=1ead666e-8975-11e8-a112-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvminqv7s\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachines/kubevirt-test-default/testvminqv7s, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ead666e-8975-11e8-a112-525500d15501, UID in object meta: " msg="Updating the VirtualMachine status failed." level=info timestamp=2018-07-17T03:55:35.495773Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvminqv7s\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachines/kubevirt-test-default/testvminqv7s, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ead666e-8975-11e8-a112-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachine kubevirt-test-default/testvminqv7s" level=info timestamp=2018-07-17T03:55:35.508723Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-17T03:55:35.508951Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvminqv7s" level=info timestamp=2018-07-17T03:55:35.509110Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:55:35.509625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:55:35.741461Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqv7s\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqv7s" level=info timestamp=2018-07-17T03:55:36.157601Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6mbhr kind= uid=31054cba-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:55:36.157829Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6mbhr kind= uid=31054cba-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:55:36.157913Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-17T03:56:07.011553Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitchbj kind= uid=43628c2a-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:56:07.011984Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmitchbj kind= uid=43628c2a-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:56:07.012076Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:54:00.175103Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.175757Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:54:00.181235Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.607950Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T03:54:00.608178Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.615014Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmimrpft" level=info timestamp=2018-07-17T03:54:00.859521Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.860529Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-17T03:54:00.861223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.862001Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:54:00.862346Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:54:00.861759Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.865243Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:54:00.865481Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:54:00.869413Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" • Failure [30.830 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should start and stop VirtualMachineInstance multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333 Expected error: <*errors.StatusError | 0xc420934b40>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T03:55:59.419119Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:02 http: TLS handshake error from 10.129.0.1:40042: EOF level=info timestamp=2018-07-17T03:56:08.668936Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:10.759076Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:10.775956Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:10.792252Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:12 http: TLS handshake error from 10.129.0.1:40048: EOF level=info timestamp=2018-07-17T03:56:14.707684Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:16.391865Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T03:56:18.892773Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:22 http: TLS handshake error from 10.129.0.1:40054: EOF level=info timestamp=2018-07-17T03:56:29.102617Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:29.832497Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:29.841534Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:32 http: TLS handshake error from 10.129.0.1:40060: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:54:33 http: TLS handshake error from 10.128.0.1:34062: EOF 2018/07/17 03:54:43 http: TLS handshake error from 10.128.0.1:34112: EOF 2018/07/17 03:54:53 http: TLS handshake error from 10.128.0.1:34166: EOF 2018/07/17 03:55:03 http: TLS handshake error from 10.128.0.1:34220: EOF 2018/07/17 03:55:13 http: TLS handshake error from 10.128.0.1:34270: EOF level=info timestamp=2018-07-17T03:55:15.477148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:55:23 http: TLS handshake error from 10.128.0.1:34324: EOF 2018/07/17 03:55:33 http: TLS handshake error from 10.128.0.1:34374: EOF 2018/07/17 03:55:43 http: TLS handshake error from 10.128.0.1:34424: EOF level=info timestamp=2018-07-17T03:55:45.473067Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:55:53 http: TLS handshake error from 10.128.0.1:34478: EOF 2018/07/17 03:56:03 http: TLS handshake error from 10.128.0.1:34528: EOF 2018/07/17 03:56:13 http: TLS handshake error from 10.128.0.1:34580: EOF 2018/07/17 03:56:23 http: TLS handshake error from 10.128.0.1:34634: EOF 2018/07/17 03:56:33 http: TLS handshake error from 10.128.0.1:34684: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:55:35.495773Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachines.kubevirt.io \"testvminqv7s\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachines/kubevirt-test-default/testvminqv7s, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ead666e-8975-11e8-a112-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachine kubevirt-test-default/testvminqv7s" level=info timestamp=2018-07-17T03:55:35.508723Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-17T03:55:35.508951Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvminqv7s" level=info timestamp=2018-07-17T03:55:35.509110Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:55:35.509625Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminqv7s kind= uid=309757c1-8975-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:55:35.741461Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminqv7s\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminqv7s" level=info timestamp=2018-07-17T03:55:36.157601Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6mbhr kind= uid=31054cba-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:55:36.157829Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6mbhr kind= uid=31054cba-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:55:36.157913Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-17T03:56:07.011553Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitchbj kind= uid=43628c2a-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:56:07.011984Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitchbj kind= uid=43628c2a-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:56:07.012076Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-17T03:56:37.732697Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=55b53184-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:56:37.736686Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=55b53184-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-17T03:56:37.738441Z pos=vm.go:377 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmix8m5b kind= uid=55b53184-8975-11e8-a112-525500d15501 msg="Setting stabile UUID '1c0ed65c-2ddd-5906-94ba-491317152d8f' (was '')" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:54:00.175103Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.175757Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:54:00.181235Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.607950Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T03:54:00.608178Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.615014Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmimrpft" level=info timestamp=2018-07-17T03:54:00.859521Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.860529Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-17T03:54:00.861223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.862001Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:54:00.862346Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:54:00.861759Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.865243Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:54:00.865481Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:54:00.869413Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" • Failure [30.788 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should not update the VirtualMachineInstance spec if Running [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346 Expected error: <*errors.StatusError | 0xc42075f0e0>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T03:56:18.892773Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:22 http: TLS handshake error from 10.129.0.1:40054: EOF level=info timestamp=2018-07-17T03:56:29.102617Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:29.832497Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:29.841534Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:32 http: TLS handshake error from 10.129.0.1:40060: EOF level=info 
timestamp=2018-07-17T03:56:39.255051Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:42 http: TLS handshake error from 10.129.0.1:40066: EOF level=info timestamp=2018-07-17T03:56:44.832210Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:56:49.443505Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:56:52 http: TLS handshake error from 10.129.0.1:40072: EOF level=info timestamp=2018-07-17T03:56:59.650874Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:57:00.200569Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T03:57:00.206686Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 03:57:02 http: TLS handshake error from 10.129.0.1:40078: EOF Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:55:13 http: TLS handshake error from 10.128.0.1:34270: EOF level=info timestamp=2018-07-17T03:55:15.477148Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:55:23 http: TLS handshake error from 10.128.0.1:34324: EOF 2018/07/17 03:55:33 http: TLS handshake error from 10.128.0.1:34374: EOF 2018/07/17 03:55:43 http: TLS handshake error from 10.128.0.1:34424: EOF level=info timestamp=2018-07-17T03:55:45.473067Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:55:53 http: TLS handshake error from 10.128.0.1:34478: EOF 2018/07/17 03:56:03 http: TLS handshake error from 10.128.0.1:34528: EOF 2018/07/17 03:56:13 http: TLS handshake error from 10.128.0.1:34580: EOF 2018/07/17 03:56:23 http: TLS handshake error from 10.128.0.1:34634: EOF 2018/07/17 03:56:33 http: TLS handshake error from 10.128.0.1:34684: EOF 2018/07/17 03:56:43 http: TLS handshake error from 10.128.0.1:34734: EOF level=info timestamp=2018-07-17T03:56:45.643722Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:56:53 http: TLS handshake error from 10.128.0.1:34788: EOF 2018/07/17 03:57:03 http: TLS handshake error from 10.128.0.1:34838: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:31:50.507828Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-t5fjs Pod phase: Running level=info timestamp=2018-07-17T03:57:07.762559Z pos=vm.go:287 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b 
kind= uid=55b53184-8975-11e8-a112-525500d15501 msg="Failed to create VirtualMachineInstance: /" level=error timestamp=2018-07-17T03:57:07.764729Z pos=vm.go:197 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=55b53184-8975-11e8-a112-525500d15501 msg="Creating the VirtualMachine failed." level=info timestamp=2018-07-17T03:57:07.770124Z pos=vm.go:681 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=55b53184-8975-11e8-a112-525500d15501 msg="Processing failure status:: shouldRun: true; noErr: true; noVm: false" level=info timestamp=2018-07-17T03:57:07.771121Z pos=vm.go:692 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=55b53184-8975-11e8-a112-525500d15501 msg="Reason to fail: FailedCreate" level=error timestamp=2018-07-17T03:57:07.793639Z pos=vm.go:202 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=55b53184-8975-11e8-a112-525500d15501 reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmix8m5b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachines/kubevirt-test-default/testvmix8m5b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 55b53184-8975-11e8-a112-525500d15501, UID in object meta: " msg="Updating the VirtualMachine status failed." level=info timestamp=2018-07-17T03:57:07.793819Z pos=vm.go:111 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachines.kubevirt.io \"testvmix8m5b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachines/kubevirt-test-default/testvmix8m5b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 55b53184-8975-11e8-a112-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachine kubevirt-test-default/testvmix8m5b" level=info timestamp=2018-07-17T03:57:07.822056Z pos=vm.go:459 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=679fb665-8975-11e8-a112-525500d15501 msg="Looking for VirtualMachineInstance Ref" level=error timestamp=2018-07-17T03:57:07.826559Z pos=vm.go:462 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=679fb665-8975-11e8-a112-525500d15501 msg="Cant find the matching VM for VirtualMachineInstance: testvmix8m5b" level=info timestamp=2018-07-17T03:57:07.826884Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=679fb665-8975-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:57:07.827254Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix8m5b kind= uid=679fb665-8975-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:57:08.072244Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix8m5b\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix8m5b" level=info timestamp=2018-07-17T03:57:08.144547Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix8m5b\": 
the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix8m5b" level=info timestamp=2018-07-17T03:57:08.387204Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8pqjs kind= uid=67fdf216-8975-11e8-a112-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-17T03:57:08.387517Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8pqjs kind= uid=67fdf216-8975-11e8-a112-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-17T03:57:08.388001Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T03:54:00.175103Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.175757Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:54:00.181235Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.607950Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T03:54:00.608178Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.615014Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmimrpft" level=info timestamp=2018-07-17T03:54:00.859521Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind= uid=fdc95155-8974-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.860529Z pos=vm.go:330 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-17T03:54:00.861223Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-17T03:54:00.862001Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:54:00.862346Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:54:00.861759Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:54:00.865243Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:54:00.865481Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimrpft kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:54:00.869413Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:22:20.842580Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-17T03:22:20.853268Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-17T03:22:20.854153Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-17T03:22:20.953715Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-17T03:22:20.967766Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/17 03:22:20 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-17T03:22:21.023619Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/17 03:22:21 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" • Failure [30.626 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should survive guest shutdown, multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387 Expected error: <*errors.StatusError | 0xc42075fc20>: { ErrStatus: { TypeMeta: {Kind: "", APIVersion: ""}, ListMeta: {SelfLink: "", ResourceVersion: "", Continue: ""}, Status: "Failure", Message: "Timeout: request did not complete within allowed duration", Reason: "Timeout", Details: {Name: "", Group: "", Kind: "", UID: "", Causes: nil, RetryAfterSeconds: 0}, Code: 504, }, } Timeout: request did not complete within allowed duration not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127 ------------------------------ STEP: Creating new VMI, not running • [SLOW TEST:44.737 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should start a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 ------------------------------ • [SLOW TEST:38.089 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should stop a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467 ------------------------------ • ------------------------------ • [SLOW TEST:7.518 seconds] VirtualMachineInstanceReplicaSet 
/root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:23.628 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should update readyReplicas once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157 ------------------------------ •• ------------------------------ • [SLOW TEST:5.901 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should not scale when paused and scale when resume /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223 ------------------------------ • [SLOW TEST:7.978 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove the finished VM /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279 ------------------------------ • [SLOW TEST:38.253 seconds] LeaderElection /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43 Start a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53 when the controller pod is not running /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54 should success /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55 ------------------------------ • [SLOW TEST:42.650 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 should have cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 ------------------------------ Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T04:00:42.949661Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T04:00:42.957567Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T04:00:44.383422Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:00:46.850343Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=error timestamp=2018-07-17T04:00:50.702542Z pos=subresource.go:100 component=virt-api reason="websocket: close 1006 (abnormal closure): unexpected EOF" msg="error ecountered reading from websocket stream" level=error timestamp=2018-07-17T04:00:50.703329Z pos=subresource.go:109 component=virt-api reason="websocket: close 1006 (abnormal 
closure): unexpected EOF" msg="Error in websocket proxy" 2018/07/17 04:00:50 http: response.WriteHeader on hijacked connection level=info timestamp=2018-07-17T04:00:50.703954Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8v46l/console proto=HTTP/1.1 statusCode=500 contentLength=0 level=error timestamp=2018-07-17T04:00:50.714848Z pos=subresource.go:94 component=virt-api reason="tls: use of closed connection" msg="error ecountered reading from remote podExec stream" 2018/07/17 04:00:52 http: TLS handshake error from 10.129.0.1:40220: EOF level=info timestamp=2018-07-17T04:00:54.816549Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 04:01:02 http: TLS handshake error from 10.129.0.1:40226: EOF level=info timestamp=2018-07-17T04:01:04.171396Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:01:04.174214Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:01:05.144313Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 03:59:13 http: TLS handshake error from 10.128.0.1:35514: EOF level=info timestamp=2018-07-17T03:59:15.577720Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:59:23 http: TLS handshake error from 10.128.0.1:35568: EOF 2018/07/17 03:59:33 http: TLS handshake error from 10.128.0.1:35620: EOF 2018/07/17 03:59:43 http: TLS handshake error from 10.128.0.1:35666: EOF level=info timestamp=2018-07-17T03:59:45.591176Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 03:59:53 http: TLS handshake error from 10.128.0.1:35720: EOF 2018/07/17 04:00:03 http: TLS handshake error from 10.128.0.1:35770: EOF 2018/07/17 04:00:13 http: TLS handshake error from 10.128.0.1:35820: EOF 2018/07/17 04:00:23 http: TLS handshake error from 10.128.0.1:35874: EOF 2018/07/17 04:00:33 http: TLS handshake error from 10.128.0.1:35930: EOF 2018/07/17 04:00:43 http: TLS handshake error from 10.128.0.1:35980: EOF level=info timestamp=2018-07-17T04:00:45.632336Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 04:00:53 http: TLS handshake error from 10.128.0.1:36132: EOF 2018/07/17 04:01:03 http: TLS handshake error from 10.128.0.1:36186: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T03:59:48.729928Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-17T03:59:48.730344Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." 
level=info timestamp=2018-07-17T03:59:48.732884Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-17T03:59:48.733336Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-17T03:59:48.733593Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-17T03:59:48.734018Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-17T03:59:50.786829Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikcqhs kind= uid=da0e025c-8975-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T03:59:50.787619Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikcqhs kind= uid=da0e025c-8975-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T03:59:51.126095Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikcqhs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikcqhs" level=info timestamp=2018-07-17T04:00:08.403583Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8v46l kind= uid=e48efe44-8975-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:00:08.404244Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8v46l kind= uid=e48efe44-8975-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T04:00:08.707484Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8v46l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8v46l" level=info timestamp=2018-07-17T04:00:08.764511Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8v46l\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8v46l" level=info timestamp=2018-07-17T04:00:50.915705Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:00:50.916106Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-vcstk Pod phase: Running level=info timestamp=2018-07-17T03:59:33.671378Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T04:00:26.913137Z pos=vm.go:413 component=virt-handler 
namespace=kubevirt-test-default name=testvmi8v46l kind= uid=e48efe44-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:00:50.767762Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind= uid=e48efe44-8975-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T04:00:50.768236Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind= uid=e48efe44-8975-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T04:00:50.841460Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind= uid=e48efe44-8975-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmi8v46l" level=info timestamp=2018-07-17T04:00:51.091164Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T04:00:51.091488Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T04:00:51.107061Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind= uid=e48efe44-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:00:51.107264Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T04:00:51.107417Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:00:51.108933Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T04:00:51.109049Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi8v46l kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:01:07.592408Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 msg="Processing vmi update" level=error timestamp=2018-07-17T04:01:07.617544Z pos=vm.go:397 component=virt-handler namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 reason="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-17T04:01:07.715245Z pos=vm.go:251 component=virt-handler reason="server error. 
command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiktljz" level=info timestamp=2018-07-17T04:01:07.715678Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 msg="Processing vmi update" Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:59:01.551327Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:59:01.553696Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=Domain uid=b3087763-8975-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:59:01.575230Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:59:01.615861Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:59:01.616086Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:59:01.633540Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:59:08.107141Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T03:59:08.111160Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:59:08.115195Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmi89b5xfr4ss" level=info timestamp=2018-07-17T03:59:08.336145Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:59:08.343912Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:59:08.343538Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:59:08.346715Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:59:08.347166Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:59:08.351873Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-launcher-testvmiktljz-8sp68 Pod phase: Running level=info timestamp=2018-07-17T04:00:56.294828Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-17T04:00:56.298188Z pos=libvirt.go:256 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-17T04:01:06.996832Z pos=libvirt.go:271 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-17T04:01:07.039742Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiktljz" level=info timestamp=2018-07-17T04:01:07.043180Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-17T04:01:07.044139Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" level=error timestamp=2018-07-17T04:01:07.609429Z pos=manager.go:159 component=virt-launcher namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 reason="virError(Code=0, Domain=0, Message='Missing error')" msg="Getting the domain failed." level=error timestamp=2018-07-17T04:01:07.609903Z pos=server.go:68 component=virt-launcher namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 reason="virError(Code=0, Domain=0, Message='Missing error')" msg="Failed to sync vmi" level=info timestamp=2018-07-17T04:01:07.753746Z pos=cloud-init.go:254 component=virt-launcher msg="generated nocloud iso file /var/run/libvirt/kubevirt-ephemeral-disk/cloud-init-data/kubevirt-test-default/testvmiktljz/noCloud.iso" level=error timestamp=2018-07-17T04:01:07.799186Z pos=common.go:126 component=virt-launcher msg="updated MAC for interface: eth0 - 0a:58:0a:7d:f6:27" level=info timestamp=2018-07-17T04:01:07.806676Z pos=converter.go:729 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-17T04:01:07.807745Z pos=converter.go:730 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-17T04:01:07.808447Z pos=dhcp.go:62 component=virt-launcher msg="Starting SingleClientDHCPServer" level=info timestamp=2018-07-17T04:01:07.838937Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmiktljz kind= uid=fdeaa0e2-8975-11e8-a112-525500d15501 msg="Domain defined." level=info timestamp=2018-07-17T04:01:07.839685Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" • Failure [91.983 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 with injected ssh-key /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92 should have ssh-key under authorized keys [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93 Unexpected Warning event received. 
Expected : Warning not to equal : Warning /root/go/src/kubevirt.io/kubevirt/tests/utils.go:245 ------------------------------ STEP: Starting a VirtualMachineInstance STEP: Waiting the VirtualMachineInstance start level=info timestamp=2018-07-17T04:00:50.986609Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmiktljz-8sp68" level=info timestamp=2018-07-17T04:01:07.361703Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmiktljz-8sp68" level=error timestamp=2018-07-17T04:01:07.506417Z pos=utils.go:241 component=tests reason="unexpected warning event received" msg="server error. command Launcher.Sync failed: virError(Code=0, Domain=0, Message='Missing error')" STEP: Expecting the VirtualMachineInstance console STEP: Checking that the VirtualMachineInstance serial console output equals to expected one level=info timestamp=2018-07-17T04:02:22.454645Z pos=vmi_userdata_test.go:72 component=tests namespace=kubevirt-test-default name=testvmiktljz kind=VirtualMachineInstance uid= msg="[{2 \r\n\r\n\r\r\nFedora 27 (Cloud Edition)\r\nKernel 4.13.9-300.fc27.x86_64 on an x86_64 (ttyS0)\r\n\r\ntestvmiktljz login: [login:]} {4 \r\r\nFedora 27 (Cloud Edition)\r\nKernel 4.13.9-300.fc27.x86_64 on an x86_64 (ttyS0)\r\n\r\ntestvmiktljz login: fedora\r\r\nPassword: [Password:]} {6 []} {8 \r\n[fedora@testvmiktljz ~]$ cat /home/fedora/.ssh/authorized_keys\r\nssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkT test-ssh-key\r\n[ [test-ssh-key]}]" • [SLOW TEST:52.016 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userData source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118 should process provided cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119 ------------------------------ • [SLOW TEST:45.399 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 should take user-data from k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162 ------------------------------ • [SLOW TEST:72.506 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting and stopping the same VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:90 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:91 should success multiple times /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:92 ------------------------------ • [SLOW TEST:18.061 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:111 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:112 should not modify the spec on status update /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:113 ------------------------------ • [SLOW TEST:37.459 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting multiple VMIs /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:129 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:130 should success 
/root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:131 ------------------------------ ••• ------------------------------ • [SLOW TEST:8.064 seconds] Templates /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42 Launching VMI from VM Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60 with given Fedora Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193 with given VM JSON from the Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152 with given VM from the VM JSON /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158 with given VMI from the VM /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163 should succeed to terminate the VMI using oc-patch command /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166 ------------------------------ • ------------------------------ • [SLOW TEST:5.015 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48 Without permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:56 should not be able to access subresource endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:57 ------------------------------ • ------------------------------ • [SLOW TEST:5.473 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization For Version Command /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:63 Without permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:71 should be able to access subresource version endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:72 ------------------------------ • [SLOW TEST:19.701 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should successfully start with hook sidecar annotation /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 ------------------------------ • [SLOW TEST:18.191 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 ------------------------------ • [SLOW TEST:20.564 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should update domain XML with SM BIOS properties /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83 ------------------------------ • [SLOW TEST:18.842 seconds] VNC /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54 with VNC connection /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62 should allow accessing the VNC device /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64 ------------------------------ •• ------------------------------ • [SLOW TEST:117.187 seconds] 
Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • Pod name: disks-images-provider-gmdx6 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-mnlpj Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-95r5c Pod phase: Running level=info timestamp=2018-07-17T04:12:24.219526Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:12:26.463380Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 04:12:32 http: TLS handshake error from 10.129.0.1:40684: EOF level=info timestamp=2018-07-17T04:12:34.451100Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:12:38.147480Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-17T04:12:38.151390Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/17 04:12:42 http: TLS handshake error from 10.129.0.1:40690: EOF level=info timestamp=2018-07-17T04:12:43.807756Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:12:43.837208Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:12:44.651293Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 04:12:52 http: TLS handshake error from 10.129.0.1:40696: EOF level=info timestamp=2018-07-17T04:12:54.859110Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-17T04:12:56.701454Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/17 04:13:02 http: TLS handshake error from 10.129.0.1:40702: EOF level=info timestamp=2018-07-17T04:13:05.070213Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-lrr44 Pod phase: Running 2018/07/17 04:11:03 http: TLS handshake error from 
10.128.0.1:39540: EOF 2018/07/17 04:11:13 http: TLS handshake error from 10.128.0.1:39592: EOF 2018/07/17 04:11:23 http: TLS handshake error from 10.128.0.1:39646: EOF 2018/07/17 04:11:33 http: TLS handshake error from 10.128.0.1:39696: EOF 2018/07/17 04:11:43 http: TLS handshake error from 10.128.0.1:39746: EOF level=info timestamp=2018-07-17T04:11:45.583269Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 04:11:53 http: TLS handshake error from 10.128.0.1:39800: EOF 2018/07/17 04:12:03 http: TLS handshake error from 10.128.0.1:39850: EOF 2018/07/17 04:12:13 http: TLS handshake error from 10.128.0.1:39900: EOF 2018/07/17 04:12:23 http: TLS handshake error from 10.128.0.1:39954: EOF 2018/07/17 04:12:33 http: TLS handshake error from 10.128.0.1:40004: EOF 2018/07/17 04:12:43 http: TLS handshake error from 10.128.0.1:40054: EOF level=info timestamp=2018-07-17T04:12:45.615151Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/17 04:12:53 http: TLS handshake error from 10.128.0.1:40108: EOF 2018/07/17 04:13:03 http: TLS handshake error from 10.128.0.1:40158: EOF Pod name: virt-controller-7d57d96b65-mr5d9 Pod phase: Running level=info timestamp=2018-07-17T04:07:05.732525Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5t42m kind= uid=dd53f4ac-8976-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:07:05.732742Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5t42m kind= uid=dd53f4ac-8976-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T04:07:23.972179Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8qlf kind= uid=e832dc5c-8976-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:07:23.972616Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw8qlf kind= uid=e832dc5c-8976-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T04:07:24.106892Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiw8qlf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw8qlf" level=info timestamp=2018-07-17T04:07:44.562617Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqtkxq kind= uid=f477e032-8976-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:07:44.563215Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqtkxq kind= uid=f477e032-8976-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T04:08:03.714887Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis8gq2 kind= uid=ffd24074-8976-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:08:03.715083Z pos=preset.go:165 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmis8gq2 kind= uid=ffd24074-8976-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T04:08:03.715714Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:08:03.715805Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T04:08:03.716078Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:08:03.716125Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-17T04:08:03.805947Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipkfb2 kind= uid=ffdde744-8976-11e8-a112-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-17T04:08:03.806086Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipkfb2 kind= uid=ffdde744-8976-11e8-a112-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-vcstk Pod phase: Running level=info timestamp=2018-07-17T03:59:33.671378Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-p5l6g Pod phase: Running level=info timestamp=2018-07-17T04:08:36.873998Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmis8gq2 kind= uid=ffd24074-8976-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:08:37.087608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:08:37.087752Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T04:08:37.641761Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T04:08:37.996394Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:08:37.996632Z pos=vm.go:392 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="No update processing required" level=info timestamp=2018-07-17T04:08:38.109608Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T04:08:38.189688Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T04:08:38.704259Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=error timestamp=2018-07-17T04:08:38.671542Z pos=vm.go:404 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2xtwm\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-07-17T04:08:38.739832Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2xtwm\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2xtwm" level=info timestamp=2018-07-17T04:08:38.757880Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T04:08:39.346762Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T04:08:40.084467Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T04:08:41.024739Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-zrw7w Pod phase: Running level=info timestamp=2018-07-17T03:59:01.551327Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:59:01.553696Z pos=vm.go:688 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=Domain uid=b3087763-8975-11e8-a112-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-17T03:59:01.575230Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-17T03:59:01.615861Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:59:01.616086Z pos=vm.go:389 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-17T03:59:01.633540Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-17T03:59:08.107141Z pos=vm.go:342 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-17T03:59:08.111160Z pos=vm.go:383 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-17T03:59:08.115195Z pos=vm.go:540 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="grace period expired, killing deleted VirtualMachineInstance testvmi89b5xfr4ss" level=info timestamp=2018-07-17T03:59:08.336145Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" level=info timestamp=2018-07-17T03:59:08.343912Z pos=vm.go:678 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-17T03:59:08.343538Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind= uid=b3087763-8975-11e8-a112-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:59:08.346715Z pos=vm.go:386 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-17T03:59:08.347166Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi89b5xfr4ss kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-17T03:59:08.351873Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type DELETED" Pod name: virt-launcher-testvmi2xtwm-4pjnb Pod phase: Running level=info timestamp=2018-07-17T04:08:31.076383Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T04:08:31.116242Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID db06be0c-babe-4af1-806f-0d3d73511a01" level=info timestamp=2018-07-17T04:08:31.117866Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T04:08:31.976815Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T04:08:31.980923Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-17T04:08:32.111293Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:32.111539Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T04:08:32.148204Z pos=monitor.go:222 component=virt-launcher msg="Found PID for db06be0c-babe-4af1-806f-0d3d73511a01: 165" level=info timestamp=2018-07-17T04:08:32.193486Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:34.441115Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:34.441227Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-17T04:08:34.491026Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:37.805318Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:39.154611Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-17T04:08:40.992373Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2xtwm kind= uid=ffd74365-8976-11e8-a112-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmigk92k-s9mhq Pod phase: Running level=info timestamp=2018-07-17T04:08:32.208219Z pos=manager.go:157 component=virt-launcher namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Domain defined." level=info timestamp=2018-07-17T04:08:33.184924Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T04:08:33.197130Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID d3cc82bc-b391-42c4-9259-7e9c2c0544d6" level=info timestamp=2018-07-17T04:08:33.199698Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T04:08:34.212265Z pos=monitor.go:222 component=virt-launcher msg="Found PID for d3cc82bc-b391-42c4-9259-7e9c2c0544d6: 162" level=info timestamp=2018-07-17T04:08:34.390170Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T04:08:34.396256Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-17T04:08:34.690802Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:34.691664Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T04:08:34.736497Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:35.382272Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:35.382964Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-17T04:08:35.419069Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:36.819865Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:38.444705Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmigk92k kind= uid=ffdaff99-8976-11e8-a112-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmipkfb2-k4dz2 Pod phase: Running level=info timestamp=2018-07-17T04:08:27.701034Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-17T04:08:28.198363Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T04:08:28.201475Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 1a3017a3-dd58-43ad-8d06-2aec4631b37f" level=info timestamp=2018-07-17T04:08:28.201684Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T04:08:28.310334Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:29.038367Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T04:08:29.039596Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmipkfb2 kind= uid=ffdde744-8976-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T04:08:29.045644Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipkfb2 kind= uid=ffdde744-8976-11e8-a112-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-17T04:08:29.078063Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:29.217855Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 1a3017a3-dd58-43ad-8d06-2aec4631b37f: 159" level=info timestamp=2018-07-17T04:08:29.902645Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:29.902934Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-17T04:08:29.929748Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:30.123870Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:34.226917Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmipkfb2 kind= uid=ffdde744-8976-11e8-a112-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmis8gq2-g98bm Pod phase: Running level=info timestamp=2018-07-17T04:08:28.268849Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-17T04:08:28.275531Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID c5e801ea-2458-4185-a629-d04fea590c0b" level=info timestamp=2018-07-17T04:08:28.276908Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-17T04:08:28.330685Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:29.302160Z pos=monitor.go:222 component=virt-launcher msg="Found PID for c5e801ea-2458-4185-a629-d04fea590c0b: 157" level=info timestamp=2018-07-17T04:08:29.380226Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-17T04:08:29.411901Z pos=manager.go:188 component=virt-launcher namespace=kubevirt-test-default name=testvmis8gq2 kind= uid=ffd24074-8976-11e8-a112-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-17T04:08:29.415578Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmis8gq2 kind= uid=ffd24074-8976-11e8-a112-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-17T04:08:29.416536Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:29.920531Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:29.920686Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-17T04:08:29.949123Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-17T04:08:32.144312Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmis8gq2 kind= uid=ffd24074-8976-11e8-a112-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-17T04:08:34.422390Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-17T04:08:35.449855Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmis8gq2 kind= uid=ffd24074-8976-11e8-a112-525500d15501 msg="Synced vmi" ------------------------------ • Failure [188.090 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with custom MAC address [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: : 180000000000 expect: timer expired after 180 seconds not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:224 ------------------------------ STEP: checking br1 MTU inside the pod level=info timestamp=2018-07-17T04:10:03.869752Z pos=vmi_networking_test.go:185 component=tests msg="4: br1: mtu 1450 qdisc noqueue state UP group default \n link/ether 0a:58:0a:52:25:00 brd ff:ff:ff:ff:ff:ff\n inet 169.254.75.86/32 brd 169.254.75.86 scope global br1\n valid_lft forever preferred_lft forever\n inet6 fe80::858:aff:fe52:2500/64 scope link \n valid_lft forever preferred_lft forever\n" STEP: checking eth0 MTU inside the VirtualMachineInstance level=info timestamp=2018-07-17T04:10:04.567275Z pos=vmi_networking_test.go:205 component=tests msg="[{1 \r\n$ [$ ]} {3 ip address show eth0\r\n2: eth0: mtu 1450 qdisc pfifo_fast qlen 1000\r\n [2: eth0: mtu 1450 qdisc pfifo_fast qlen 1000\r\n]} {5 link/ether 0 [0]}]" STEP: checking the VirtualMachineInstance can send MTU sized frames to another VirtualMachineInstance level=info timestamp=2018-07-17T04:13:10.621385Z pos=utils.go:1190 component=tests namespace=kubevirt-test-default name=testvmi2xtwm kind=VirtualMachineInstance uid=ffd74365-8976-11e8-a112-525500d15501 msg="[{1 \r\n\r\n$ [$ ]} {3 ping 10.129.0.114 -c 1 -w 5 -s 1422\r\nPING 10.129.0.114 (10.129.0.114): 1422 data bytes\r\n\r\n--- 10.129.0.114 ping statistics ---\r\n5 packets transmitted, 0 packets received, 100% packet loss\r\n$ [$ ]} {5 echo $?\r\n1\r\n$ []}]" • ------------------------------ • [SLOW TEST:6.372 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be reachable via the propagated IP from a Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 on the same node from Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 
------------------------------ ••••• ------------------------------ • [SLOW TEST:6.315 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a subdomain and a headless service given /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:330 should be able to reach the vmi via its unique fully qualified domain name /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:353 ------------------------------ • [SLOW TEST:37.049 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom interface model /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:379 should expose the right device type to the guest /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:380 ------------------------------ • ------------------------------ • [SLOW TEST:31.733 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:413 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:414 ------------------------------ • [SLOW TEST:33.071 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address in non-conventional format /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:425 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:426 ------------------------------ • [SLOW TEST:32.567 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address and slirp interface /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:438 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:439 ------------------------------ • [SLOW TEST:51.600 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a cirros image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should return that we are running cirros /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67 ------------------------------ • [SLOW TEST:45.696 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a fedora image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:76 should return that we are running fedora /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77 ------------------------------ • [SLOW TEST:35.851 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 should be able to reconnect to console multiple times /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:86 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.023 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to start a vmi [BeforeEach] 
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1342 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.044 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to stop a running vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1342 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.021 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have correct UUID /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1342 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.034 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have pod IP /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1342 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.032 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to start a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1342 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.035 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to stop a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1342 ------------------------------ • ------------------------------ • [SLOW TEST:94.335 seconds] Slirp /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39 should be able to /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 VirtualMachineInstance with slirp interface /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • Waiting for namespace kubevirt-test-default to be removed, this can take a while ... Waiting for namespace kubevirt-test-alternative to be removed, this can take a while ... 
Summarizing 28 Failures:

[Fail] VMIlifecycle Creating a VirtualMachineInstance when virt-handler is not responsive [BeforeEach] the node controller should react
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:378
[Fail] VMIlifecycle Creating a VirtualMachineInstance VM Accelerated Mode [It] should request a KVM and TUN device
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:662
[Fail] VMIlifecycle Creating a VirtualMachineInstance VM Accelerated Mode [It] should not enable emulation in virt-launcher
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:697
[Fail] VMIlifecycle Delete a VirtualMachineInstance's Pod [It] should result in the VirtualMachineInstance moving to a finalized state
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:768
[Fail] VMIlifecycle Delete a VirtualMachineInstance with an active pod. [It] should result in pod being terminated
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:802
[Fail] VMIlifecycle Delete a VirtualMachineInstance with grace period greater than 0 [It] should run graceful shutdown
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:846
[Fail] VMIlifecycle Killed VirtualMachineInstance [It] should be in Failed phase
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:877
[Fail] VMIlifecycle Killed VirtualMachineInstance [It] should be left alone by virt-handler
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:904
[Fail] Configurations VirtualMachineInstance definition with 3 CPU cores [It] should report 3 cpu cores under guest OS
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:74
[Fail] Configurations VirtualMachineInstance definition with hugepages with usupported page size [It] should failed to schedule the pod
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:214
[Fail] Configurations with CPU spec [BeforeEach] when CPU model defined should report defined CPU model
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1552
[Fail] Configurations with CPU spec [BeforeEach] when CPU model not defined should report CPU model from libvirt capabilities
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1552
[Fail] Configurations New VirtualMachineInstance with all supported drives [It] should have all the device nodes
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:351
[Fail] Health Monitoring A VirtualMachineInstance with a watchdog device [It] should be shut down when the watchdog expires
/root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:47
[Fail] Storage Starting a VirtualMachineInstance with Alpine PVC should be successfully started [It] with Disk PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] Storage Starting a VirtualMachineInstance with Alpine PVC should be successfully started [It] with CDRom PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] Storage Starting a VirtualMachineInstance with Alpine PVC should be successfully started and stopped multiple times [It] with Disk PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] Storage Starting a VirtualMachineInstance with Alpine PVC should be successfully started and stopped multiple times [It] with CDRom PVC
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] Storage Starting a VirtualMachineInstance With an emptyDisk defined [It] should create a writeable emptyDisk with the right capacity
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] Storage Starting a VirtualMachineInstance With an emptyDisk defined and a specified serial number [It] should create a writeable emptyDisk with the specified serial number
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:64
[Fail] VirtualMachine A valid VirtualMachine given [It] should recreate VirtualMachineInstance if it gets deleted
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
[Fail] VirtualMachine A valid VirtualMachine given [It] should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
[Fail] VirtualMachine A valid VirtualMachine given [It] should stop VirtualMachineInstance if running set to false
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
[Fail] VirtualMachine A valid VirtualMachine given [It] should start and stop VirtualMachineInstance multiple times
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
[Fail] VirtualMachine A valid VirtualMachine given [It] should not update the VirtualMachineInstance spec if Running
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
[Fail] VirtualMachine A valid VirtualMachine given [It] should survive guest shutdown, multiple times
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:127
[Fail] CloudInit UserData A new VirtualMachineInstance with cloudInitNoCloud userDataBase64 source with injected ssh-key [It] should have ssh-key under authorized keys
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:245
[Fail] Networking should be able to reach [It] the Inbound VirtualMachineInstance with custom MAC address
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:224

Ran 127 of 140 Specs in 3376.796 seconds
FAIL! -- 99 Passed | 28 Failed | 0 Pending | 13 Skipped
--- FAIL: TestTests (3376.81s)
FAIL
make: *** [functest] Error 1
+ make cluster-down
./cluster/down.sh