+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
Downloading .......
2018/07/15 12:40:28 Waiting for host: 192.168.66.102:22
2018/07/15 12:40:31 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:40:39 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:40:47 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:40:55 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:41:00 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='380578eb6b01 04838495ca18 aef4dea680cc 46bea3472251 6251077554f3 fe2a0d7ec163 970722bcd3fd 1eb051c1b629 1052914cea3e e039c376e094 84eb0c833d7c 4e08778d9b1d 0a9a2fcf5f77 50aebcf65f7b 9faedfc6ec82 f59bf1b05c0b 2ec313cb7901 5a782498db8c fef39cf40a53 c8ca0d879d8b aabbef87da66 0ac6cd26ae28 d021d99166a4 ed496b2ca295 e69e08bd00a8 0ddad7779bb0 3ca535d6f49f 2fc9538787a8'
+ '[' -n '380578eb6b01 04838495ca18 aef4dea680cc 46bea3472251 6251077554f3 fe2a0d7ec163 970722bcd3fd 1eb051c1b629 1052914cea3e e039c376e094 84eb0c833d7c 4e08778d9b1d 0a9a2fcf5f77 50aebcf65f7b 9faedfc6ec82 f59bf1b05c0b 2ec313cb7901 5a782498db8c fef39cf40a53 c8ca0d879d8b aabbef87da66 0ac6cd26ae28 d021d99166a4 ed496b2ca295 e69e08bd00a8 0ddad7779bb0 3ca535d6f49f 2fc9538787a8' ']'
+ docker rm -f 380578eb6b01 04838495ca18 aef4dea680cc 46bea3472251 6251077554f3 fe2a0d7ec163 970722bcd3fd 1eb051c1b629 1052914cea3e e039c376e094 84eb0c833d7c 4e08778d9b1d 0a9a2fcf5f77 50aebcf65f7b 9faedfc6ec82 f59bf1b05c0b 2ec313cb7901 5a782498db8c fef39cf40a53 c8ca0d879d8b aabbef87da66 0ac6cd26ae28 d021d99166a4 ed496b2ca295 e69e08bd00a8 0ddad7779bb0 3ca535d6f49f 2fc9538787a8
380578eb6b01 04838495ca18 aef4dea680cc 46bea3472251 6251077554f3 fe2a0d7ec163 970722bcd3fd 1eb051c1b629 1052914cea3e e039c376e094 84eb0c833d7c 4e08778d9b1d 0a9a2fcf5f77 50aebcf65f7b 9faedfc6ec82 f59bf1b05c0b 2ec313cb7901 5a782498db8c fef39cf40a53 c8ca0d879d8b aabbef87da66 0ac6cd26ae28 d021d99166a4 ed496b2ca295 e69e08bd00a8 0ddad7779bb0 3ca535d6f49f 2fc9538787a8
2018/07/15 12:41:05 Waiting for host: 192.168.66.101:22
2018/07/15 12:41:08 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:41:16 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:41:24 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:41:32 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/15 12:41:37 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused.
Sleeping 5s 2018/07/15 12:41:42 Connected to tcp://192.168.66.101:22 + inventory_file=/root/inventory + openshift_ansible=/root/openshift-ansible + echo '[new_nodes]' + sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory + nodes_found=false ++ seq 2 100 + for i in '$(seq 2 100)' ++ printf node%02d 2 + node=node02 ++ printf 192.168.66.1%02d 2 + node_ip=192.168.66.102 + set +e + ping 192.168.66.102 -c 1 PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data. 64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=1.77 ms --- 192.168.66.102 ping statistics --- 1 packets transmitted, 1 received, 0% packet loss, time 0ms rtt min/avg/max/mdev = 1.778/1.778/1.778/0.000 ms Found node02. Adding it to the inventory. + '[' 0 -ne 0 ']' + nodes_found=true + set -e + echo '192.168.66.102 node02' + echo 'Found node02. Adding it to the inventory.' + echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102' + for i in '$(seq 2 100)' ++ printf node%02d 3 + node=node03 ++ printf 192.168.66.1%02d 3 + node_ip=192.168.66.103 + set +e + ping 192.168.66.103 -c 1 PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data. From 192.168.66.101 icmp_seq=1 Destination Host Unreachable --- 192.168.66.103 ping statistics --- 1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms + '[' 1 -ne 0 ']' + break + '[' true = true ']' + ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] 
****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) PLAY [Ensure there are new_nodes] ********************************************** TASK [fail] ******************************************************************** skipping: [localhost] TASK [fail] ******************************************************************** skipping: [localhost] PLAY [Initialization Checkpoint Start] ***************************************** TASK [Set install initialization 'In Progress'] ******************************** ok: [node02] PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] ****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** 
TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config PLAY [Ensure that all non-node hosts are accessible] *************************** TASK [Gathering Facts] ********************************************************* ok: [node01] PLAY [Initialize basic host facts] ********************************************* TASK [Gathering Facts] ********************************************************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02 TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] *** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : debug] ************************************ skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_stats] ******************************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] *** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02 included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02 TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Standardize on latest variable names] ***** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Normalize openshift_release] ************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02 TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* skipping: [node01] => (item=openshift_hosted_etcd_storage_kind) skipping: [node02] => (item=openshift_hosted_etcd_storage_kind) TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK 
[openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : At least one master is schedulable] ******* skipping: [node01] skipping: [node02] TASK [Detecting Operating System from ostree_booted] *************************** ok: [node02] ok: [node01] TASK [set openshift_deployment_type if unset] ********************************** skipping: [node01] skipping: [node02] TASK [check for node already bootstrapped] ************************************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_bootstrapped] ********************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] *** ok: [node01] ok: [node02] TASK [Determine Atomic Host Docker Version] ************************************ skipping: [node01] skipping: [node02] TASK [assert atomic host docker version is 1.12 or later] ********************** skipping: [node01] skipping: [node02] PLAY [Retrieve existing master configs and validate] *************************** TASK [openshift_control_plane : stat] ****************************************** ok: [node01] TASK [openshift_control_plane : slurp] ***************************************** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] *** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Initialize special first-master variables] ******************************* TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Disable web console if required] ***************************************** TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Setup yum repositories for all hosts] ************************************ TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Install Red Hat Subscription manager] ******************* skipping: [node02] TASK [rhel_subscribe : Is host already registered?] 
**************************** skipping: [node02] TASK [rhel_subscribe : Register host] ****************************************** skipping: [node02] TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] *********** skipping: [node02] TASK [rhel_subscribe : Attach to OpenShift Pool] ******************************* skipping: [node02] TASK [rhel_subscribe : Satellite preparation] ********************************** skipping: [node02] TASK [openshift_repos : openshift_repos detect ostree] ************************* ok: [node02] TASK [openshift_repos : Ensure libselinux-python is installed] ***************** ok: [node02] TASK [openshift_repos : Remove openshift_additional.repo file] ***************** ok: [node02] TASK [openshift_repos : Create any additional repos that are defined] ********** TASK [openshift_repos : include_tasks] ***************************************** skipping: [node02] TASK [openshift_repos : include_tasks] ***************************************** included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02 TASK [openshift_repos : Configure origin gpg keys] ***************************** ok: [node02] TASK [openshift_repos : Configure correct origin release repository] *********** ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2) TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] *** changed: [node02] => { "msg": "First run of openshift_repos" } TASK [openshift_repos : Record that openshift_repos already ran] *************** ok: [node02] RUNNING HANDLER [openshift_repos : refresh cache] ****************************** changed: [node02] PLAY [Install packages necessary for installer] ******************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Determine if chrony is installed] **************************************** [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message. 
changed: [node02] TASK [Install ntp package] ***************************************************** skipping: [node02] TASK [Start and enable ntpd/chronyd] ******************************************* changed: [node02] TASK [Ensure openshift-ansible installer package deps are installed] *********** ok: [node02] => (item=iproute) ok: [node02] => (item=dbus-python) ok: [node02] => (item=PyYAML) ok: [node02] => (item=python-ipaddress) ok: [node02] => (item=libsemanage-python) ok: [node02] => (item=yum-utils) ok: [node02] => (item=python-docker) PLAY [Initialize cluster facts] ************************************************ TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [get openshift_current_version] ******************************************* ok: [node02] ok: [node01] TASK [set_fact openshift_portal_net if present on masters] ********************* ok: [node01] ok: [node02] TASK [Gather Cluster facts] **************************************************** changed: [node02] changed: [node01] TASK [Set fact of no_proxy_internal_hostnames] ********************************* skipping: [node01] skipping: [node02] TASK [Initialize openshift.node.sdn_mtu] *************************************** changed: [node02] ok: [node01] PLAY [Initialize etcd host variables] ****************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role] ************************************************************ TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** ok: [node01] TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** ok: [node01] => { "msg": "openshift_pkg_version was not defined. 
Falling back to -3.10.0" } TASK [openshift_version : set_fact] ******************************************** ok: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Verify Requirements] ***************************************************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [Run variable sanity checks] ********************************************** ok: [node01] TASK [Validate openshift_node_groups and openshift_node_group_name] ************ ok: [node01] PLAY [Initialization Checkpoint End] ******************************************* TASK [Set install initialization 'Complete'] *********************************** ok: [node02] PLAY [Validate node hostnames] ************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Query DNS for IP address of node02] ************************************** ok: [node02] TASK [Validate openshift_hostname when defined] ******************************** skipping: [node02] TASK [Validate openshift_ip exists on node when defined] *********************** skipping: [node02] PLAY [Configure os_firewall] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [os_firewall : Detecting Atomic Host Operating System] ******************** ok: [node02] TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** ok: [node02] TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** skipping: [node02] TASK [os_firewall : Install firewalld packages] ******************************** skipping: [node02] TASK [os_firewall : Ensure iptables services are not enabled] ****************** skipping: [node02] => (item=iptables) skipping: [node02] => (item=ip6tables) TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** skipping: [node02] TASK [os_firewall : Start and enable firewalld service] ************************ skipping: [node02] TASK 
[os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** skipping: [node02] TASK [os_firewall : Restart polkitd] ******************************************* skipping: [node02] TASK [os_firewall : Wait for polkit action to have been created] *************** skipping: [node02] TASK [os_firewall : Ensure firewalld service is not enabled] ******************* ok: [node02] TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** skipping: [node02] TASK [os_firewall : Install iptables packages] ********************************* ok: [node02] => (item=iptables) ok: [node02] => (item=iptables-services) TASK [os_firewall : Start and enable iptables service] ************************* ok: [node02 -> node02] => (item=node02) TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** skipping: [node02] PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [container_runtime : Setup the docker-storage for overlay] **************** skipping: [node02] TASK [container_runtime : Create file system on extra volume device] *********** TASK [container_runtime : Create mount entry for extra volume] ***************** PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** ok: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* ok: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ****************** skipping: [node02] TASK [container_runtime : Add https_proxy to /etc/atomic.conf] ***************** skipping: [node02] TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ******************** skipping: [node02] TASK [container_runtime : Get current installed Docker version] **************** ok: [node02] TASK [container_runtime : Error out if Docker pre-installed but too old] ******* skipping: [node02] TASK [container_runtime : Error out if requested Docker is too old] ************ skipping: [node02] TASK [container_runtime : Install Docker] ************************************** skipping: [node02] TASK [container_runtime : Ensure 
docker.service.d directory exists] ************ ok: [node02] TASK [container_runtime : Configure Docker service unit file] ****************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Set registry params] ********************************* skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** skipping: [node02] TASK [container_runtime : Set Proxy Settings] ********************************** skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) TASK [container_runtime : Set various Docker options] ************************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Configure Docker Network OPTIONS] ******************** ok: [node02] TASK [container_runtime : Detect if docker is already started] ***************** ok: [node02] TASK [container_runtime : Start the Docker service] **************************** ok: [node02] TASK [container_runtime : set_fact] ******************************************** ok: [node02] TASK [container_runtime : Check for docker_storage_path/overlay2] ************** ok: [node02] TASK [container_runtime : Fixup SELinux permissions for docker] **************** changed: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* ok: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** ok: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** ok: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] TASK [container_runtime : Fail if Atomic Host since this is an rpm request] **** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create 
systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Check that overlay is in the kernel] ***************** skipping: [node02] TASK [container_runtime : Add overlay to modprobe.d] *************************** skipping: [node02] TASK [container_runtime : Manually modprobe overlay into the kernel] *********** skipping: [node02] TASK [container_runtime : Enable and start systemd-modules-load] *************** skipping: [node02] TASK [container_runtime : Install cri-o] *************************************** skipping: [node02] TASK [container_runtime : Remove CRI-O default configuration files] ************ skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf) skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf) TASK [container_runtime : Create the CRI-O configuration] ********************** skipping: [node02] TASK [container_runtime : Ensure CNI configuration directory exists] *********** skipping: [node02] TASK [container_runtime : Add iptables allow rules] **************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove iptables rules] ******************************* TASK [container_runtime : Add firewalld allow rules] *************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove firewalld allow rules] ************************ TASK [container_runtime : Configure the CNI network] *************************** skipping: [node02] TASK [container_runtime : Create /etc/sysconfig/crio-network] ****************** skipping: [node02] TASK [container_runtime : Start the CRI-O service] ***************************** skipping: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* skipping: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** skipping: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** skipping: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role] ************************************************************ TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** skipping: [node01] 
TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Node Preparation Checkpoint Start] *************************************** TASK [Set Node preparation 'In Progress'] ************************************** ok: [node02] PLAY [Only target nodes that have not yet been bootstrapped] ******************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [add_host] **************************************************************** skipping: [localhost] => (item=node02) ok: [localhost] => (item=node01) PLAY [Disable excluders] ******************************************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02 TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-docker-excluder version detected] 
************ skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-excluder version detected] ******************* skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** skipping: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] PLAY [Configure nodes] ********************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_cloud_provider : Set cloud provider facts] ********************* skipping: [node02] TASK [openshift_cloud_provider : Create cloudprovider config dir] ************** skipping: [node02] TASK [openshift_cloud_provider : include the defined cloud provider files] ***** skipping: [node02] TASK [openshift_node : fail] *************************************************** skipping: [node02] TASK [openshift_node : Check for NetworkManager service] *********************** ok: [node02] TASK [openshift_node : Set fact using_network_manager] ************************* ok: [node02] TASK [openshift_node : Install dnsmasq] **************************************** ok: [node02] TASK [openshift_node : ensure origin/node directory exists] ******************** changed: [node02] => (item=/etc/origin) 
changed: [node02] => (item=/etc/origin/node) TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] *** skipping: [node02] TASK [openshift_node : Install network manager dispatch script] **************** skipping: [node02] TASK [openshift_node : Install dnsmasq configuration] ************************** ok: [node02] TASK [openshift_node : Deploy additional dnsmasq.conf] ************************* skipping: [node02] TASK [openshift_node : Enable dnsmasq] ***************************************** ok: [node02] TASK [openshift_node : Install network manager dispatch script] **************** ok: [node02] TASK [openshift_node : Add iptables allow rules] ******************************* ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove iptables rules] ********************************** TASK [openshift_node : Add firewalld allow rules] ****************************** skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove firewalld allow rules] *************************** TASK [openshift_node : Checking for journald.conf] ***************************** ok: [node02] TASK [openshift_node : Create journald persistence directories] **************** ok: [node02] TASK [openshift_node : Update journald setup] ********************************** ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'}) ok: [node02] => (item={u'var': u'Compress', u'val': True}) ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000}) ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'}) ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'}) ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'}) ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'}) ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'}) ok: [node02] => (item={u'var': u'ForwardToSyslog', 
u'val': False}) ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False}) TASK [openshift_node : Restart journald] *************************************** skipping: [node02] TASK [openshift_node : Disable swap] ******************************************* ok: [node02] TASK [openshift_node : Install node, clients, and conntrack packages] ********** ok: [node02] => (item={u'name': u'origin-node-3.10.0*'}) ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'}) ok: [node02] => (item={u'name': u'conntrack-tools'}) TASK [openshift_node : Restart cri-o] ****************************************** skipping: [node02] TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] *** changed: [node02] TASK [openshift_node : sysctl] ************************************************* ok: [node02] TASK [openshift_node : Check for credentials file for registry auth] *********** skipping: [node02] TASK [openshift_node : Create credentials for registry auth] ******************* skipping: [node02] TASK [openshift_node : Create credentials for registry auth (alternative)] ***** skipping: [node02] TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] *** skipping: [node02] TASK [openshift_node : Check that node image is present] *********************** changed: [node02] TASK [openshift_node : Pre-pull node image] ************************************ skipping: [node02] TASK [openshift_node : Copy node script to the node] *************************** ok: [node02] TASK [openshift_node : Install Node service file] ****************************** ok: [node02] TASK [openshift_node : Ensure old system path is set] ************************** skipping: [node02] => (item=/etc/origin/openvswitch) skipping: [node02] => (item=/var/lib/kubelet) skipping: [node02] => (item=/opt/cni/bin) TASK [openshift_node : Check status of node image pre-pull] ******************** skipping: [node02] TASK [openshift_node : Copy node container image to ostree storage] ************ skipping: [node02] TASK [openshift_node : Install or Update node system container] **************** skipping: [node02] TASK [openshift_node : Restart network manager to ensure networking configuration is in place] *** skipping: [node02] TASK [openshift_node : Configure Node settings] ******************************** ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='}) ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'}) ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'}) TASK [openshift_node : Configure Proxy Settings] ******************************* skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='}) skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='}) skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'}) TASK [openshift_node : file] *************************************************** skipping: [node02] TASK [openshift_node : Create the Node config] ********************************* changed: [node02] TASK [openshift_node : Configure Node Environment Variables] ******************* TASK [openshift_node : Ensure the node static pod directory exists] ************ changed: [node02] TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : Check status of node image pre-pull] 
******************** skipping: [node02] TASK [openshift_node : Install NFS storage plugin dependencies] **************** ok: [node02] TASK [openshift_node : Check for existence of nfs sebooleans] ****************** ok: [node02] => (item=virt_use_nfs) ok: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:48:59.704625', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.012366', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:48:59.692259', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:49:01.138647', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.006664', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:49:01.131983', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:48:59.704625', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.012366', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:48:59.692259', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:49:01.138647', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.006664', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:49:01.131983', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install GlusterFS storage plugin dependencies] ********** ok: [node02] TASK [openshift_node : Check for existence of fusefs sebooleans] *************** 
ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:49:07.906495', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.012192', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:49:07.894303', '_ansible_ignore_errors': None, 'failed': False}) ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:49:09.301147', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.014216', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:49:09.286931', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:49:07.906495', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.012192', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:49:07.894303', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-15 12:49:09.301147', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.014216', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-15 12:49:09.286931', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : 
restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : 
Ensure files are populated from templates] *********************** skipping: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'state': 'directory', 'ctime': 1529614575.1009853, 'serole': 'object_r', 'gid': 0, 'mode': '0755', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1529614575.1009853, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1529614575.1009853, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': u's0', 'seuser': u'unconfined_u', 'serole': u'object_r', 'ctime': 1529614575.1009853, 'state': u'file', 'gid': 0, 'mode': u'0644', 'mtime': 1529614575.1009853, 'owner': u'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': u'admin_home_t'}) TASK [tuned : Make tuned use the recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] 
********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] ********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting 
Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node02] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node01] changed: [node02] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] 
********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node01 -> node01] ok: [node02 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=67 changed=3 unreachable=0 failed=0 node02 : ok=159 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:03:19) Node Preparation : Complete (0:04:36) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5645 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 23d v1.10.0+b81c8f8 node02 Ready compute 58s v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 23d v1.10.0+b81c8f8 node02 Ready compute 59s v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
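
At this point the scale-up play has completed and both node01 and node02 report Ready, so the job moves on to make cluster-sync. The readiness gate is the set +e block traced just above; the following is a condensed sketch of that logic for readability (a reconstruction, not the verbatim script — the wrapper path cluster/kubectl.sh is taken from the log):

# Sketch: proceed only once kubectl answers and no node reports NotReady.
set +e
nodes=$(cluster/kubectl.sh get nodes --no-headers)
kubectl_rc=$?
set -e
if [ $kubectl_rc -ne 0 ]; then
    echo "kubectl get nodes failed" >&2
    exit 1
fi
if echo "$nodes" | grep -q NotReady; then
    echo "Some nodes are still NotReady" >&2
    exit 1
fi
echo 'Nodes are ready:'
cluster/kubectl.sh get nodes
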
Untagged: localhost:32896/kubevirt/virt-controller:devel Untagged: localhost:32896/kubevirt/virt-controller@sha256:1e78e1de5c0d2ce00b5a94ced9a1914e8122c39e1cb85a448bb50c6b6d1c97bb Deleted: sha256:a3561f9b266c26b5dc826a8535d2637f0489ff6858a6d34d5cd26700c0229fdd Deleted: sha256:9b8440a95192511cdf51e86cc1335301c5baf7e79cebad9b8c498605963bf558 Deleted: sha256:a9cef3289b1e53d4b9f507f185fc15bd4e57d192d3cd8ccaf4bf8946eb09317e Deleted: sha256:0fb9e91dc4878398a982bc923868dcfc898bfdad6f9a5709cdc2e75c6a8cc088 Untagged: localhost:32896/kubevirt/virt-launcher:devel Untagged: localhost:32896/kubevirt/virt-launcher@sha256:8d0d0da655575d6db353939854c2a156c7c19a2af02d5b782c401b9694de1803 Deleted: sha256:9f650be6e7e8e5e6b4d35bde99247f7f86d827f78322c5742c495dd2eb91dcd7 Deleted: sha256:d094270d915fde4deaba853ab64230169f1e378603207240981fc4df322c0d69 Deleted: sha256:86d19fca02a3c57995301373d895633e0b56f16fcd42d85db64563257ffdc882 Deleted: sha256:35ce4f8330d7071e02901b5622e5b32e92c28044dfa31637147525e6654f8d4a Deleted: sha256:0a1940c3b8f99d1055a17b3ad7e08a9caff348bbbfadc44eb4fd7f58c9792088 Deleted: sha256:9c9c4f26297758804be27aadb67f2475a2eeca152f7c525293cbfcc468f09360 Deleted: sha256:05f791734243510d18eb1b2a089746403e08de4596f60adda285e318f03466ee Deleted: sha256:053a722a7f215b3a7a6f3a87c83879c4ff086303c9556c9a10200707a20db52d Deleted: sha256:5fc71af7f2a8a521f34a7780504da6cf79dab53b8d9f451bcadd2ce01e91f97f Deleted: sha256:403cd22af6815b533063f1c16a12724cc7b9b3f408773d06b0223c60a3e400e6 Deleted: sha256:62d2a380ba8d27987d275db613a48eafb744b2bf8c5cfbd5c3ff944b233d6cc2 Deleted: sha256:7ee3b33172ff798b0b2f9c5d3707d5197a8c42e01a6ffeeee841ee7a7573152e Untagged: localhost:32896/kubevirt/virt-handler:devel Untagged: localhost:32896/kubevirt/virt-handler@sha256:ebed0d8ee74a9fe22c0c7315091300459ef1bfe9d6e773295652015eb32fdde9 Deleted: sha256:ba583b2d9a5bd526c951bc3ad82a30aff9761eba734544fb1c58fb0627b0e08e Deleted: sha256:d13d37d12de6d02a267044ef3b5c4aa3a20a4fe87a59fddfd6c6c2d4b82e6e63 Deleted: sha256:2fb6a0337c314ca74c7f6a916b02410d408a0d2292b837e05ecc8cf47695bb32 Deleted: sha256:7efa3a73f01bc42947757e9d2a984deaeafdfa99a5015c6299ab7e6fb3d34ad9 Untagged: localhost:32896/kubevirt/virt-api:devel Untagged: localhost:32896/kubevirt/virt-api@sha256:f9b1f40b33163907992b9f02fd75bf35563bc8793a6dfa9f64da7c05d9137003 Deleted: sha256:da50ca704fdb91a280df531eb43d1bc7adcdccc41306cddc5b46a65c54fab180 Deleted: sha256:be7f5b43ccf010619079926cc894f3ff9e694680eafc989d0da2e25b0ead587d Deleted: sha256:beb685c4eb18e6fcc99b953d814772d8c304dc1ba1b1ac41a7e0aa70485f9aac Deleted: sha256:4fc6eaf18bd08d1b8b77d72ea13f1402d37d58479cd524072de3a493e251f1f9 Untagged: localhost:32896/kubevirt/subresource-access-test:devel Untagged: localhost:32896/kubevirt/subresource-access-test@sha256:814ccc797b17a428fcc8bbe14fb1b52f1d3c0eaaee9fc1a7ed174e186158ae8a Deleted: sha256:adc58be0f5584f190a81478115a1282683977f01b2d195cd949d84bf9991ff24 Deleted: sha256:8d12fd42a107042316831e0152a8809b8f8d18ccade9563c31834478d764103e Deleted: sha256:f48cd2ef098849d00d99f0f5992e5517b1ba7b0f63c831b73ef9ef4bf9d84738 Deleted: sha256:cb9d0ab48ba37d3830e19ca18871bd9a06f94d2939325949cb3f7b8370d3df79 Untagged: localhost:32896/kubevirt/example-hook-sidecar:devel Untagged: localhost:32896/kubevirt/example-hook-sidecar@sha256:6a864befa2e02ed737c82b0b5bd074bfd3627bd503d9482f3a34135e96518904 Deleted: sha256:f7d6a05e02756ecdf7de421e6fb62c08b12050c582f8ab71adb1e7aba03125cc Deleted: sha256:13707bdd3cf5b97e5c4d5ab633c0f98e43131e2a51bd0d4731a426665f2f639d Deleted: 
sha256:ee1b86463af3d096c8bd946c15429af41dad7db581640c8305073a6b84917fae Deleted: sha256:3399fa566eda7139c5d1a922896975bd0860472c9c6000c956b8f62c777ad3d4 sha256:7d0ff7c8c318d908872cede18cbe0463a1f4e1df0e2ad9fe22f899e0e0bf4104 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:7d0ff7c8c318d908872cede18cbe0463a1f4e1df0e2ad9fe22f899e0e0bf4104 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 38.11 MB Step 1/8 : FROM fedora:27 ---> 9110ae7f579f Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> 0241ca8295a7 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> 348afd6e30e7 Step 5/8 : USER 1001 ---> Using cache ---> a56bbefef027 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> cc2bd67ab23a Removing intermediate container 20994850cdc4 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in a13cb2352c61 ---> 8b0a704e0888 Removing intermediate container a13cb2352c61 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-controller" '' ---> Running in bbe3bcd3d117 ---> 1aa82dc78049 Removing intermediate container bbe3bcd3d117 Successfully built 1aa82dc78049 Sending build context to Docker daemon 40.45 MB Step 1/10 : FROM kubevirt/libvirt:3.7.0 ---> c4e262d2dc3c Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 52c17d330685 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> c09056350983 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> 07ba4742bac3 Removing intermediate container 283106c38823 Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> efbc5ebb04a1 Removing intermediate container 39be9a2a1710 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in 6544f0e28074  ---> ece33a71dac9 Removing intermediate container 6544f0e28074 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in 5c5a23587f28  ---> f52e9f1ee37b Removing intermediate container 5c5a23587f28 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> 5a8e4415c95c Removing intermediate container a188d57edfbb Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Running in 0c3454615d85 ---> 2aa0a7562d39 Removing intermediate container 0c3454615d85 Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-launcher" '' ---> Running in bc7a67f5b019 ---> 831d6a33e477 Removing intermediate container bc7a67f5b019 Successfully built 831d6a33e477 Sending build context to Docker daemon 39.57 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> 4f709221a897 Removing intermediate container 1e17e7c5bca0 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Running in 
19264ab28eb6 ---> 6040c4f0e7a8 Removing intermediate container 19264ab28eb6 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-handler" '' ---> Running in 0e4982a0ff46 ---> 15dd15924984 Removing intermediate container 0e4982a0ff46 Successfully built 15dd15924984 Sending build context to Docker daemon 37.02 MB Step 1/8 : FROM fedora:27 ---> 9110ae7f579f Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> 5ad04faf42c8 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 686855fdf261 Step 5/8 : USER 1001 ---> Using cache ---> c633d341fb6f Step 6/8 : COPY virt-api /usr/bin/virt-api ---> 439faa5b7e13 Removing intermediate container d786fa0416c2 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in 6e53940cebc6 ---> 42da31850134 Removing intermediate container 6e53940cebc6 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-api" '' ---> Running in 61106d201fdc ---> 3b2fc3db939f Removing intermediate container 61106d201fdc Successfully built 3b2fc3db939f Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:27 ---> 9110ae7f579f Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/7 : ENV container docker ---> Using cache ---> fa9b94755746 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> f44e689d5514 Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> 898a453e2948 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 5aad873fd4d8 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 5a2b5c74be8c Successfully built 5a2b5c74be8c Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/5 : ENV container docker ---> Using cache ---> fa9b94755746 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> e4b4ed9ff163 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "vm-killer" '' ---> Using cache ---> ec04f91fb922 Successfully built ec04f91fb922 Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 496290160351 Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 081acc82039b Step 3/7 : ENV container docker ---> Using cache ---> 87a43203841c Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> bbc83781e0a9 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> c588d7a778a6 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> e28b44b64988 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "registry-disk-v1alpha" '' ---> Using cache ---> 16abf0a82ca0 Successfully built 16abf0a82ca0 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32999/kubevirt/registry-disk-v1alpha:devel ---> 16abf0a82ca0 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> c0e81d5abf07 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> dd8e8c439961 Step 4/4 : LABEL "cirros-registry-disk-demo" '' 
"kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 6c8a8f29b47e Successfully built 6c8a8f29b47e Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32999/kubevirt/registry-disk-v1alpha:devel ---> 16abf0a82ca0 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> f9c8f55f0098 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> dacc271e4b4a Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> f07b6945b42b Successfully built f07b6945b42b Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:32999/kubevirt/registry-disk-v1alpha:devel ---> 16abf0a82ca0 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> f9c8f55f0098 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 806b9b408532 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 599f9b3360ff Successfully built 599f9b3360ff Sending build context to Docker daemon 34.04 MB Step 1/8 : FROM fedora:27 ---> 9110ae7f579f Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> d310698d4b36 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> e8b8a9c6ab20 Step 5/8 : USER 1001 ---> Using cache ---> d741836b0961 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> e61d638175f0 Removing intermediate container 76a9bd5ecac7 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in 0ff5fe8a0142 ---> 7220385096ac Removing intermediate container 0ff5fe8a0142 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "subresource-access-test" '' ---> Running in 626a566cabf3 ---> 0acc9213ecf4 Removing intermediate container 626a566cabf3 Successfully built 0acc9213ecf4 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:27 ---> 9110ae7f579f Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/9 : ENV container docker ---> Using cache ---> fa9b94755746 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> 67d950d251fa Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 63dee67c4bde Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> f9f0a3f8320f Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 15d5af487ed6 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 62349a577761 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "winrmcli" '' ---> Using cache ---> f41c213849ba Successfully built f41c213849ba Sending build context to Docker daemon 34.49 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 0ae71e3c9e56 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> 93855cec4ecb Removing intermediate container 2a9a7bd4693b Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in 1ca19c3ac138 ---> fea42de70a9f Removing intermediate container 1ca19c3ac138 
Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Running in c1516960d8dc ---> 3d9d03df3b63 Removing intermediate container c1516960d8dc Successfully built 3d9d03df3b63 hack/build-docker.sh push The push refers to a repository [localhost:32999/kubevirt/virt-controller] 294bac84504f: Preparing a5b36bcc540d: Preparing 39bae602f753: Preparing a5b36bcc540d: Pushed 294bac84504f: Pushed 39bae602f753: Pushed devel: digest: sha256:916baea87c3ae08fd58c907b653d4c21ff6eb6b1c4e4fc8b48dbf775bf9e72cb size: 948 The push refers to a repository [localhost:32999/kubevirt/virt-launcher] 582165c66d21: Preparing 593bcce638c4: Preparing 4fa9a61cc3db: Preparing 6b5731af2425: Preparing 1767442a6a04: Preparing d56a24c0b6b9: Preparing 9e20b26113ea: Preparing a1a99db27cd1: Preparing ec5be2616f4d: Preparing ffcfbc9458ac: Preparing 68e0ce966da1: Preparing 39bae602f753: Preparing d56a24c0b6b9: Waiting ec5be2616f4d: Waiting a1a99db27cd1: Waiting ffcfbc9458ac: Waiting 68e0ce966da1: Waiting 593bcce638c4: Pushed 6b5731af2425: Pushed 582165c66d21: Pushed 9e20b26113ea: Pushed a1a99db27cd1: Pushed ffcfbc9458ac: Pushed ec5be2616f4d: Pushed 39bae602f753: Mounted from kubevirt/virt-controller 4fa9a61cc3db: Pushed d56a24c0b6b9: Pushed 1767442a6a04: Pushed 68e0ce966da1: Pushed devel: digest: sha256:1fe0aaf20134f3b3bb36d04f13ebe840090358f91d482aed09cce4ef76fde8ac size: 2828 The push refers to a repository [localhost:32999/kubevirt/virt-handler] c9b14bf88101: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/virt-launcher c9b14bf88101: Pushed devel: digest: sha256:54c61cca0bd594e1201851f480ef2d649997e6bebd47f8243016cd032c3b3f76 size: 741 The push refers to a repository [localhost:32999/kubevirt/virt-api] 52bc8e5b3d0f: Preparing 75ab133441d4: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/virt-handler 75ab133441d4: Pushed 52bc8e5b3d0f: Pushed devel: digest: sha256:07fc6c2c630830c83ca474ac18de283f02bbb31059413dd7c7d086ebdd7feb24 size: 948 The push refers to a repository [localhost:32999/kubevirt/disks-images-provider] ee9ad4a84d8d: Preparing 70611d11de64: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/virt-api ee9ad4a84d8d: Pushed 70611d11de64: Pushed devel: digest: sha256:5909293c04391e8285f6040e66550a4b7e396114d5b9b56fac578c172418ad5f size: 948 The push refers to a repository [localhost:32999/kubevirt/vm-killer] 4b816de0d5fd: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/disks-images-provider 4b816de0d5fd: Pushed devel: digest: sha256:172abce5d6cb5c0c9c9b6cb20e4219134a023cb0376781346f61a02c6b47e302 size: 740 The push refers to a repository [localhost:32999/kubevirt/registry-disk-v1alpha] cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing cb3d1019d03e: Pushed 626899eeec02: Pushed 132d61a890c5: Pushed devel: digest: sha256:daccb3f0f88a76226c345253449e80743bbf25f573dff9f043522332a54ddf3e size: 948 The push refers to a repository [localhost:32999/kubevirt/cirros-registry-disk-demo] 53ab419eb3e2: Preparing cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 626899eeec02: Mounted from kubevirt/registry-disk-v1alpha 132d61a890c5: Mounted from kubevirt/registry-disk-v1alpha cb3d1019d03e: Mounted from kubevirt/registry-disk-v1alpha 53ab419eb3e2: Pushed devel: digest: sha256:dc992f0e9f99169047179481c078ca7f6b4f2007391c4e62d80666e140ad5c3d size: 1160 The push refers to a repository [localhost:32999/kubevirt/fedora-cloud-registry-disk-demo] 80823b89837c: Preparing 
cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 626899eeec02: Mounted from kubevirt/cirros-registry-disk-demo cb3d1019d03e: Mounted from kubevirt/cirros-registry-disk-demo 132d61a890c5: Mounted from kubevirt/cirros-registry-disk-demo 80823b89837c: Pushed devel: digest: sha256:a563c009f82fc0a1f9095244fe0cb458b09e2bceccb4a6aa636d9898e3eb6c39 size: 1161 The push refers to a repository [localhost:32999/kubevirt/alpine-registry-disk-demo] 39d6040275f2: Preparing cb3d1019d03e: Preparing 626899eeec02: Preparing 132d61a890c5: Preparing 132d61a890c5: Mounted from kubevirt/fedora-cloud-registry-disk-demo 626899eeec02: Mounted from kubevirt/fedora-cloud-registry-disk-demo cb3d1019d03e: Mounted from kubevirt/fedora-cloud-registry-disk-demo 39d6040275f2: Pushed devel: digest: sha256:86fd58f48622c7e028e4ea2555097109e6ccaf5ec48c529b6bd596dc7993f981 size: 1160 The push refers to a repository [localhost:32999/kubevirt/subresource-access-test] 34263b400ebe: Preparing 6368c0536c64: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/vm-killer 6368c0536c64: Pushed 34263b400ebe: Pushed devel: digest: sha256:597a17e32d14734303ca35604e470ede5f3a7359a43f1b266ea6e20df063b773 size: 948 The push refers to a repository [localhost:32999/kubevirt/winrmcli] 00e232679558: Preparing b7685068dac4: Preparing 0d5d88e46b0e: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/subresource-access-test 00e232679558: Pushed 0d5d88e46b0e: Pushed b7685068dac4: Pushed devel: digest: sha256:17696d0db6f5548b29c59af10c3073871204708e9daa77e93aeea22f572d2bb9 size: 1165 The push refers to a repository [localhost:32999/kubevirt/example-hook-sidecar] 0b8f7721d13e: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/winrmcli 0b8f7721d13e: Pushed devel: digest: sha256:76faf0c966efe55dc56f53cb6db9a5b5861ef2218a98fd35cbf2ebed6457d8d3 size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ 
job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-54-g0872dda ++ KUBEVIRT_VERSION=v0.7.0-54-g0872dda + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:0977f1c716862710c8324798b95802e11a149f4532e33be20dd70877fe8f5332 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:32999/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... 
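
cluster/clean.sh now removes any leftover KubeVirt objects before redeploying. The trace below issues one delete per resource kind per namespace, relying on everything KubeVirt-related carrying the kubevirt.io label. Condensed into a sketch (an illustration of the pattern visible in the trace, not the literal script; the kubeconfig path and kind list are taken from the log):

# Cleanup pattern: delete every kubevirt.io-labeled object in each target namespace.
export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
for ns in default kube-system; do
    for kind in apiservices deployment rs services validatingwebhookconfiguration secrets \
                pv pvc ds customresourcedefinitions pods clusterrolebinding rolebinding \
                roles clusterroles serviceaccounts; do
        cluster/os-3.10.0/.kubectl -n "$ns" delete "$kind" -l kubevirt.io
    done
done
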
+ cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete 
clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ wc -l ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n 
kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ wc -l ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ 
APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-54-g0872dda ++ KUBEVIRT_VERSION=v0.7.0-54-g0872dda + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:0977f1c716862710c8324798b95802e11a149f4532e33be20dd70877fe8f5332 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:32999/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... 
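
With the configuration re-sourced, cluster/deploy.sh applies the freshly built manifests. The trace below creates every non-demo release manifest, then the testing fixtures, and finally grants the privileged SCC to the KubeVirt service accounts (needed on OpenShift). A condensed sketch of that flow, using the paths and the _kubectl wrapper shown in the log (a reconstruction, not deploy.sh itself):

# _kubectl is the repo's wrapper that exports KUBECONFIG=cluster/os-3.10.0/.kubeconfig
# before invoking the bundled .kubectl binary.
for manifest in "${MANIFESTS_OUT_DIR}/release/"*; do
    [[ $manifest =~ .*demo.* ]] && continue            # demo content is skipped for -release jobs
    _kubectl create -f "$manifest"
done
_kubectl create -f "${MANIFESTS_OUT_DIR}/testing" -R   # test PVs/PVCs, disks-images-provider, testing SA

# OpenShift only: KubeVirt's service accounts need the privileged SCC.
for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
    _kubectl adm policy add-scc-to-user privileged -z "$sa" -n kube-system
done
_kubectl adm policy add-scc-to-user privileged admin
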
+ [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n 
kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n kube-system --no-headers + '[' -n 'disks-images-provider-4vtln 0/1 ContainerCreating 0 4s disks-images-provider-75z5d 0/1 ContainerCreating 0 4s virt-api-7d79764579-5c4gq 0/1 ContainerCreating 0 6s virt-api-7d79764579-xvlfb 0/1 ContainerCreating 0 6s virt-controller-7d57d96b65-z4jkm 0/1 ContainerCreating 0 6s virt-handler-56vwc 0/1 ContainerCreating 0 6s virt-handler-7brqg 0/1 ContainerCreating 0 6s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
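The readiness check that follows is easier to read in script form than in the trace: for each namespace it first waits until no pod reports a non-Running phase, then until no container reports ready=false, sampling every 30 seconds against a 300-second timeout. A sketch of that loop, reconstructed from the trace (the exact script text is assumed; kubectl stands in for the cluster/kubectl.sh wrapper used in the run):

    namespaces=(kube-system default)
    timeout=300
    sample=30

    for i in "${namespaces[@]}"; do
        # wait until no pod in the namespace is in a non-Running phase
        current_time=0
        while [ -n "$(kubectl get pods -n "$i" --no-headers | grep -v Running)" ]; do
            echo "Waiting for kubevirt pods to enter the Running state ..."
            kubectl get pods -n "$i" --no-headers | grep -v Running
            sleep $sample
            current_time=$((current_time + sample))
            [ $current_time -gt $timeout ] && exit 1
        done

        # wait until every container in the namespace reports ready=true
        current_time=0
        while [ -n "$(kubectl get pods -n "$i" '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers | grep false)" ]; do
            sleep $sample
            current_time=$((current_time + sample))
            [ $current_time -gt $timeout ] && exit 1
        done

        kubectl get pods -n "$i"
    done

In the run below, the first pass finds the freshly created pods still in ContainerCreating, sleeps one 30-second sample, and the second pass finds everything Running and ready, so the loop prints the final pod listing and moves on to the default namespace.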
+ kubectl get pods -n kube-system --no-headers + cluster/kubectl.sh get pods -n kube-system --no-headers + grep -v Running disks-images-provider-4vtln 0/1 ContainerCreating 0 4s disks-images-provider-75z5d 0/1 ContainerCreating 0 4s virt-api-7d79764579-5c4gq 0/1 ContainerCreating 0 6s virt-api-7d79764579-xvlfb 0/1 ContainerCreating 0 6s virt-handler-56vwc 0/1 ContainerCreating 0 6s virt-handler-7brqg 0/1 ContainerCreating 0 6s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n kube-system --no-headers + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false + '[' -n '' ']' + kubectl get pods -n kube-system + cluster/kubectl.sh get pods -n kube-system NAME READY STATUS RESTARTS AGE disks-images-provider-4vtln 1/1 Running 0 37s disks-images-provider-75z5d 1/1 Running 0 37s master-api-node01 1/1 Running 1 23d master-controllers-node01 1/1 Running 1 23d master-etcd-node01 1/1 Running 1 23d virt-api-7d79764579-5c4gq 1/1 Running 0 39s virt-api-7d79764579-xvlfb 1/1 Running 0 39s virt-controller-7d57d96b65-z4jkm 1/1 Running 0 39s virt-controller-7d57d96b65-zwhlq 1/1 Running 0 39s virt-handler-56vwc 1/1 Running 0 39s virt-handler-7brqg 1/1 Running 0 39s + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n default --no-headers ++ cluster/kubectl.sh get pods -n default --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n default + cluster/kubectl.sh get pods -n default NAME READY STATUS RESTARTS AGE docker-registry-1-tqlsm 1/1 Running 2 23d registry-console-1-bhtqz 1/1 Running 2 23d router-1-r2xxq 1/1 Running 1 23d + kubectl version + cluster/kubectl.sh version oc v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 features: Basic-Auth GSSAPI Kerberos SPNEGO Server https://127.0.0.1:32996 openshift v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 + ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + [[ -d /home/nfs/images/windows2016 ]] + [[ openshift-3.10-release =~ windows.* ]] + FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + make functest hack/dockerized "hack/build-func-tests.sh" sha256:7d0ff7c8c318d908872cede18cbe0463a1f4e1df0e2ad9fe22f899e0e0bf4104 go version go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... 
compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1531660027 Will run 142 of 142 specs • [SLOW TEST:44.048 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 should have cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 ------------------------------ • [SLOW TEST:104.216 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 with injected ssh-key /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92 should have ssh-key under authorized keys /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93 ------------------------------ • [SLOW TEST:101.320 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userData source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118 should process provided cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119 ------------------------------ • [SLOW TEST:44.838 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 should take user-data from k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162 ------------------------------ • [SLOW TEST:110.730 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting and stopping the same VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:90 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:91 should success multiple times /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:92 ------------------------------ • [SLOW TEST:18.812 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:111 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:112 should not modify the spec on status update /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:113 ------------------------------ • [SLOW TEST:40.647 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting multiple VMIs /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:129 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:130 should success /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:131 ------------------------------ • [SLOW TEST:5.324 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to three, to two and then to zero replicas 
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:9.001 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:23.571 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should update readyReplicas once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157 ------------------------------ • [SLOW TEST:21.271 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove VMIs once it is marked for deletion /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:169 ------------------------------ • ------------------------------ • [SLOW TEST:5.746 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should not scale when paused and scale when resume /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223 ------------------------------ • [SLOW TEST:7.733 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove the finished VM /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279 ------------------------------ • [SLOW TEST:34.013 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T13:19:18.541332Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:19:18.578652Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:19:18.594408Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:19:25 http: TLS handshake error from 10.128.0.1:38548: EOF level=info timestamp=2018-07-15T13:19:28.636549Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:19:32.729790Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:19:35.476622Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:19:35.553567Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:19:35 http: TLS handshake error from 10.128.0.1:38600: EOF level=info timestamp=2018-07-15T13:19:38.784546Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:19:45 http: TLS handshake error from 10.128.0.1:38652: EOF level=info timestamp=2018-07-15T13:19:48.989190Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:19:55 http: TLS handshake error from 10.128.0.1:38702: EOF level=info timestamp=2018-07-15T13:19:59.150991Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:20:02.955753Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 13:18:00 http: TLS handshake error from 10.129.0.1:45216: EOF 2018/07/15 13:18:10 http: TLS handshake error from 10.129.0.1:45220: EOF 2018/07/15 13:18:20 http: TLS handshake error from 10.129.0.1:45226: EOF 2018/07/15 13:18:30 http: TLS handshake error from 10.129.0.1:45232: EOF level=info timestamp=2018-07-15T13:18:35.781581Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:18:40 http: TLS handshake error from 10.129.0.1:45238: EOF 2018/07/15 13:18:50 http: TLS handshake error from 10.129.0.1:45244: EOF 2018/07/15 13:19:00 http: TLS handshake error from 10.129.0.1:45250: EOF 2018/07/15 13:19:10 http: TLS handshake error from 10.129.0.1:45256: EOF 2018/07/15 13:19:20 http: TLS handshake error from 10.129.0.1:45262: EOF 2018/07/15 13:19:30 http: TLS handshake error from 10.129.0.1:45268: EOF level=info timestamp=2018-07-15T13:19:35.624019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:19:40 http: TLS handshake error from 10.129.0.1:45274: EOF 2018/07/15 13:19:50 http: TLS handshake error from 10.129.0.1:45280: EOF 2018/07/15 13:20:00 http: TLS handshake error from 10.129.0.1:45286: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T13:05:59.980714Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 level=info timestamp=2018-07-15T13:17:27.568081Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" 
level=info timestamp=2018-07-15T13:17:27.569933Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-15T13:17:27.582948Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-15T13:17:27.584469Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-15T13:17:27.593619Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-15T13:17:27.595494Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-15T13:17:27.596062Z pos=virtinformers.go:104 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-15T13:17:27.611274Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-07-15T13:17:27.618481Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-15T13:17:27.618665Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-15T13:17:27.618777Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-15T13:17:27.618995Z pos=preset.go:71 component=virt-controller service=http msg="Starting Virtual Machine Initializer." Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T13:15:34.336869Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:15:34.336901Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:15:34.338382Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbfl5cjwzs" level=info timestamp=2018-07-15T13:15:34.339650Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:15:34.339951Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilbfl5cjwzs, existing: false\n" level=info timestamp=2018-07-15T13:15:34.339990Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:15:34.340011Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T13:15:34.340092Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-07-15T13:15:34.340124Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:15:34.342410Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:16:17.251675Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:16:17.256977Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilbfl5cjwzs, existing: false\n" level=info timestamp=2018-07-15T13:16:17.258228Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:16:17.259052Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:16:17.259504Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:17:52.626818Z pos=device_controller.go:131 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-15T13:17:52.655072Z pos=device_controller.go:125 component=virt-handler msg="kvm device plugin started" 2018/07/15 13:17:52 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-kvm.sock->@: write: broken pipe" level=info timestamp=2018-07-15T13:17:52.686536Z pos=device_controller.go:125 component=virt-handler msg="tun device plugin started" 2018/07/15 13:17:52 grpc: Server.Serve failed to create ServerTransport: connection error: desc = "transport: write unix /var/lib/kubelet/device-plugins/kubevirt-tun.sock->@: write: broken pipe" level=info timestamp=2018-07-15T13:17:52.827367Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisskzn, existing: true\n" level=info timestamp=2018-07-15T13:17:52.827785Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-15T13:17:52.827865Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:17:52.828025Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmisskzn kind=VirtualMachineInstance uid=526ace5e-8831-11e8-9bf3-525500d15501 msg="No update processing required" level=info timestamp=2018-07-15T13:17:52.868351Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmisskzn kind=VirtualMachineInstance uid=526ace5e-8831-11e8-9bf3-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T13:17:52.868626Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmisskzn, existing: true\n" level=info timestamp=2018-07-15T13:17:52.868705Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-15T13:17:52.868757Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:17:52.868912Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmisskzn kind= uid=526ace5e-8831-11e8-9bf3-525500d15501 msg="No update processing required" level=info timestamp=2018-07-15T13:17:52.869011Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmisskzn kind= uid=526ace5e-8831-11e8-9bf3-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmisskzn-j8vf4 Pod phase: Pending • Failure [198.327 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected : 180000000000 to be nil /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:79 ------------------------------ STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-15T13:16:46.081459Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmisskzn-j8vf4" level=info timestamp=2018-07-15T13:17:01.245844Z pos=utils.go:243 component=tests msg="Pod owner ship transferred to the node virt-launcher-testvmisskzn-j8vf4" level=info timestamp=2018-07-15T13:17:02.799841Z pos=utils.go:243 component=tests msg="VirtualMachineInstance defined." level=info timestamp=2018-07-15T13:17:02.871191Z pos=utils.go:243 component=tests msg="VirtualMachineInstance started." STEP: Checking that the VirtualMachineInstance console has expected output level=info timestamp=2018-07-15T13:20:03.199332Z pos=utils.go:1265 component=tests namespace=kubevirt-test-default name=testvmisskzn kind=VirtualMachineInstance uid= msg="Login: [{2 \r\nISOLINUX 6.04 6.04-pre1 Copyright (C) 1994-2015 H. 
Peter Anvin et al\r\nboot: \u001b[?7h\r\n []}]" • [SLOW TEST:188.041 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:176.391 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:55.761 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113 should create a writeable emptyDisk with the right capacity /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115 ------------------------------ • [SLOW TEST:49.420 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined and a specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163 should create a writeable emptyDisk with the specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165 ------------------------------ • [SLOW TEST:43.396 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should be successfully started /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207 ------------------------------ • [SLOW TEST:111.458 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should not persist data /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218 ------------------------------ • [SLOW TEST:203.916 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With VirtualMachineInstance with two PVCs /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266 should start vmi multiple times /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278 ------------------------------ • [SLOW TEST:23.598 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 
should successfully start with hook sidecar annotation /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 ------------------------------ • [SLOW TEST:20.167 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 ------------------------------ • [SLOW TEST:23.221 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should update domain XML with SM BIOS properties /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83 ------------------------------ ••• ------------------------------ • [SLOW TEST:7.032 seconds] Templates /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42 Launching VMI from VM Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60 with given Fedora Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193 with given VM JSON from the Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152 with given VM from the VM JSON /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158 with given VMI from the VM /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163 should succeed to terminate the VMI using oc-patch command /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166 ------------------------------ • ------------------------------ • [SLOW TEST:5.015 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48 Without permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:56 should not be able to access subresource endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:57 ------------------------------ •• ------------------------------ • [SLOW TEST:127.521 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running 2018/07/15 13:40:15 http: TLS handshake error from 10.128.0.1:45158: EOF level=info timestamp=2018-07-15T13:40:17.790646Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:40:23.388471Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:40:24.530085Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:40:24.591341Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:40:24.733694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:40:24.749674Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:40:24.762038Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:40:25 http: TLS handshake error from 10.128.0.1:45210: EOF level=info timestamp=2018-07-15T13:40:33.568921Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:40:35 http: TLS handshake error from 10.128.0.1:45260: EOF level=info timestamp=2018-07-15T13:40:39.315208Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T13:40:43.767620Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:40:45 http: TLS handshake error from 10.128.0.1:45314: EOF level=info timestamp=2018-07-15T13:40:48.059059Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 13:38:40 http: TLS handshake error from 10.129.0.1:46002: EOF 2018/07/15 13:38:50 http: TLS handshake error from 10.129.0.1:46008: EOF 2018/07/15 13:39:00 http: TLS handshake error from 10.129.0.1:46014: EOF level=info timestamp=2018-07-15T13:39:09.157250Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:39:10 http: TLS handshake error from 10.129.0.1:46020: EOF 2018/07/15 13:39:20 http: TLS handshake error from 10.129.0.1:46028: EOF 2018/07/15 13:39:30 http: TLS handshake error from 10.129.0.1:46034: EOF 2018/07/15 13:39:40 http: TLS handshake error from 10.129.0.1:46040: EOF 2018/07/15 13:39:50 http: TLS handshake error from 10.129.0.1:46046: EOF 2018/07/15 13:40:00 http: TLS handshake error from 10.129.0.1:46052: EOF 2018/07/15 13:40:10 http: TLS handshake error from 10.129.0.1:46058: EOF 2018/07/15 13:40:20 http: TLS handshake error from 10.129.0.1:46064: EOF 2018/07/15 13:40:30 http: TLS handshake error from 10.129.0.1:46070: EOF 2018/07/15 13:40:40 http: TLS handshake error from 10.129.0.1:46076: EOF 2018/07/15 13:40:50 http: TLS handshake error from 10.129.0.1:46082: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running 
level=info timestamp=2018-07-15T13:35:13.493096Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=e4c7688a-8833-11e8-9bf3-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-15T13:35:13.493224Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=e4c7688a-8833-11e8-9bf3-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-15T13:35:13.493258Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-15T13:35:13.508402Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=e4c7688a-8833-11e8-9bf3-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-15T13:35:13.508507Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=e4c7688a-8833-11e8-9bf3-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-15T13:35:13.508540Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-15T13:35:35.236542Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:35:35.240030Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.278731Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:35:35.279903Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.369122Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:35:35.377526Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.489685Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:35:35.489812Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.838797Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix9nmn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix9nmn" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info 
timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T13:15:34.336869Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:15:34.336901Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:15:34.338382Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbfl5cjwzs" level=info timestamp=2018-07-15T13:15:34.339650Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:15:34.339951Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilbfl5cjwzs, existing: false\n" level=info timestamp=2018-07-15T13:15:34.339990Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:15:34.340011Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T13:15:34.340092Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:15:34.340124Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:15:34.342410Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:16:17.251675Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:16:17.256977Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilbfl5cjwzs, existing: false\n" level=info timestamp=2018-07-15T13:16:17.258228Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:16:17.259052Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:16:17.259504Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:36:14.155278Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:36:14.539777Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicx5p7, existing: true\n" level=info timestamp=2018-07-15T13:36:14.539895Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-15T13:36:14.539932Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:36:14.539955Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:36:14.696642Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-15T13:36:15.461307Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:36:16.120270Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-15T13:36:16.753587Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:36:16.798582Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: true\n" level=info timestamp=2018-07-15T13:36:16.825567Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-15T13:36:16.830132Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:36:16.833088Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:36:16.837402Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-15T13:36:16.929775Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi6tdqs-v7lgw Pod phase: Running level=info timestamp=2018-07-15T13:36:01.231499Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:01.244400Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID ee9ff5af-d229-46c7-bc1c-ab6ec90f5f03" level=info timestamp=2018-07-15T13:36:01.248644Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:01.456182Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:02.149618Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:02.205261Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:36:02.209028Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:02.210125Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:02.260747Z pos=monitor.go:222 component=virt-launcher msg="Found PID for ee9ff5af-d229-46c7-bc1c-ab6ec90f5f03: 162" level=info timestamp=2018-07-15T13:36:02.268567Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:02.269065Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:36:02.298690Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:02.472542Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:02.569619Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:02.590356Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmicx5p7-zwpnj Pod phase: Running level=info timestamp=2018-07-15T13:36:03.602942Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-15T13:36:04.345594Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:04.382503Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 18d01696-06ed-44ce-b08b-c26da5d608fc" level=info timestamp=2018-07-15T13:36:04.390670Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:05.278871Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:05.402865Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 18d01696-06ed-44ce-b08b-c26da5d608fc: 171" level=info timestamp=2018-07-15T13:36:05.753172Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:05.809818Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:36:05.813195Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:05.814071Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:05.928909Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:05.929873Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:36:06.006277Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:07.041708Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:14.738074Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmif4npl-q26m4 Pod phase: Running level=info timestamp=2018-07-15T13:36:04.359656Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:04.424451Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID f6610770-d0d6-4c00-9172-472fad848690" level=info timestamp=2018-07-15T13:36:04.424688Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:05.279089Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:05.438241Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f6610770-d0d6-4c00-9172-472fad848690: 176" level=info timestamp=2018-07-15T13:36:05.635660Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:05.705890Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:36:05.714731Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:05.715685Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:07.085035Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:07.085400Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:36:07.171163Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:09.564382Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:16.602269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:16.878282Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmix9nmn-8xm5x Pod phase: Running level=info timestamp=2018-07-15T13:36:02.949461Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Domain defined." level=info timestamp=2018-07-15T13:36:03.506709Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:03.582209Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:03.602118Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID ba3263e7-9aed-440a-935e-b7cc5d2ce0c7" level=info timestamp=2018-07-15T13:36:03.602644Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:04.546779Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:04.588478Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:36:04.596185Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:04.604914Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:04.623112Z pos=monitor.go:222 component=virt-launcher msg="Found PID for ba3263e7-9aed-440a-935e-b7cc5d2ce0c7: 167" level=info timestamp=2018-07-15T13:36:05.094818Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:05.094996Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:36:05.117632Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:05.285093Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:06.079955Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Synced vmi" ------------------------------ • Failure [187.799 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with custom MAC address [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: : 180000000000 expect: timer expired after 180 seconds not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:209 ------------------------------ STEP: checking br1 MTU inside the pod level=info timestamp=2018-07-15T13:37:47.125333Z pos=vmi_networking_test.go:170 component=tests msg="4: br1: mtu 1450 qdisc noqueue state UP group default \n link/ether 0a:58:0a:27:47:80 brd ff:ff:ff:ff:ff:ff\n inet 169.254.75.86/32 brd 169.254.75.86 scope global br1\n valid_lft forever preferred_lft forever\n inet6 fe80::858:aff:fe27:4780/64 scope link \n valid_lft forever preferred_lft forever\n" STEP: checking eth0 MTU inside the VirtualMachineInstance level=info timestamp=2018-07-15T13:37:47.641269Z pos=vmi_networking_test.go:190 component=tests msg="[{1 \r\n$ [$ ]} {3 ip address show eth0\r\n2: eth0: mtu 1450 qdisc pfifo_fast qlen 1000\r\n [2: eth0: mtu 1450 qdisc pfifo_fast qlen 1000\r\n]} {5 link/ether 0a:58:0a [0]}]" STEP: checking the VirtualMachineInstance can send MTU sized frames to another VirtualMachineInstance level=info timestamp=2018-07-15T13:40:53.535567Z pos=utils.go:1206 component=tests namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="[{1 \r\n\r\n$ [$ ]} {3 ping 10.129.0.97 -c 1 -w 5 -s 1422\r\nPING 10.129.0.97 (10.129.0.97): 1422 data bytes\r\n\r\n--- 10.129.0.97 ping statistics ---\r\n5 packets transmitted, 0 packets received, 100% packet loss\r\n$ [$ ]} {5 echo $?\r\n1\r\n$ []}]" • [SLOW TEST:6.999 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the internet /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •••• ------------------------------ • [SLOW TEST:5.564 seconds] Networking 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a service matching the vmi exposed /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:268 should be able to reach the vmi based on labels specified on the vmi /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:288 ------------------------------ •• ------------------------------ • [SLOW TEST:36.398 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom interface model /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:364 should expose the right device type to the guest /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:365 ------------------------------ • ------------------------------ • [SLOW TEST:48.153 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:398 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:399 ------------------------------ Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T13:45:14.558074Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:45:14.576004Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:45:15 http: TLS handshake error from 10.128.0.1:46796: EOF level=info timestamp=2018-07-15T13:45:19.812427Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:45:19.943852Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:45:25 http: TLS handshake error from 10.128.0.1:46846: EOF level=info timestamp=2018-07-15T13:45:28.707924Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:45:28.722321Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:45:30.080923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:45:35 http: TLS handshake error from 10.128.0.1:46896: EOF level=info timestamp=2018-07-15T13:45:39.843104Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T13:45:40.239674Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:45:45 http: TLS handshake error from 10.128.0.1:46950: EOF level=info timestamp=2018-07-15T13:45:50.180923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:45:50.402453Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 13:44:00 http: TLS handshake error from 10.129.0.1:46218: EOF level=info timestamp=2018-07-15T13:44:09.675117Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:44:10 http: TLS handshake error from 10.129.0.1:46224: EOF 2018/07/15 13:44:20 http: TLS handshake error from 10.129.0.1:46230: EOF 2018/07/15 13:44:30 http: TLS handshake error from 10.129.0.1:46236: EOF level=info timestamp=2018-07-15T13:44:39.801745Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:44:40 http: TLS handshake error from 10.129.0.1:46242: EOF 2018/07/15 13:44:50 http: TLS handshake error from 10.129.0.1:46248: EOF 2018/07/15 13:45:00 http: TLS handshake error from 10.129.0.1:46254: EOF level=info timestamp=2018-07-15T13:45:09.720917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:45:10 http: TLS handshake error from 10.129.0.1:46260: EOF 2018/07/15 13:45:20 http: TLS handshake error from 10.129.0.1:46268: EOF 2018/07/15 13:45:30 http: TLS handshake error from 10.129.0.1:46274: EOF 2018/07/15 13:45:40 http: TLS handshake error from 10.129.0.1:46280: EOF 2018/07/15 13:45:50 http: TLS handshake error from 10.129.0.1:46286: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T13:35:35.240030Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.278731Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:35:35.279903Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.369122Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:35:35.377526Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.489685Z 
pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:35:35.489812Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:35:35.838797Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix9nmn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix9nmn" level=info timestamp=2018-07-15T13:41:28.391555Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:41:28.395400Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:41:28.633096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6qsnj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6qsnj" level=info timestamp=2018-07-15T13:42:06.466980Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:42:06.467450Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:42:54.648844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik9njf kind= uid=f9d60006-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:42:54.652682Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik9njf kind= uid=f9d60006-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T13:15:34.336869Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:15:34.336901Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing deletion." 
level=info timestamp=2018-07-15T13:15:34.338382Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmilbfl5cjwzs" level=info timestamp=2018-07-15T13:15:34.339650Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:15:34.339951Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilbfl5cjwzs, existing: false\n" level=info timestamp=2018-07-15T13:15:34.339990Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:15:34.340011Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T13:15:34.340092Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:15:34.340124Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:15:34.342410Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:16:17.251675Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:16:17.256977Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilbfl5cjwzs, existing: false\n" level=info timestamp=2018-07-15T13:16:17.258228Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:16:17.259052Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:16:17.259504Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmilbfl5cjwzs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:42:24.800394Z pos=vm.go:420 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="No update processing required" level=error timestamp=2018-07-15T13:42:24.931393Z pos=vm.go:432 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikj82t\": the object has been modified; please apply your changes to the latest version and try again" msg="Updating the VirtualMachineInstance status failed." 
level=info timestamp=2018-07-15T13:42:24.931896Z pos=vm.go:251 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikj82t\": the object has been modified; please apply your changes to the latest version and try again" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikj82t" level=info timestamp=2018-07-15T13:42:24.932045Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: true\n" level=info timestamp=2018-07-15T13:42:24.932110Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-15T13:42:24.932175Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:42:24.932219Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:42:24.936659Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-15T13:42:24.950785Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:42:24.950969Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: true\n" level=info timestamp=2018-07-15T13:42:24.951019Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-15T13:42:24.951097Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:42:24.951140Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:42:24.951321Z pos=vm.go:417 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-15T13:42:24.972307Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Synchronization loop succeeded." Pod name: netcat2twhh Pod phase: Succeeded ++ head -n 1 +++ nc my-subdomain.myvmi.kubevirt-test-default 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcat4zk6w Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.96 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcat85xd4 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.96 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcat8wvfb Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.96 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcatgttx2 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.96 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcath5f7g Pod phase: Succeeded ++ head -n 1 +++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' 
+ '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatlc6zp Pod phase: Failed ++ head -n 1 +++ nc wrongservice.kubevirt-test-default 1500 -i 1 -w 1 Ncat: Could not resolve hostname "wrongservice.kubevirt-test-default": Name or service not known. QUITTING. + x= + echo '' + '[' '' = 'Hello World!' ']' + echo failed + exit 1 failed Pod name: virt-launcher-testvmi6qsnj-p4qct Pod phase: Running level=info timestamp=2018-07-15T13:41:45.785257Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Domain defined." level=info timestamp=2018-07-15T13:41:46.346787Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:41:46.359666Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 6f5e0af0-1d0b-40c0-b865-62075f8315b1" level=info timestamp=2018-07-15T13:41:46.359879Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:41:46.370234Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:41:46.822028Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:41:46.897062Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Domain started." level=info timestamp=2018-07-15T13:41:46.901129Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:41:46.906788Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:41:46.934197Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:41:46.934513Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:41:46.989628Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:41:47.005735Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:41:47.158906Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:41:47.363560Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6f5e0af0-1d0b-40c0-b865-62075f8315b1: 141" Pod name: virt-launcher-testvmi6tdqs-v7lgw Pod phase: Running level=info timestamp=2018-07-15T13:36:01.231499Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:01.244400Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID ee9ff5af-d229-46c7-bc1c-ab6ec90f5f03" level=info timestamp=2018-07-15T13:36:01.248644Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:01.456182Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:02.149618Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:02.205261Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs 
kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Domain started." level=info timestamp=2018-07-15T13:36:02.209028Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:02.210125Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:02.260747Z pos=monitor.go:222 component=virt-launcher msg="Found PID for ee9ff5af-d229-46c7-bc1c-ab6ec90f5f03: 162" level=info timestamp=2018-07-15T13:36:02.268567Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:02.269065Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:36:02.298690Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:02.472542Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:02.569619Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:02.590356Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6tdqs kind= uid=f3fdac7b-8833-11e8-9bf3-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmicx5p7-zwpnj Pod phase: Running level=info timestamp=2018-07-15T13:36:03.602942Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-15T13:36:04.345594Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:04.382503Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 18d01696-06ed-44ce-b08b-c26da5d608fc" level=info timestamp=2018-07-15T13:36:04.390670Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:05.278871Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:05.402865Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 18d01696-06ed-44ce-b08b-c26da5d608fc: 171" level=info timestamp=2018-07-15T13:36:05.753172Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:05.809818Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:36:05.813195Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:05.814071Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:05.928909Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:05.929873Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:36:06.006277Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:07.041708Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:14.738074Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicx5p7 kind= uid=f3f5961c-8833-11e8-9bf3-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmif4npl-q26m4 Pod phase: Running level=info timestamp=2018-07-15T13:36:04.359656Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:04.424451Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID f6610770-d0d6-4c00-9172-472fad848690" level=info timestamp=2018-07-15T13:36:04.424688Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:05.279089Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:05.438241Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f6610770-d0d6-4c00-9172-472fad848690: 176" level=info timestamp=2018-07-15T13:36:05.635660Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:05.705890Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:36:05.714731Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:05.715685Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:07.085035Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:07.085400Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:36:07.171163Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:36:09.564382Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:16.602269Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:36:16.878282Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmif4npl kind= uid=f3f01e19-8833-11e8-9bf3-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmik9njf-bf5fc Pod phase: Pending Pod name: virt-launcher-testvmikj82t-rtcqv Pod phase: Running level=info timestamp=2018-07-15T13:42:23.943016Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:42:23.951568Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:42:24.459229Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID 29a3b8c0-f411-4479-b905-a0eb169b56be" level=info timestamp=2018-07-15T13:42:24.460652Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:42:24.635325Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:42:24.653476Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:42:24.664490Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:42:24.666815Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:42:24.677993Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:42:24.678123Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-15T13:42:24.711706Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-15T13:42:24.713568Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:42:24.946980Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:42:24.964164Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-15T13:42:25.469991Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 29a3b8c0-f411-4479-b905-a0eb169b56be: 145" Pod name: virt-launcher-testvmix9nmn-8xm5x Pod phase: Running level=info timestamp=2018-07-15T13:36:02.949461Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Domain defined." level=info timestamp=2018-07-15T13:36:03.506709Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-15T13:36:03.582209Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-15T13:36:03.602118Z pos=virt-launcher.go:215 component=virt-launcher msg="Detected domain with UUID ba3263e7-9aed-440a-935e-b7cc5d2ce0c7" level=info timestamp=2018-07-15T13:36:03.602644Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-15T13:36:04.546779Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-15T13:36:04.588478Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-15T13:36:04.596185Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Synced vmi"
level=info timestamp=2018-07-15T13:36:04.604914Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-15T13:36:04.623112Z pos=monitor.go:222 component=virt-launcher msg="Found PID for ba3263e7-9aed-440a-935e-b7cc5d2ce0c7: 167"
level=info timestamp=2018-07-15T13:36:05.094818Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-15T13:36:05.094996Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received"
level=info timestamp=2018-07-15T13:36:05.117632Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1"
level=info timestamp=2018-07-15T13:36:05.285093Z pos=client.go:145 component=virt-launcher msg="processed event"
level=info timestamp=2018-07-15T13:36:06.079955Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmix9nmn kind= uid=f4088098-8833-11e8-9bf3-525500d15501 msg="Synced vmi"
• Failure [183.158 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address in non-conventional format
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:410
    should configure custom MAC address [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:411
    Timed out after 90.176s.
    Expected
        : false
    to equal
        : true
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041
------------------------------
STEP: checking eth0 MAC address
level=info timestamp=2018-07-15T13:42:55.529654Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmik9njf-bf5fc"
• [SLOW TEST:34.685 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address and slirp interface
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:423
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:424
------------------------------
Pod name: disks-images-provider-4vtln
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-75z5d
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5c4gq
Pod phase: Running
level=info timestamp=2018-07-15T13:49:01.320942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T13:49:01.361211Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T13:49:04.796215Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/15 13:49:05 http: TLS handshake error from 10.128.0.1:47988: EOF
level=info timestamp=2018-07-15T13:49:10.029446Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-07-15T13:49:15.019249Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=-
method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:49:15 http: TLS handshake error from 10.128.0.1:48040: EOF level=info timestamp=2018-07-15T13:49:22.027641Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:49:25.254401Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:49:25 http: TLS handshake error from 10.128.0.1:48090: EOF level=info timestamp=2018-07-15T13:49:27.885504Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:49:27.903626Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:49:27.944839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:49:31.838803Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:49:31.862695Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 13:47:10 http: TLS handshake error from 10.129.0.1:46338: EOF 2018/07/15 13:47:20 http: TLS handshake error from 10.129.0.1:46344: EOF 2018/07/15 13:47:30 http: TLS handshake error from 10.129.0.1:46350: EOF 2018/07/15 13:47:40 http: TLS handshake error from 10.129.0.1:46356: EOF 2018/07/15 13:47:50 http: TLS handshake error from 10.129.0.1:46362: EOF 2018/07/15 13:48:00 http: TLS handshake error from 10.129.0.1:46368: EOF 2018/07/15 13:48:10 http: TLS handshake error from 10.129.0.1:46374: EOF 2018/07/15 13:48:20 http: TLS handshake error from 10.129.0.1:46382: EOF 2018/07/15 13:48:30 http: TLS handshake error from 10.129.0.1:46388: EOF 2018/07/15 13:48:40 http: TLS handshake error from 10.129.0.1:46394: EOF 2018/07/15 13:48:50 http: TLS handshake error from 10.129.0.1:46400: EOF 2018/07/15 13:49:00 http: TLS handshake error from 10.129.0.1:46406: EOF 2018/07/15 13:49:10 http: TLS handshake error from 10.129.0.1:46412: EOF 2018/07/15 13:49:20 http: TLS handshake error from 10.129.0.1:46418: EOF 2018/07/15 13:49:30 http: TLS handshake error from 10.129.0.1:46424: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T13:35:35.838797Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix9nmn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix9nmn" level=info 
timestamp=2018-07-15T13:41:28.391555Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:41:28.395400Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6qsnj kind= uid=c663df96-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:41:28.633096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6qsnj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6qsnj" level=info timestamp=2018-07-15T13:42:06.466980Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:42:06.467450Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:42:54.648844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik9njf kind= uid=f9d60006-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:42:54.652682Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik9njf kind= uid=f9d60006-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:45:57.825282Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigj4zc kind= uid=67024784-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:45:57.828475Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigj4zc kind= uid=67024784-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:45:58.149158Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigj4zc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigj4zc" level=info timestamp=2018-07-15T13:45:58.182823Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigj4zc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigj4zc" level=info timestamp=2018-07-15T13:46:34.305903Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihwv7b kind= uid=7cc6843b-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:46:34.306228Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihwv7b kind= uid=7cc6843b-8835-11e8-9bf3-525500d15501 msg="Marking 
VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:46:34.514131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihwv7b\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihwv7b" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T13:46:33.199917Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.206048Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:46:33.207648Z pos=vm.go:354 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.208595Z pos=vm.go:408 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-15T13:46:33.211172Z pos=vm.go:556 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Grace period expired, killing deleted VirtualMachineInstance testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.301215Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:46:33.304108Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-15T13:46:33.306787Z pos=vm.go:757 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=Domain uid=67024784-8835-11e8-9bf3-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-07-15T13:46:33.306880Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigj4zc, existing: false\n" level=info timestamp=2018-07-15T13:46:33.306904Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.306962Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T13:46:33.307077Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.307113Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:46:33.307458Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.308806Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-7brqg
Pod phase: Running
level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n"
level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmihwv7b-mfhfg
Pod phase: Pending
• Failure [182.525 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
      with a cirros image
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
        should return that we are running cirros [It]
        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67
        Timed out after 90.191s.
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ STEP: Creating a new VirtualMachineInstance level=info timestamp=2018-07-15T13:46:35.014195Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmihwv7b-mfhfg" Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T13:52:08.342906Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:10.050618Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:52:15 http: TLS handshake error from 10.128.0.1:48968: EOF level=info timestamp=2018-07-15T13:52:18.427319Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:23.580063Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:52:25 http: TLS handshake error from 10.128.0.1:49018: EOF level=info timestamp=2018-07-15T13:52:28.502416Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:28.697357Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:28.724782Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:28.739387Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:33.831648Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:33.882278Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:52:34.573335Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T13:52:34.633961Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:52:35 http: TLS handshake error from 10.128.0.1:49074: EOF Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 13:50:20 
http: TLS handshake error from 10.129.0.1:46454: EOF 2018/07/15 13:50:30 http: TLS handshake error from 10.129.0.1:46460: EOF level=info timestamp=2018-07-15T13:50:39.677726Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:50:40 http: TLS handshake error from 10.129.0.1:46466: EOF 2018/07/15 13:50:50 http: TLS handshake error from 10.129.0.1:46472: EOF 2018/07/15 13:51:00 http: TLS handshake error from 10.129.0.1:46478: EOF 2018/07/15 13:51:10 http: TLS handshake error from 10.129.0.1:46484: EOF 2018/07/15 13:51:20 http: TLS handshake error from 10.129.0.1:46492: EOF 2018/07/15 13:51:30 http: TLS handshake error from 10.129.0.1:46498: EOF 2018/07/15 13:51:40 http: TLS handshake error from 10.129.0.1:46504: EOF 2018/07/15 13:51:50 http: TLS handshake error from 10.129.0.1:46510: EOF 2018/07/15 13:52:00 http: TLS handshake error from 10.129.0.1:46516: EOF 2018/07/15 13:52:10 http: TLS handshake error from 10.129.0.1:46522: EOF 2018/07/15 13:52:20 http: TLS handshake error from 10.129.0.1:46528: EOF 2018/07/15 13:52:30 http: TLS handshake error from 10.129.0.1:46534: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T13:41:28.633096Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6qsnj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6qsnj" level=info timestamp=2018-07-15T13:42:06.466980Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:42:06.467450Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikj82t kind= uid=dd21623b-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:42:54.648844Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik9njf kind= uid=f9d60006-8834-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:42:54.652682Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik9njf kind= uid=f9d60006-8834-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:45:57.825282Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigj4zc kind= uid=67024784-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:45:57.828475Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigj4zc kind= uid=67024784-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:45:58.149158Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigj4zc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigj4zc" level=info timestamp=2018-07-15T13:45:58.182823Z pos=vmi.go:157 
component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigj4zc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigj4zc" level=info timestamp=2018-07-15T13:46:34.305903Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihwv7b kind= uid=7cc6843b-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:46:34.306228Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihwv7b kind= uid=7cc6843b-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:46:34.514131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihwv7b\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihwv7b" level=info timestamp=2018-07-15T13:49:35.277908Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihwv7b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihwv7b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 7cc6843b-8835-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihwv7b" level=info timestamp=2018-07-15T13:49:35.470701Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv9fsb kind= uid=e8c2e00d-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:49:35.471888Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv9fsb kind= uid=e8c2e00d-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T13:46:33.199917Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.206048Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:46:33.207648Z pos=vm.go:354 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.208595Z pos=vm.go:408 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing shutdown." 
level=info timestamp=2018-07-15T13:46:33.211172Z pos=vm.go:556 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Grace period expired, killing deleted VirtualMachineInstance testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.301215Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:46:33.304108Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-15T13:46:33.306787Z pos=vm.go:757 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=Domain uid=67024784-8835-11e8-9bf3-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-07-15T13:46:33.306880Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigj4zc, existing: false\n" level=info timestamp=2018-07-15T13:46:33.306904Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.306962Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T13:46:33.307077Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.307113Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:46:33.307458Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.308806Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiv9fsb-twclz Pod phase: Pending • Failure [181.150 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a fedora image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:76 should return that we are running fedora [It] /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77 Timed out after 90.006s. 
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ STEP: Creating a new VirtualMachineInstance level=info timestamp=2018-07-15T13:49:36.182626Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmiv9fsb-twclz" Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running 2018/07/15 13:55:05 http: TLS handshake error from 10.128.0.1:49856: EOF level=info timestamp=2018-07-15T13:55:05.904701Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:55:09.975883Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T13:55:11.605487Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:55:15 http: TLS handshake error from 10.128.0.1:49908: EOF level=info timestamp=2018-07-15T13:55:21.825882Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:55:25.036227Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:55:25 http: TLS handshake error from 10.128.0.1:49960: EOF level=info timestamp=2018-07-15T13:55:29.748014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:55:29.798338Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:55:29.834337Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:55:32.037499Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:55:35 http: TLS handshake error from 10.128.0.1:50010: EOF level=info timestamp=2018-07-15T13:55:36.045556Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:55:36.056649Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 13:53:20 http: TLS handshake error from 10.129.0.1:46564: EOF 
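The three-minute Console failure above has the usual Ginkgo/Gomega shape: tests/utils.go:1041 is a polling assertion that kept evaluating to false until its 90s budget ran out, because the virt-launcher pod for testvmiv9fsb never left Pending and the VMI therefore never reported Running. A minimal sketch of that wait pattern follows; the helper name, the getPhase callback, and the exact timings are illustrative assumptions, not KubeVirt's actual tests/utils.go code.

// Minimal sketch (assumed shape, not KubeVirt's tests/utils.go) of the Eventually
// poll that produces "Timed out after 90.006s ... Expected false to equal true".
package tests

import (
	"time"

	. "github.com/onsi/gomega"
)

// waitForVMIRunning polls until the VMI reports phase Running or the timeout hits.
// getPhase is a hypothetical stand-in for a KubeVirt client Get call.
func waitForVMIRunning(getPhase func() (string, error)) {
	Eventually(func() bool {
		phase, err := getPhase()
		if err != nil {
			return false // not created or not readable yet; keep polling
		}
		return phase == "Running"
	}, 90*time.Second, 1*time.Second).Should(BeTrue(),
		"VirtualMachineInstance should reach phase Running before the 90s timeout")
}

When the launcher pod stays Pending, as in the dumps above, the closure never returns true and Gomega reports the bare "Expected false to equal true" seen here.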
2018/07/15 13:53:30 http: TLS handshake error from 10.129.0.1:46570: EOF 2018/07/15 13:53:40 http: TLS handshake error from 10.129.0.1:46576: EOF 2018/07/15 13:53:50 http: TLS handshake error from 10.129.0.1:46582: EOF 2018/07/15 13:54:00 http: TLS handshake error from 10.129.0.1:46588: EOF level=info timestamp=2018-07-15T13:54:09.783834Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:54:10 http: TLS handshake error from 10.129.0.1:46594: EOF 2018/07/15 13:54:20 http: TLS handshake error from 10.129.0.1:46602: EOF 2018/07/15 13:54:30 http: TLS handshake error from 10.129.0.1:46608: EOF 2018/07/15 13:54:40 http: TLS handshake error from 10.129.0.1:46614: EOF 2018/07/15 13:54:50 http: TLS handshake error from 10.129.0.1:46620: EOF 2018/07/15 13:55:00 http: TLS handshake error from 10.129.0.1:46626: EOF 2018/07/15 13:55:10 http: TLS handshake error from 10.129.0.1:46632: EOF 2018/07/15 13:55:20 http: TLS handshake error from 10.129.0.1:46638: EOF 2018/07/15 13:55:30 http: TLS handshake error from 10.129.0.1:46644: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T13:45:57.825282Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigj4zc kind= uid=67024784-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:45:57.828475Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigj4zc kind= uid=67024784-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:45:58.149158Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigj4zc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigj4zc" level=info timestamp=2018-07-15T13:45:58.182823Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigj4zc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigj4zc" level=info timestamp=2018-07-15T13:46:34.305903Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihwv7b kind= uid=7cc6843b-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:46:34.306228Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihwv7b kind= uid=7cc6843b-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:46:34.514131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihwv7b\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihwv7b" level=info timestamp=2018-07-15T13:49:35.277908Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihwv7b\": StorageError: invalid object, Code: 4, Key: 
/kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihwv7b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 7cc6843b-8835-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihwv7b" level=info timestamp=2018-07-15T13:49:35.470701Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv9fsb kind= uid=e8c2e00d-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:49:35.471888Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv9fsb kind= uid=e8c2e00d-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:52:36.244657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv9fsb\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv9fsb, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e8c2e00d-8835-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv9fsb" level=info timestamp=2018-07-15T13:52:36.377434Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisz9mb kind= uid=549752bc-8836-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:52:36.378029Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisz9mb kind= uid=549752bc-8836-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:52:36.565199Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisz9mb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisz9mb" level=info timestamp=2018-07-15T13:52:36.603777Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisz9mb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisz9mb" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T13:46:33.199917Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.206048Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:46:33.207648Z pos=vm.go:354 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.208595Z pos=vm.go:408 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing shutdown." 
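The repeated "Operation cannot be fulfilled ... the object has been modified; please apply your changes to the latest version and try again" entries are optimistic-concurrency conflicts: the controller wrote with a stale resourceVersion and requeues the VMI to retry against the latest copy, so on their own they are benign. Outside a workqueue-driven controller, the same 409 is normally absorbed with client-go's RetryOnConflict helper; the sketch below shows that generic pattern and is not virt-controller's own code.

// Generic conflict-retry sketch using client-go; illustrates why the controller's
// "reenqueuing VirtualMachineInstance" log lines are expected rather than fatal.
package example

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

// updateLatest stands in for "GET the newest copy, mutate it, PUT it back";
// the concrete KubeVirt client calls are intentionally omitted.
func updateWithConflictRetry(updateLatest func() error) error {
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		// Every attempt must re-read the object so the write carries the
		// current resourceVersion; otherwise the apiserver keeps answering 409.
		return updateLatest()
	})
	if err != nil {
		return fmt.Errorf("update still conflicting after retries: %v", err)
	}
	return nil
}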
level=info timestamp=2018-07-15T13:46:33.211172Z pos=vm.go:556 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Grace period expired, killing deleted VirtualMachineInstance testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.301215Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:46:33.304108Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-15T13:46:33.306787Z pos=vm.go:757 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=Domain uid=67024784-8835-11e8-9bf3-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-07-15T13:46:33.306880Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigj4zc, existing: false\n" level=info timestamp=2018-07-15T13:46:33.306904Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.306962Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T13:46:33.307077Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.307113Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:46:33.307458Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.308806Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmisz9mb-6lbpx Pod phase: Pending • Failure [180.919 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:35 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:64 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 should be able to reconnect to console multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:86 Timed out after 90.184s. 
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ STEP: Creating a new VirtualMachineInstance level=info timestamp=2018-07-15T13:52:37.058121Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmisz9mb-6lbpx" Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T13:58:05.502238Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:58:05 http: TLS handshake error from 10.128.0.1:50784: EOF level=info timestamp=2018-07-15T13:58:07.864021Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:58:07.878871Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:58:10.023587Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T13:58:15.800356Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:58:15 http: TLS handshake error from 10.128.0.1:50834: EOF 2018/07/15 13:58:25 http: TLS handshake error from 10.128.0.1:50888: EOF level=info timestamp=2018-07-15T13:58:26.056891Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:58:26.295013Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:58:30.681718Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:58:30.732575Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T13:58:30.751012Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 13:58:35 http: TLS handshake error from 10.128.0.1:50940: EOF level=info timestamp=2018-07-15T13:58:36.179819Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 13:56:20 http: TLS handshake error from 10.129.0.1:46674: EOF 
2018/07/15 13:56:30 http: TLS handshake error from 10.129.0.1:46680: EOF 2018/07/15 13:56:40 http: TLS handshake error from 10.129.0.1:46686: EOF 2018/07/15 13:56:50 http: TLS handshake error from 10.129.0.1:46692: EOF 2018/07/15 13:57:00 http: TLS handshake error from 10.129.0.1:46698: EOF level=info timestamp=2018-07-15T13:57:09.951004Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 13:57:10 http: TLS handshake error from 10.129.0.1:46704: EOF 2018/07/15 13:57:20 http: TLS handshake error from 10.129.0.1:46712: EOF 2018/07/15 13:57:30 http: TLS handshake error from 10.129.0.1:46718: EOF 2018/07/15 13:57:40 http: TLS handshake error from 10.129.0.1:46724: EOF 2018/07/15 13:57:50 http: TLS handshake error from 10.129.0.1:46730: EOF 2018/07/15 13:58:00 http: TLS handshake error from 10.129.0.1:46736: EOF 2018/07/15 13:58:10 http: TLS handshake error from 10.129.0.1:46742: EOF 2018/07/15 13:58:20 http: TLS handshake error from 10.129.0.1:46748: EOF 2018/07/15 13:58:30 http: TLS handshake error from 10.129.0.1:46754: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T13:46:34.306228Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihwv7b kind= uid=7cc6843b-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:46:34.514131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihwv7b\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihwv7b" level=info timestamp=2018-07-15T13:49:35.277908Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihwv7b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmihwv7b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 7cc6843b-8835-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihwv7b" level=info timestamp=2018-07-15T13:49:35.470701Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv9fsb kind= uid=e8c2e00d-8835-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:49:35.471888Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv9fsb kind= uid=e8c2e00d-8835-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:52:36.244657Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv9fsb\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiv9fsb, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: e8c2e00d-8835-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv9fsb" level=info timestamp=2018-07-15T13:52:36.377434Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmisz9mb kind= uid=549752bc-8836-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:52:36.378029Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisz9mb kind= uid=549752bc-8836-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:52:36.565199Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisz9mb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisz9mb" level=info timestamp=2018-07-15T13:52:36.603777Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisz9mb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisz9mb" level=info timestamp=2018-07-15T13:55:37.259548Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisz9mb\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisz9mb, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 549752bc-8836-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisz9mb" level=info timestamp=2018-07-15T13:55:37.446477Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivclrf kind= uid=c0829b20-8836-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:55:37.448223Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivclrf kind= uid=c0829b20-8836-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:55:37.785946Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivclrf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivclrf" level=info timestamp=2018-07-15T13:55:37.895108Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivclrf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivclrf" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T13:46:33.199917Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.206048Z pos=vm.go:332 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-15T13:46:33.207648Z pos=vm.go:354 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc 
kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.208595Z pos=vm.go:408 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing shutdown." level=info timestamp=2018-07-15T13:46:33.211172Z pos=vm.go:556 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Grace period expired, killing deleted VirtualMachineInstance testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.301215Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:46:33.304108Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-15T13:46:33.306787Z pos=vm.go:757 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=Domain uid=67024784-8835-11e8-9bf3-525500d15501 msg="Domain is in state Shutoff reason Destroyed" level=info timestamp=2018-07-15T13:46:33.306880Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigj4zc, existing: false\n" level=info timestamp=2018-07-15T13:46:33.306904Z pos=vm.go:330 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-15T13:46:33.306962Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T13:46:33.307077Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T13:46:33.307113Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T13:46:33.307458Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmigj4zc" level=info timestamp=2018-07-15T13:46:33.308806Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmigj4zc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmivclrf-rwcdj Pod phase: Pending • Failure [181.165 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with 3 CPU cores /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56 should report 3 cpu cores under guest OS [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62 Timed out after 90.184s. 
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ STEP: Starting a VirtualMachineInstance level=info timestamp=2018-07-15T13:55:38.253268Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmivclrf-rwcdj" S [SKIPPING] [0.273 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-2Mi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-2Mi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:160 ------------------------------ S [SKIPPING] [0.161 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-1Gi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-1Gi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:160 ------------------------------ • ------------------------------ • [SLOW TEST:81.895 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238 when CPU model defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:277 should report defined CPU model /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:278 ------------------------------ • [SLOW TEST:80.671 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:238 when CPU model not defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:305 should report CPU model from libvirt capabilities /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:306 ------------------------------ Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running 2018/07/15 14:03:35 http: TLS handshake error from 10.128.0.1:52500: EOF level=info timestamp=2018-07-15T14:03:38.620391Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:03:45 http: TLS handshake error from 10.128.0.1:52554: EOF level=info timestamp=2018-07-15T14:03:48.880903Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:03:55 http: TLS handshake error from 10.128.0.1:52698: EOF level=info 
timestamp=2018-07-15T14:03:56.167193Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:03:56.181923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:03:59.057696Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:04:01.111667Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:04:01.116066Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:04:02.444758Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:04:05 http: TLS handshake error from 10.128.0.1:52750: EOF level=info timestamp=2018-07-15T14:04:09.336535Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:04:15 http: TLS handshake error from 10.128.0.1:52804: EOF level=info timestamp=2018-07-15T14:04:19.567544Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running level=info timestamp=2018-07-15T14:02:34.360904Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:02:40 http: TLS handshake error from 10.129.0.1:46908: EOF 2018/07/15 14:02:50 http: TLS handshake error from 10.129.0.1:46914: EOF 2018/07/15 14:03:00 http: TLS handshake error from 10.129.0.1:46920: EOF 2018/07/15 14:03:10 http: TLS handshake error from 10.129.0.1:46926: EOF level=info timestamp=2018-07-15T14:03:10.902254Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:03:20 http: TLS handshake error from 10.129.0.1:46932: EOF 2018/07/15 14:03:30 http: TLS handshake error from 10.129.0.1:46940: EOF 2018/07/15 14:03:40 http: TLS handshake error from 10.129.0.1:46946: EOF level=info timestamp=2018-07-15T14:03:41.028468Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:03:50 http: TLS handshake error from 10.129.0.1:46952: EOF 2018/07/15 14:04:00 http: TLS handshake error from 10.129.0.1:46958: EOF 2018/07/15 14:04:10 http: TLS handshake error from 10.129.0.1:46964: EOF level=info timestamp=2018-07-15T14:04:10.970479Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:04:20 http: TLS handshake error from 10.129.0.1:46970: EOF 
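The two hugepages cases in the test output above were skipped rather than failed because no node in this two-node cluster advertises hugepages-2Mi or hugepages-1Gi capacity. A capacity probe of roughly the following shape is enough to decide that skip; the function name and plumbing are illustrative, not the exact KubeVirt test helper.

// Sketch of the node-capacity check behind "No node with hugepages hugepages-2Mi
// capacity"; names and wiring are illustrative, not KubeVirt's test helper.
package example

import (
	corev1 "k8s.io/api/core/v1"
)

// anyNodeHasHugepages reports whether at least one node advertises non-zero
// capacity for the requested page size, e.g. "hugepages-2Mi" or "hugepages-1Gi".
func anyNodeHasHugepages(nodes []corev1.Node, pageSize corev1.ResourceName) bool {
	for _, node := range nodes {
		if qty, ok := node.Status.Capacity[pageSize]; ok && !qty.IsZero() {
			return true
		}
	}
	return false
}

When such a probe finds nothing, the suite skips the case, presumably via Ginkgo's Skip, with the "No node with hugepages ... capacity" message logged above instead of scheduling a VMI that could never be placed.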
Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T13:58:40.497858Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijxb2t kind= uid=2da047a4-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:58:40.498115Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijxb2t kind= uid=2da047a4-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:58:41.022788Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijxb2t\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijxb2t" level=info timestamp=2018-07-15T13:58:59.278321Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi99tpf kind= uid=38ca1545-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T13:58:59.279080Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi99tpf kind= uid=38ca1545-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T13:58:59.828035Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi99tpf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi99tpf" level=info timestamp=2018-07-15T13:58:59.954154Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi99tpf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi99tpf" level=info timestamp=2018-07-15T14:00:02.546220Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirhdd6 kind= uid=5e865954-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:00:02.546717Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirhdd6 kind= uid=5e865954-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:00:20.475111Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiscprp kind= uid=693720e5-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:00:20.475255Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiscprp kind= uid=693720e5-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:00:20.775074Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiscprp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiscprp" level=info 
timestamp=2018-07-15T14:01:22.877139Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin6csk kind= uid=8e6926b0-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:01:22.878638Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin6csk kind= uid=8e6926b0-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:01:22.999497Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin6csk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin6csk" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmin6csk-pxhmn Pod phase: Pending • Failure [180.958 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 New VirtualMachineInstance with all supported drives /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:326 should have all the device nodes [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:349 Timed out after 90.003s. Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ level=info timestamp=2018-07-15T14:01:23.520135Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmin6csk-pxhmn" •• Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T14:06:33.571866Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:06:35 http: TLS handshake error from 10.128.0.1:53528: EOF level=info timestamp=2018-07-15T14:06:42.300582Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:06:45 http: TLS handshake error from 10.128.0.1:53582: EOF level=info timestamp=2018-07-15T14:06:52.535218Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:06:55 http: TLS handshake error from 10.128.0.1:53630: EOF level=info timestamp=2018-07-15T14:06:58.450228Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:06:58.458315Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:07:02.734045Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:07:03.967564Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:07:05 http: TLS handshake error from 10.128.0.1:53678: EOF level=info timestamp=2018-07-15T14:07:11.144507Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:07:12.950684Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:07:15 http: TLS handshake error from 10.128.0.1:53734: EOF level=info timestamp=2018-07-15T14:07:23.170398Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:05:20 http: TLS handshake error from 10.129.0.1:47006: EOF 2018/07/15 14:05:30 http: TLS handshake error from 10.129.0.1:47012: EOF 2018/07/15 14:05:40 http: TLS handshake error from 10.129.0.1:47018: EOF 2018/07/15 14:05:50 http: TLS handshake error from 10.129.0.1:47024: EOF 2018/07/15 14:06:00 http: TLS handshake error from 10.129.0.1:47030: EOF 2018/07/15 14:06:10 http: TLS handshake error from 10.129.0.1:47036: EOF level=info timestamp=2018-07-15T14:06:10.982162Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:06:20 http: TLS handshake error from 10.129.0.1:47042: EOF 2018/07/15 14:06:30 http: TLS handshake error from 10.129.0.1:47050: EOF 2018/07/15 14:06:40 http: TLS handshake error from 10.129.0.1:47056: EOF level=info timestamp=2018-07-15T14:06:41.061570Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:06:50 http: TLS handshake error from 10.129.0.1:47062: EOF 2018/07/15 14:07:00 http: TLS handshake error from 10.129.0.1:47068: EOF 2018/07/15 14:07:10 http: TLS handshake error from 10.129.0.1:47074: EOF 2018/07/15 14:07:20 http: TLS handshake error from 10.129.0.1:47080: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:00:20.775074Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiscprp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiscprp" level=info timestamp=2018-07-15T14:01:22.877139Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin6csk kind= uid=8e6926b0-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:01:22.878638Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin6csk kind= uid=8e6926b0-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:01:22.999497Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin6csk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin6csk" level=info timestamp=2018-07-15T14:04:23.762845Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rt6d kind= uid=fa3adf40-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:04:23.763207Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rt6d kind= uid=fa3adf40-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:04:23.892691Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rt6d\": the object has been modified; please apply your changes to the latest version 
and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2rt6d" level=info timestamp=2018-07-15T14:04:24.006123Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rt6d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2rt6d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: fa3adf40-8837-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2rt6d" level=info timestamp=2018-07-15T14:04:24.111687Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2tk78 kind= uid=fa6f2de9-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:04:24.111798Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2tk78 kind= uid=fa6f2de9-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:04:24.389189Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2tk78\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2tk78" level=info timestamp=2018-07-15T14:04:24.483223Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2tk78\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2tk78, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: fa6f2de9-8837-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2tk78" level=info timestamp=2018-07-15T14:04:24.635333Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi47szh kind= uid=fabec454-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:04:24.635600Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi47szh kind= uid=fabec454-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:04:24.958236Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi47szh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi47szh" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance 
object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi47szh-kc6pp Pod phase: Pending ------------------------------ • Failure [181.066 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 should start it [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:80 Timed out after 90.000s. 
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ level=info timestamp=2018-07-15T14:04:25.473955Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmi47szh-kc6pp" Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T14:09:35.669490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:09:35 http: TLS handshake error from 10.128.0.1:54546: EOF 2018/07/15 14:09:45 http: TLS handshake error from 10.128.0.1:54602: EOF level=info timestamp=2018-07-15T14:09:45.946330Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:09:55 http: TLS handshake error from 10.128.0.1:54654: EOF level=info timestamp=2018-07-15T14:09:56.028121Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:10:00.375859Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:10:00.431829Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:10:04.869669Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:10:05 http: TLS handshake error from 10.128.0.1:54702: EOF level=info timestamp=2018-07-15T14:10:06.129615Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:10:11.246977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:10:15 http: TLS handshake error from 10.128.0.1:54758: EOF level=info timestamp=2018-07-15T14:10:16.341763Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:10:25 http: TLS handshake error from 10.128.0.1:54806: EOF Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:08:20 http: TLS handshake error from 10.129.0.1:47116: EOF 2018/07/15 14:08:30 http: TLS handshake error from 10.129.0.1:47122: EOF 2018/07/15 14:08:40 http: TLS handshake error from 10.129.0.1:47128: EOF level=info timestamp=2018-07-15T14:08:41.098093Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:08:50 http: TLS handshake error from 
10.129.0.1:47134: EOF 2018/07/15 14:09:00 http: TLS handshake error from 10.129.0.1:47140: EOF 2018/07/15 14:09:10 http: TLS handshake error from 10.129.0.1:47146: EOF 2018/07/15 14:09:20 http: TLS handshake error from 10.129.0.1:47152: EOF 2018/07/15 14:09:30 http: TLS handshake error from 10.129.0.1:47160: EOF 2018/07/15 14:09:40 http: TLS handshake error from 10.129.0.1:47166: EOF level=info timestamp=2018-07-15T14:09:41.078905Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:09:50 http: TLS handshake error from 10.129.0.1:47172: EOF 2018/07/15 14:10:00 http: TLS handshake error from 10.129.0.1:47178: EOF 2018/07/15 14:10:10 http: TLS handshake error from 10.129.0.1:47184: EOF 2018/07/15 14:10:20 http: TLS handshake error from 10.129.0.1:47190: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:01:22.878638Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin6csk kind= uid=8e6926b0-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:01:22.999497Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin6csk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin6csk" level=info timestamp=2018-07-15T14:04:23.762845Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rt6d kind= uid=fa3adf40-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:04:23.763207Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2rt6d kind= uid=fa3adf40-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:04:23.892691Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rt6d\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2rt6d" level=info timestamp=2018-07-15T14:04:24.006123Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2rt6d\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2rt6d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: fa3adf40-8837-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2rt6d" level=info timestamp=2018-07-15T14:04:24.111687Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2tk78 kind= uid=fa6f2de9-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:04:24.111798Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2tk78 kind= uid=fa6f2de9-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:04:24.389189Z pos=vmi.go:157 component=virt-controller 
service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2tk78\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2tk78" level=info timestamp=2018-07-15T14:04:24.483223Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2tk78\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi2tk78, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: fa6f2de9-8837-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2tk78" level=info timestamp=2018-07-15T14:04:24.635333Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi47szh kind= uid=fabec454-8837-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:04:24.635600Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi47szh kind= uid=fabec454-8837-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:04:24.958236Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi47szh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi47szh" level=info timestamp=2018-07-15T14:07:25.824094Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir67wv kind= uid=66bd6430-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:07:25.824495Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir67wv kind= uid=66bd6430-8838-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmir67wv-jjjmf Pod phase: Pending • Failure [181.278 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 should attach virt-launcher to it [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:86 Timed out after 90.179s. Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ level=info timestamp=2018-07-15T14:07:26.509987Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmir67wv-jjjmf" •••• Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T14:12:39.780130Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:12:45 http: TLS handshake error from 10.128.0.1:55542: EOF level=info timestamp=2018-07-15T14:12:50.034056Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:12:55 http: TLS handshake error from 10.128.0.1:55592: EOF level=info timestamp=2018-07-15T14:13:00.120095Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:13:01.375461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:13:01.387195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:13:02.741809Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:13:02.750526Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 
2018/07/15 14:13:05 http: TLS handshake error from 10.128.0.1:55640: EOF level=info timestamp=2018-07-15T14:13:06.409389Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:13:10.357653Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:13:15 http: TLS handshake error from 10.128.0.1:55696: EOF level=info timestamp=2018-07-15T14:13:20.587004Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:13:25 http: TLS handshake error from 10.128.0.1:55744: EOF Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:11:50 http: TLS handshake error from 10.129.0.1:47244: EOF 2018/07/15 14:12:00 http: TLS handshake error from 10.129.0.1:47250: EOF 2018/07/15 14:12:10 http: TLS handshake error from 10.129.0.1:47256: EOF level=info timestamp=2018-07-15T14:12:10.845349Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:12:20 http: TLS handshake error from 10.129.0.1:47262: EOF 2018/07/15 14:12:30 http: TLS handshake error from 10.129.0.1:47270: EOF level=info timestamp=2018-07-15T14:12:34.358380Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:12:34.440585Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:12:40 http: TLS handshake error from 10.129.0.1:47276: EOF level=info timestamp=2018-07-15T14:12:40.886115Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:12:50 http: TLS handshake error from 10.129.0.1:47282: EOF 2018/07/15 14:13:00 http: TLS handshake error from 10.129.0.1:47288: EOF 2018/07/15 14:13:10 http: TLS handshake error from 10.129.0.1:47294: EOF level=info timestamp=2018-07-15T14:13:11.019260Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:13:20 http: TLS handshake error from 10.129.0.1:47300: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:04:24.958236Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi47szh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi47szh" level=info timestamp=2018-07-15T14:07:25.824094Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir67wv kind= uid=66bd6430-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:07:25.824495Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir67wv kind= uid=66bd6430-8838-11e8-9bf3-525500d15501 msg="Marking 
VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:26.777567Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir67wv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmir67wv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66bd6430-8838-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir67wv" level=info timestamp=2018-07-15T14:10:26.789822Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir67wv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmir67wv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66bd6430-8838-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir67wv" level=info timestamp=2018-07-15T14:10:27.421993Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxfch kind= uid=d2f9f1fb-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:10:27.424027Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxfch kind= uid=d2f9f1fb-8838-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:27.644968Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxfch\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxfch" level=info timestamp=2018-07-15T14:10:27.710068Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxfch\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxfch" level=info timestamp=2018-07-15T14:10:27.770170Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxfch\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimxfch, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d2f9f1fb-8838-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxfch" level=info timestamp=2018-07-15T14:10:27.879203Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirvpdc kind= uid=d3411311-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:10:27.879367Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirvpdc kind= uid=d3411311-8838-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:28.249011Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default 
name=testvmi96ht5 kind= uid=d379fe80-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:10:28.249263Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ht5 kind= uid=d379fe80-8838-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:28.521916Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ht5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ht5" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmi96ht5-qbpc8 Pod phase: Pending ------------------------------ • Failure [180.842 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Alpine as first boot [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.181s. Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ STEP: defining a VirtualMachineInstance with an Alpine disk STEP: adding a Cirros Disk STEP: setting boot order STEP: starting VirtualMachineInstance STEP: Waiting the VirtualMachineInstance start level=info timestamp=2018-07-15T14:10:28.985087Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmi96ht5-qbpc8" Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running 2018/07/15 14:15:45 http: TLS handshake error from 10.128.0.1:56472: EOF level=info timestamp=2018-07-15T14:15:53.782173Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:15:55 http: TLS handshake error from 10.128.0.1:56524: EOF level=info timestamp=2018-07-15T14:16:01.417228Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:16:01.421481Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:16:04.044706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:16:05 http: TLS handshake error from 10.128.0.1:56572: EOF level=info timestamp=2018-07-15T14:16:06.029778Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:16:06.048077Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:16:07.820745Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:16:11.358691Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:16:14.172762Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" 
proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:16:15 http: TLS handshake error from 10.128.0.1:56626: EOF level=info timestamp=2018-07-15T14:16:24.272125Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:16:25 http: TLS handshake error from 10.128.0.1:56674: EOF Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:14:10 http: TLS handshake error from 10.129.0.1:47330: EOF 2018/07/15 14:14:20 http: TLS handshake error from 10.129.0.1:47336: EOF 2018/07/15 14:14:30 http: TLS handshake error from 10.129.0.1:47342: EOF 2018/07/15 14:14:40 http: TLS handshake error from 10.129.0.1:47348: EOF level=info timestamp=2018-07-15T14:14:40.855228Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:14:50 http: TLS handshake error from 10.129.0.1:47354: EOF 2018/07/15 14:15:00 http: TLS handshake error from 10.129.0.1:47360: EOF 2018/07/15 14:15:10 http: TLS handshake error from 10.129.0.1:47366: EOF 2018/07/15 14:15:20 http: TLS handshake error from 10.129.0.1:47372: EOF 2018/07/15 14:15:30 http: TLS handshake error from 10.129.0.1:47380: EOF 2018/07/15 14:15:40 http: TLS handshake error from 10.129.0.1:47386: EOF 2018/07/15 14:15:50 http: TLS handshake error from 10.129.0.1:47392: EOF 2018/07/15 14:16:00 http: TLS handshake error from 10.129.0.1:47398: EOF 2018/07/15 14:16:10 http: TLS handshake error from 10.129.0.1:47404: EOF 2018/07/15 14:16:20 http: TLS handshake error from 10.129.0.1:47410: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:07:25.824495Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmir67wv kind= uid=66bd6430-8838-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:26.777567Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir67wv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmir67wv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66bd6430-8838-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir67wv" level=info timestamp=2018-07-15T14:10:26.789822Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmir67wv\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmir67wv, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 66bd6430-8838-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmir67wv" level=info timestamp=2018-07-15T14:10:27.421993Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxfch kind= uid=d2f9f1fb-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:10:27.424027Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxfch kind= uid=d2f9f1fb-8838-11e8-9bf3-525500d15501 
msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:27.644968Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxfch\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxfch" level=info timestamp=2018-07-15T14:10:27.710068Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxfch\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxfch" level=info timestamp=2018-07-15T14:10:27.770170Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxfch\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimxfch, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d2f9f1fb-8838-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxfch" level=info timestamp=2018-07-15T14:10:27.879203Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirvpdc kind= uid=d3411311-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:10:27.879367Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirvpdc kind= uid=d3411311-8838-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:28.249011Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ht5 kind= uid=d379fe80-8838-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:10:28.249263Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96ht5 kind= uid=d379fe80-8838-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:10:28.521916Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ht5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ht5" level=info timestamp=2018-07-15T14:13:29.101658Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxk4l kind= uid=3f44c34d-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:13:29.102484Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxk4l kind= uid=3f44c34d-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler 
msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiwxk4l-s5tlz Pod phase: Pending • Failure [181.004 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Cirros as first boot [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.177s. 
Expected : false to equal : true
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041
------------------------------
STEP: defining a VirtualMachineInstance with an Alpine disk
STEP: adding a Cirros Disk
STEP: setting boot order
STEP: starting VirtualMachineInstance
STEP: Waiting the VirtualMachineInstance start
level=info timestamp=2018-07-15T14:13:30.012768Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmiwxk4l-s5tlz"
• [SLOW TEST:60.507 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
Creating a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
with user-data
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205
without k8s secret
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206
should retry starting the VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:207
------------------------------
•! Panic [60.343 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
Creating a VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74
with user-data
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205
without k8s secret
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206
should log warning and proceed once the secret is there [It]
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:237
Test Panicked
runtime error: invalid memory address or nil pointer dereference
/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/panic.go:505
Full Stack Trace
/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/panic.go:505 +0x229
kubevirt.io/kubevirt/tests_test.glob..func16.3.9.1.2()
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:258 +0x431
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc420329a40, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
/root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa
testing.tRunner(0xc420408ff0, 0x13528e8)
/gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
created by testing.(*T).Run
/gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0
------------------------------
STEP: Starting a VirtualMachineInstance
STEP: Checking that VirtualMachineInstance start failed
level=info timestamp=2018-07-15T14:17:31.246053Z pos=utils.go:254 component=tests msg="Created virtual machine pod virt-launcher-testvminn5lc-pch2n"
Pod name: disks-images-provider-4vtln Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-75z5d Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5c4gq Pod phase: Running
level=info timestamp=2018-07-15T14:21:00.158080Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:21:01.863783Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-15T14:21:01.869814Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/15 14:21:05 http: TLS handshake error from 10.128.0.1:58116: EOF
level=info timestamp=2018-07-15T14:21:09.525987Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:21:09.544996Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:21:10.280751Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:21:10.322334Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/15 14:21:15 http: TLS handshake error from 10.128.0.1:58170: EOF
level=info timestamp=2018-07-15T14:21:20.517647Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/07/15 14:21:25 http: TLS handshake error from 10.128.0.1:58220: EOF
level=info timestamp=2018-07-15T14:21:27.239550Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:21:27.263446Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:21:27.288279Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200
contentLength=136 level=info timestamp=2018-07-15T14:21:30.749897Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:19:30 http: TLS handshake error from 10.129.0.1:47526: EOF 2018/07/15 14:19:40 http: TLS handshake error from 10.129.0.1:47532: EOF 2018/07/15 14:19:50 http: TLS handshake error from 10.129.0.1:47538: EOF 2018/07/15 14:20:00 http: TLS handshake error from 10.129.0.1:47544: EOF 2018/07/15 14:20:10 http: TLS handshake error from 10.129.0.1:47550: EOF 2018/07/15 14:20:20 http: TLS handshake error from 10.129.0.1:47556: EOF 2018/07/15 14:20:30 http: TLS handshake error from 10.129.0.1:47562: EOF 2018/07/15 14:20:40 http: TLS handshake error from 10.129.0.1:47568: EOF level=info timestamp=2018-07-15T14:20:41.028800Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:20:50 http: TLS handshake error from 10.129.0.1:47574: EOF 2018/07/15 14:21:00 http: TLS handshake error from 10.129.0.1:47580: EOF 2018/07/15 14:21:10 http: TLS handshake error from 10.129.0.1:47586: EOF level=info timestamp=2018-07-15T14:21:10.883348Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:21:20 http: TLS handshake error from 10.129.0.1:47592: EOF 2018/07/15 14:21:30 http: TLS handshake error from 10.129.0.1:47600: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:10:28.521916Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96ht5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96ht5" level=info timestamp=2018-07-15T14:13:29.101658Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxk4l kind= uid=3f44c34d-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:13:29.102484Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwxk4l kind= uid=3f44c34d-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:16:30.107218Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxk4l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwxk4l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3f44c34d-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxk4l" level=info timestamp=2018-07-15T14:16:30.270961Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi28vg5 kind= uid=ab42607e-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:16:30.271229Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi28vg5 kind= uid=ab42607e-8839-11e8-9bf3-525500d15501 
msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:16:30.416152Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi28vg5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi28vg5" level=info timestamp=2018-07-15T14:17:30.505404Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi28vg5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi28vg5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ab42607e-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi28vg5" level=info timestamp=2018-07-15T14:17:30.618387Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminn5lc kind= uid=cf391cf1-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:17:30.620403Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminn5lc kind= uid=cf391cf1-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:17:30.778307Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:17:30.819276Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:18:30.888781Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminn5lc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cf391cf1-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:18:31.036675Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikfgh7 kind= uid=f33de2d4-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:18:31.038222Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikfgh7 kind= uid=f33de2d4-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info 
timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
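The virt-handler entries above and below all follow one per-VMI synchronization pass: check whether the VirtualMachineInstance object still exists, check whether a libvirt domain for it still exists on the node, clean up leftover local ephemeral data when both are gone, and finish with "Synchronization loop succeeded." The following is only a sketch of that control flow with made-up types and names in plain Go; it is not KubeVirt's actual vm.go.

package main

import "log"

// Illustrative stand-ins; the real handler consults informer caches and libvirt.
type vmiState struct {
	vmiExists    bool // is the VirtualMachineInstance still present in the cluster?
	domainExists bool // is a libvirt domain for it still present on this node?
}

// syncVMI mirrors the logged sequence: "Processing vmi X, existing: ...",
// "Domain: existing: ...", optional cleanup, then "Synchronization loop succeeded."
func syncVMI(name string, s vmiState) {
	log.Printf("Processing vmi %s, existing: %t", name, s.vmiExists)
	log.Printf("Domain: existing: %t", s.domainExists)
	if !s.vmiExists && !s.domainExists {
		// Both the API object and the domain are gone; only local leftovers remain.
		log.Printf("Processing local ephemeral data cleanup for shutdown domain (%s).", name)
	}
	log.Printf("Synchronization loop succeeded. (%s)", name)
}

func main() {
	syncVMI("testvmikj82t", vmiState{vmiExists: false, domainExists: false})
}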
level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmikfgh7-vtn5z Pod phase: Pending • Failure [181.010 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-launcher crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:285 should be stopped and have Failed phase [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:286 Timed out after 90.000s. 
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ level=info timestamp=2018-07-15T14:18:31.686661Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmikfgh7-vtn5z" Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running 2018/07/15 14:23:55 http: TLS handshake error from 10.128.0.1:59000: EOF level=info timestamp=2018-07-15T14:24:01.457760Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:24:01.468234Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:24:03.712710Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:24:05 http: TLS handshake error from 10.128.0.1:59048: EOF level=info timestamp=2018-07-15T14:24:11.900918Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:24:12.035677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:24:12.040904Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:24:14.089181Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:24:15 http: TLS handshake error from 10.128.0.1:59104: EOF level=info timestamp=2018-07-15T14:24:24.183942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:24:25 http: TLS handshake error from 10.128.0.1:59154: EOF level=info timestamp=2018-07-15T14:24:27.992773Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:24:28.021399Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:24:28.060683Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running level=info timestamp=2018-07-15T14:22:34.403756Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 
statusCode=404 contentLength=19 2018/07/15 14:22:40 http: TLS handshake error from 10.129.0.1:47642: EOF 2018/07/15 14:22:50 http: TLS handshake error from 10.129.0.1:47648: EOF 2018/07/15 14:23:00 http: TLS handshake error from 10.129.0.1:47654: EOF 2018/07/15 14:23:10 http: TLS handshake error from 10.129.0.1:47660: EOF level=info timestamp=2018-07-15T14:23:10.868108Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:23:20 http: TLS handshake error from 10.129.0.1:47666: EOF 2018/07/15 14:23:30 http: TLS handshake error from 10.129.0.1:47672: EOF 2018/07/15 14:23:40 http: TLS handshake error from 10.129.0.1:47678: EOF 2018/07/15 14:23:50 http: TLS handshake error from 10.129.0.1:47684: EOF 2018/07/15 14:24:00 http: TLS handshake error from 10.129.0.1:47690: EOF 2018/07/15 14:24:10 http: TLS handshake error from 10.129.0.1:47696: EOF level=info timestamp=2018-07-15T14:24:10.945259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:24:20 http: TLS handshake error from 10.129.0.1:47702: EOF 2018/07/15 14:24:30 http: TLS handshake error from 10.129.0.1:47710: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:16:30.107218Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiwxk4l\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmiwxk4l, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3f44c34d-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiwxk4l" level=info timestamp=2018-07-15T14:16:30.270961Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi28vg5 kind= uid=ab42607e-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:16:30.271229Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi28vg5 kind= uid=ab42607e-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:16:30.416152Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi28vg5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi28vg5" level=info timestamp=2018-07-15T14:17:30.505404Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi28vg5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi28vg5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ab42607e-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi28vg5" level=info timestamp=2018-07-15T14:17:30.618387Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminn5lc kind= uid=cf391cf1-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" 
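The recurring "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ...: the object has been modified; please apply your changes to the latest version and try again" lines in the virt-controller log are ordinary optimistic-concurrency conflicts; the controller handles them by re-enqueuing the VMI, as the messages say. A client that updates objects directly would instead re-read and retry. The sketch below shows that standard client-go idiom with a ConfigMap as a generic stand-in object; it is not KubeVirt code and the resource choice is arbitrary.

package main

import (
	"context"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
	"k8s.io/client-go/util/retry"
)

// updateWithRetry re-reads the latest object inside retry.RetryOnConflict and
// reapplies the change, which is the usual answer to "the object has been
// modified; please apply your changes to the latest version and try again".
func updateWithRetry(cs *kubernetes.Clientset, ns, name string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		cm, err := cs.CoreV1().ConfigMaps(ns).Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		if cm.Labels == nil {
			cm.Labels = map[string]string{}
		}
		cm.Labels["touched"] = "true" // the mutation we want to land
		_, err = cs.CoreV1().ConfigMaps(ns).Update(context.TODO(), cm, metav1.UpdateOptions{})
		return err // a Conflict here triggers another round with a fresh read
	})
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	if err := updateWithRetry(cs, "kube-system", "example"); err != nil {
		log.Fatal(err)
	}
}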
level=info timestamp=2018-07-15T14:17:30.620403Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminn5lc kind= uid=cf391cf1-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:17:30.778307Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:17:30.819276Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:18:30.888781Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminn5lc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cf391cf1-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:18:31.036675Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikfgh7 kind= uid=f33de2d4-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:18:31.038222Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikfgh7 kind= uid=f33de2d4-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:21:31.955989Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg6v5 kind= uid=5f12e9cb-883a-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:21:31.956436Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg6v5 kind= uid=5f12e9cb-883a-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:21:32.127131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg6v5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg6v5" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
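The evenly spaced "http: TLS handshake error from 10.129.0.1:...: EOF" lines in both virt-api pods are what Go's net/http server prints when something opens a TCP connection to the HTTPS port and hangs up without completing a TLS handshake, which is typical of a TCP-only health check or port probe rather than a failing client. A small standalone sketch, using only the Go standard library and nothing KubeVirt-specific, that provokes the same kind of log line:

package main

import (
	"net"
	"net/http"
	"net/http/httptest"
	"time"
)

func main() {
	// A throwaway HTTPS server standing in for virt-api's TLS endpoint.
	srv := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {}))
	defer srv.Close()

	// A "probe" that only checks that the TCP port is open: connect, then hang
	// up without ever speaking TLS. The server's handshake then fails with EOF
	// and net/http typically logs:
	//   http: TLS handshake error from 127.0.0.1:NNNNN: EOF
	conn, err := net.Dial("tcp", srv.Listener.Addr().String())
	if err != nil {
		panic(err)
	}
	conn.Close()

	// Give the server's accept goroutine a moment to attempt the handshake and log.
	time.Sleep(200 * time.Millisecond)
}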
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmilg6v5-hlnvb Pod phase: Pending • Failure [181.115 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:308 should recover and continue management [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:309 Timed out after 90.175s. 
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ level=info timestamp=2018-07-15T14:21:32.639646Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmilg6v5-hlnvb" • [SLOW TEST:63.135 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler is responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:339 should indicate that a node is ready for vmis /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:340 ------------------------------ Pod name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running 2018/07/15 14:27:55 http: TLS handshake error from 10.128.0.1:60236: EOF level=info timestamp=2018-07-15T14:27:58.275457Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:28:01.959141Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:28:01.966761Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/07/15 14:28:05 http: TLS handshake error from 10.128.0.1:60288: EOF level=info timestamp=2018-07-15T14:28:08.518670Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:28:11.344437Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:28:15.225822Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:28:15.304624Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:28:15.308242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:28:15 http: TLS handshake error from 10.128.0.1:60344: EOF level=info timestamp=2018-07-15T14:28:18.706234Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:28:25 http: TLS handshake error from 10.128.0.1:60394: EOF level=info timestamp=2018-07-15T14:28:28.926678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:28:35 http: TLS handshake error from 
10.128.0.1:60444: EOF Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:26:20 http: TLS handshake error from 10.129.0.1:47776: EOF 2018/07/15 14:26:30 http: TLS handshake error from 10.129.0.1:47782: EOF 2018/07/15 14:26:40 http: TLS handshake error from 10.129.0.1:47788: EOF 2018/07/15 14:26:50 http: TLS handshake error from 10.129.0.1:47794: EOF 2018/07/15 14:27:00 http: TLS handshake error from 10.129.0.1:47800: EOF 2018/07/15 14:27:10 http: TLS handshake error from 10.129.0.1:47806: EOF 2018/07/15 14:27:20 http: TLS handshake error from 10.129.0.1:47812: EOF 2018/07/15 14:27:30 http: TLS handshake error from 10.129.0.1:47820: EOF 2018/07/15 14:27:40 http: TLS handshake error from 10.129.0.1:47826: EOF level=info timestamp=2018-07-15T14:27:41.347079Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:27:50 http: TLS handshake error from 10.129.0.1:47832: EOF 2018/07/15 14:28:00 http: TLS handshake error from 10.129.0.1:47838: EOF 2018/07/15 14:28:10 http: TLS handshake error from 10.129.0.1:47844: EOF 2018/07/15 14:28:20 http: TLS handshake error from 10.129.0.1:47850: EOF 2018/07/15 14:28:30 http: TLS handshake error from 10.129.0.1:47856: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:16:30.416152Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi28vg5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi28vg5" level=info timestamp=2018-07-15T14:17:30.505404Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi28vg5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi28vg5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: ab42607e-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi28vg5" level=info timestamp=2018-07-15T14:17:30.618387Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvminn5lc kind= uid=cf391cf1-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:17:30.620403Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvminn5lc kind= uid=cf391cf1-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:17:30.778307Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:17:30.819276Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:18:30.888781Z 
pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvminn5lc\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvminn5lc, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cf391cf1-8839-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvminn5lc" level=info timestamp=2018-07-15T14:18:31.036675Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikfgh7 kind= uid=f33de2d4-8839-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:18:31.038222Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikfgh7 kind= uid=f33de2d4-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:21:31.955989Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg6v5 kind= uid=5f12e9cb-883a-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:21:31.956436Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg6v5 kind= uid=5f12e9cb-883a-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:21:32.127131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg6v5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg6v5" level=info timestamp=2018-07-15T14:25:36.125496Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisskv4 kind= uid=f09b616b-883a-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:25:36.125827Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisskv4 kind= uid=f09b616b-883a-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:25:36.430231Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisskv4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisskv4" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." 
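The "StorageError: invalid object, Code: 4 ... Precondition failed: UID in precondition: ..., UID in object meta: " entries in the virt-controller log show a write that was scoped to one specific object UID arriving after that object had already been deleted, so the precondition no longer matches anything and the controller simply re-enqueues. Scoping a delete to a UID is the normal way to avoid acting on a same-named object that was recreated in the meantime; a hedged client-go sketch of that pattern, again with a ConfigMap as a generic stand-in:

package sketch

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
)

// deleteExactInstance deletes ns/name only if the stored object still carries
// the UID we observed earlier. If it was deleted (and possibly recreated) in
// the meantime, the UID precondition fails, much like the errors logged above.
func deleteExactInstance(cs *kubernetes.Clientset, ns, name string, uid types.UID) error {
	opts := metav1.DeleteOptions{
		Preconditions: &metav1.Preconditions{UID: &uid},
	}
	if err := cs.CoreV1().ConfigMaps(ns).Delete(context.TODO(), name, opts); err != nil {
		return fmt.Errorf("delete of %s/%s (uid %s) did not apply: %w", ns, name, uid, err)
	}
	return nil
}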
level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmisskv4-kntvb Pod phase: Pending • Failure in Spec Setup (BeforeEach) [181.008 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler is not responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:370 the node controller should react [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:409 Timed out after 90.193s. 
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ level=info timestamp=2018-07-15T14:25:36.815847Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmisskv4-kntvb" S [SKIPPING] [0.385 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:462 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-default [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Skip log query tests for JENKINS ci test environment /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:467 ------------------------------ S [SKIPPING] [0.139 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:462 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-alternative [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Skip log query tests for JENKINS ci test environment /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:467 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.190 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:523 should enable emulation in virt-launcher [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:531 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.304 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:523 should be reflected in domain XML [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:572 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:531 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.330 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:523 should request a TUN device but not KVM [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:616 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:531 ------------------------------ •••• Pod 
name: disks-images-provider-4vtln Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-75z5d Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-5c4gq Pod phase: Running level=info timestamp=2018-07-15T14:30:52.415473Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:30:55 http: TLS handshake error from 10.128.0.1:32936: EOF level=info timestamp=2018-07-15T14:31:01.987649Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:31:01.990945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-15T14:31:02.643186Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:31:05 http: TLS handshake error from 10.128.0.1:32988: EOF level=info timestamp=2018-07-15T14:31:12.907051Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:31:15 http: TLS handshake error from 10.128.0.1:33040: EOF level=info timestamp=2018-07-15T14:31:17.353699Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:31:17.499051Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:31:17.499831Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:31:23.144208Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:31:25 http: TLS handshake error from 10.128.0.1:33088: EOF level=info timestamp=2018-07-15T14:31:33.363492Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:31:35 http: TLS handshake error from 10.128.0.1:33138: EOF Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:29:40 http: TLS handshake error from 10.129.0.1:47898: EOF level=info timestamp=2018-07-15T14:29:41.049111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:29:50 http: TLS handshake error from 10.129.0.1:47904: EOF 2018/07/15 14:30:00 http: TLS handshake error from 10.129.0.1:47910: EOF 2018/07/15 14:30:10 http: TLS handshake error from 10.129.0.1:47916: EOF level=info timestamp=2018-07-15T14:30:11.195853Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 
username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:30:20 http: TLS handshake error from 10.129.0.1:47922: EOF 2018/07/15 14:30:30 http: TLS handshake error from 10.129.0.1:47930: EOF 2018/07/15 14:30:40 http: TLS handshake error from 10.129.0.1:47936: EOF 2018/07/15 14:30:50 http: TLS handshake error from 10.129.0.1:47942: EOF 2018/07/15 14:31:00 http: TLS handshake error from 10.129.0.1:47948: EOF 2018/07/15 14:31:10 http: TLS handshake error from 10.129.0.1:47954: EOF level=info timestamp=2018-07-15T14:31:10.978477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:31:20 http: TLS handshake error from 10.129.0.1:47960: EOF 2018/07/15 14:31:30 http: TLS handshake error from 10.129.0.1:47966: EOF Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:18:31.038222Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikfgh7 kind= uid=f33de2d4-8839-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:21:31.955989Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg6v5 kind= uid=5f12e9cb-883a-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:21:31.956436Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilg6v5 kind= uid=5f12e9cb-883a-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:21:32.127131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg6v5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg6v5" level=info timestamp=2018-07-15T14:25:36.125496Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisskv4 kind= uid=f09b616b-883a-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:25:36.125827Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisskv4 kind= uid=f09b616b-883a-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:25:36.430231Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisskv4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisskv4" level=info timestamp=2018-07-15T14:28:37.174085Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisskv4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisskv4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f09b616b-883a-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisskv4" level=info timestamp=2018-07-15T14:28:38.451971Z pos=preset.go:139 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmib6fjz kind= uid=5d49fabb-883b-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:28:38.452320Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6fjz kind= uid=5d49fabb-883b-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:28:38.830703Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitjhjp kind= uid=5d841da0-883b-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:28:38.832091Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitjhjp kind= uid=5d841da0-883b-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:28:39.024983Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitjhjp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitjhjp" level=info timestamp=2018-07-15T14:28:39.677699Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilzktn kind= uid=5e050f91-883b-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:28:39.677916Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilzktn kind= uid=5e050f91-883b-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n" level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n" level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmilzktn-zkhh2
Pod phase: Pending
------------------------------
• Failure [180.950 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance's Pod
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:768
    should result in the VirtualMachineInstance moving to a finalized state [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:769

    Timed out after 90.000s.
    Expected
      : false
    to equal
      : true

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041
------------------------------
STEP: Creating the VirtualMachineInstance
level=info timestamp=2018-07-15T14:28:40.306925Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmilzktn-zkhh2"
Pod name: disks-images-provider-4vtln
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-75z5d
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-5c4gq
Pod phase: Running
level=info timestamp=2018-07-15T14:33:56.697607Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:34:02.592122Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-07-15T14:34:02.595960Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/07/15 14:34:05 http: TLS handshake error from 10.128.0.1:33916: EOF
level=info timestamp=2018-07-15T14:34:06.891023Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:34:11.361935Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/07/15 14:34:15 http: TLS handshake error from 10.128.0.1:33972: EOF
level=info timestamp=2018-07-15T14:34:17.119656Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-07-15T14:34:19.146064Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:34:19.383868Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-15T14:34:19.384335Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:34:25 http: TLS handshake error from 10.128.0.1:34024: EOF level=info timestamp=2018-07-15T14:34:27.320724Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/15 14:34:35 http: TLS handshake error from 10.128.0.1:34078: EOF level=info timestamp=2018-07-15T14:34:37.565682Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-xvlfb Pod phase: Running 2018/07/15 14:32:40 http: TLS handshake error from 10.129.0.1:48008: EOF 2018/07/15 14:32:50 http: TLS handshake error from 10.129.0.1:48014: EOF 2018/07/15 14:33:00 http: TLS handshake error from 10.129.0.1:48020: EOF 2018/07/15 14:33:10 http: TLS handshake error from 10.129.0.1:48026: EOF 2018/07/15 14:33:20 http: TLS handshake error from 10.129.0.1:48032: EOF 2018/07/15 14:33:30 http: TLS handshake error from 10.129.0.1:48038: EOF 2018/07/15 14:33:40 http: TLS handshake error from 10.129.0.1:48046: EOF level=info timestamp=2018-07-15T14:33:41.091662Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/15 14:33:50 http: TLS handshake error from 10.129.0.1:48052: EOF 2018/07/15 14:34:00 http: TLS handshake error from 10.129.0.1:48058: EOF 2018/07/15 14:34:10 http: TLS handshake error from 10.129.0.1:48064: EOF 2018/07/15 14:34:20 http: TLS handshake error from 10.129.0.1:48070: EOF 2018/07/15 14:34:30 http: TLS handshake error from 10.129.0.1:48076: EOF 2018/07/15 14:34:40 http: TLS handshake error from 10.129.0.1:48082: EOF level=info timestamp=2018-07-15T14:34:40.917100Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-z4jkm Pod phase: Running level=info timestamp=2018-07-15T14:21:32.127131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilg6v5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilg6v5" level=info timestamp=2018-07-15T14:25:36.125496Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisskv4 kind= uid=f09b616b-883a-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:25:36.125827Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisskv4 kind= uid=f09b616b-883a-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-07-15T14:25:36.430231Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisskv4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisskv4" level=info timestamp=2018-07-15T14:28:37.174085Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisskv4\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmisskv4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f09b616b-883a-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisskv4" level=info timestamp=2018-07-15T14:28:38.451971Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6fjz kind= uid=5d49fabb-883b-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:28:38.452320Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6fjz kind= uid=5d49fabb-883b-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:28:38.830703Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitjhjp kind= uid=5d841da0-883b-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:28:38.832091Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmitjhjp kind= uid=5d841da0-883b-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:28:39.024983Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitjhjp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitjhjp" level=info timestamp=2018-07-15T14:28:39.677699Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilzktn kind= uid=5e050f91-883b-11e8-9bf3-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-15T14:28:39.677916Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilzktn kind= uid=5e050f91-883b-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-15T14:31:40.726207Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmilzktn\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmilzktn, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5e050f91-883b-11e8-9bf3-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmilzktn" level=info timestamp=2018-07-15T14:31:40.962598Z pos=preset.go:139 component=virt-controller service=http namespace=kubevirt-test-default name=testvmithhg9 kind= uid=ca0f36ad-883b-11e8-9bf3-525500d15501 msg="Initializing 
VirtualMachineInstance" level=info timestamp=2018-07-15T14:31:40.963263Z pos=preset.go:165 component=virt-controller service=http namespace=kubevirt-test-default name=testvmithhg9 kind= uid=ca0f36ad-883b-11e8-9bf3-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-zwhlq Pod phase: Running level=info timestamp=2018-07-15T13:17:51.935681Z pos=application.go:174 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-56vwc Pod phase: Running level=info timestamp=2018-07-15T14:01:22.795279Z pos=vm.go:332 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-07-15T14:01:22.795704Z pos=vm.go:359 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-07-15T14:01:22.795954Z pos=vm.go:411 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-07-15T14:01:22.798527Z pos=vm.go:586 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Signaled deletion for testvmiscprp" level=info timestamp=2018-07-15T14:01:22.800639Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.250442Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251013Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-15T14:02:17.251191Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirhdd6, existing: false\n" level=info timestamp=2018-07-15T14:02:17.251332Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.251516Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.251831Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmirhdd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-15T14:02:17.251961Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiscprp, existing: false\n" level=info timestamp=2018-07-15T14:02:17.252027Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-15T14:02:17.252133Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-15T14:02:17.252378Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmiscprp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-7brqg Pod phase: Running level=info timestamp=2018-07-15T13:47:22.532179Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmicx5p7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-15T13:47:22.532264Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmif4npl, existing: false\n"
level=info timestamp=2018-07-15T13:47:22.532339Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-15T13:47:22.532471Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-15T13:47:22.532644Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmif4npl kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-15T13:47:22.532753Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-15T13:47:22.532823Z pos=vm.go:747 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=Domain uid= msg="Domain deleted"
level=info timestamp=2018-07-15T13:47:22.532956Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmikj82t, existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533032Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533142Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-15T13:47:22.533345Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmikj82t kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-07-15T13:47:22.533501Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmix9nmn, existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533551Z pos=vm.go:330 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-07-15T13:47:22.533674Z pos=vm.go:414 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-07-15T13:47:22.533840Z pos=vm.go:441 component=virt-handler namespace=kubevirt-test-default name=testvmix9nmn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmithhg9-tx8vk
Pod phase: Pending
• Failure [181.495 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52
  Delete a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:800
    with an active pod.
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:801
      should result in pod being terminated [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:802

      Timed out after 90.210s.
Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 ------------------------------ STEP: Creating the VirtualMachineInstance level=info timestamp=2018-07-15T14:31:41.613336Z pos=utils.go:243 component=tests msg="Created virtual machine pod virt-launcher-testvmithhg9-tx8vk" panic: test timed out after 1h30m0s goroutine 11771 [running]: testing.(*M).startAlarm.func1() /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc created by time.goFunc /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44 goroutine 1 [chan receive, 90 minutes]: testing.(*T).Run(0xc420408ff0, 0x12c595f, 0x9, 0x13528e8, 0x47fd76) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301 testing.runTests.func1(0xc420408f00) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64 testing.tRunner(0xc420408f00, 0xc4207cddf8) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 testing.runTests(0xc4203e8780, 0x1bd24f0, 0x1, 0x1, 0x412009) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4 testing.(*M).Run(0xc420602400, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171 main.main() _testmain.go:44 +0x151 goroutine 82 [chan receive]: kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1bfd940) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0 /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203 goroutine 6 [syscall, 90 minutes]: os/signal.signal_recv(0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6 os/signal.loop() /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22 created by os/signal.init.0 /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41 goroutine 8 [sleep]: time.Sleep(0xa77622b) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/time.go:102 +0x166 kubevirt.io/kubevirt/vendor/github.com/juju/ratelimit.realClock.Sleep(0xa77622b) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/juju/ratelimit/ratelimit.go:53 +0x2b kubevirt.io/kubevirt/vendor/github.com/juju/ratelimit.(*Bucket).Wait(0xc420a05740, 0x1) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/juju/ratelimit/ratelimit.go:143 +0x65 kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol.(*tokenBucketRateLimiter).Accept(0xc4202bbcb0) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol/throttle.go:88 +0x37 kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest.(*Request).tryThrottle(0xc420a54000) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/request.go:477 +0x1fd kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest.(*Request).Do(0xc420a54000, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/request.go:732 +0x62 kubevirt.io/kubevirt/pkg/kubecli.(*vmis).Get(0xc420c11440, 0xc420ce46a0, 0xc, 0xc420dc0080, 0xc420c11440, 0x8, 0x7fea16bb8f78) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:317 +0x125 kubevirt.io/kubevirt/tests.waitForVMIStart.func1(0x0) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1031 +0xc2 reflect.Value.call(0x10d28e0, 0xc4205dd230, 0x13, 0x12bfade, 0x4, 0xc420a50ec8, 0x0, 0x0, 0x10d28e0, 0x10d28e0, ...) 
/gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969 reflect.Value.Call(0x10d28e0, 0xc4205dd230, 0x13, 0xc420a50ec8, 0x0, 0x0, 0x44b21b, 0xc420b043c8, 0xc420a50f00) /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4 kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).pollActual(0xc42066ca40, 0x0, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:71 +0x9f kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).match(0xc42066ca40, 0x13e08a0, 0xc4203ce030, 0x412801, 0x0, 0x0, 0x0, 0xc4203ce030) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:141 +0x305 kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).Should(0xc42066ca40, 0x13e08a0, 0xc4203ce030, 0x0, 0x0, 0x0, 0xc42066ca40) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:48 +0x62 kubevirt.io/kubevirt/tests.waitForVMIStart(0x13d6c40, 0xc4202ff680, 0x5a, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1041 +0x668 kubevirt.io/kubevirt/tests.WaitForSuccessfulVMIStart(0x13d6c40, 0xc4202ff680, 0xab6c0e, 0xc4207ce880) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1064 +0x43 kubevirt.io/kubevirt/tests.WaitUntilVMIReady(0xc4202ff680, 0x13528c0, 0x1c1bfc0) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1069 +0x3b kubevirt.io/kubevirt/tests_test.glob..func16.6.2.1() /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:838 +0x1d3 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc42079d260, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:109 +0x9c kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc42079d260, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:63 +0x13e kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*ItNode).Run(0xc42013d5e0, 0x13d3000, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) 
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go:25 +0x7f kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc42035e4e0, 0x0, 0x13d3000, 0xc42023bc80) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:176 +0x5a6 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc42035e4e0, 0x13d3000, 0xc42023bc80) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:127 +0xe3 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc420260dc0, 0xc42035e4e0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:198 +0x10d kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc420260dc0, 0x1353501) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:168 +0x32c kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc420260dc0, 0xb) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:64 +0xdc kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc4200fc190, 0x7fea16b10e18, 0xc420408ff0, 0x12c7ed2, 0xb, 0xc4203e8840, 0x2, 0x2, 0x13ee760, 0xc42023bc80, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x13d40c0, 0xc420408ff0, 0x12c7ed2, 0xb, 0xc4203e87e0, 0x2, 0x2, 0x2) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:218 +0x258 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x13d40c0, 0xc420408ff0, 0x12c7ed2, 0xb, 0xc42024a440, 0x1, 0x1, 0x1) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:206 +0xab kubevirt.io/kubevirt/tests_test.TestTests(0xc420408ff0) /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa testing.tRunner(0xc420408ff0, 0x13528e8) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 created by testing.(*T).Run /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0 goroutine 9 [chan receive, 90 minutes]: kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc420260dc0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:220 +0xc0 created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:59 +0x60 goroutine 98 [select, 90 minutes, locked to thread]: runtime.gopark(0x1354900, 0x0, 0x12c24fb, 0x6, 0x18, 0x1) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a runtime.selectgo(0xc42048f750, 0xc420048180) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50 runtime.ensureSigM.func1() /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4 runtime.goexit() /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1 goroutine 14 [IO wait]: internal/poll.runtime_pollWait(0x7fea16b5af00, 0x72, 0xc420dd7850) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57 internal/poll.(*pollDesc).wait(0xc420851e98, 0x72, 
0xffffffffffffff00, 0x13d5280, 0x1ae97c8) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b internal/poll.(*pollDesc).waitRead(0xc420851e98, 0xc42077e000, 0x8000, 0x8000) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d internal/poll.(*FD).Read(0xc420851e80, 0xc42077e000, 0x8000, 0x8000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d net.(*netFD).Read(0xc420851e80, 0xc42077e000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f net.(*conn).Read(0xc4200fa728, 0xc42077e000, 0x8000, 0x8000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a crypto/tls.(*block).readFromUntil(0xc420239890, 0x7fea16b11040, 0xc4200fa728, 0x5, 0xc4200fa728, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96 crypto/tls.(*Conn).readRecord(0xc420305880, 0x1354a17, 0xc4203059a0, 0x20) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0 crypto/tls.(*Conn).Read(0xc420305880, 0xc4206a3000, 0x1000, 0x1000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100 bufio.(*Reader).Read(0xc4205a2240, 0xc4203237d8, 0x9, 0x9, 0xc420c76478, 0xc42066f100, 0xc420dd7d10) /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238 io.ReadAtLeast(0x13d1c20, 0xc4205a2240, 0xc4203237d8, 0x9, 0x9, 0x9, 0xc420dd7ce0, 0xc420dd7ce0, 0x406614) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86 io.ReadFull(0x13d1c20, 0xc4205a2240, 0xc4203237d8, 0x9, 0x9, 0xc420c76420, 0xc420dd7d10, 0xc400005301) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58 kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc4203237d8, 0x9, 0x9, 0x13d1c20, 0xc4205a2240, 0x0, 0xc400000000, 0x87cfdd, 0xc420dd7fb0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc4203237a0, 0xc42072fe60, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4 kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc420dd7fb0, 0x13537f8, 0xc4200997b0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc42024d380) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76 created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651 goroutine 1724 [chan send, 72 minutes]: kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420c04870) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114 created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8 goroutine 2258 [chan send, 65 minutes]: kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420206900) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114 created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher 
/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8
make: *** [functest] Error 2
+ make cluster-down
./cluster/down.sh
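For context on the two timeouts reported above: the stack of goroutine 8 shows the suite waiting for the VirtualMachineInstance to start by repeatedly fetching it through the KubeVirt client (tests.waitForVMIStart at tests/utils.go:1041, driven by a Gomega async assertion calling kubecli vmis.Get), and that wait gave up after 90 seconds while the virt-launcher pods were still Pending. The following is a minimal, self-contained sketch of that style of poll-with-timeout; checkVMIRunning and waitForRunning are hypothetical names for illustration only, not the actual helpers in tests/utils.go.

    // Sketch of a poll-with-timeout wait of the kind that produced the
    // "Timed out after 90.000s" failures above (hypothetical helper names).
    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // checkVMIRunning stands in for fetching the VMI and checking its phase;
    // here it always reports "not running" so the timeout path is exercised.
    func checkVMIRunning(name string) (bool, error) {
        return false, nil
    }

    // waitForRunning polls until the check succeeds or the timeout elapses.
    func waitForRunning(name string, timeout, interval time.Duration) error {
        deadline := time.Now().Add(timeout)
        for time.Now().Before(deadline) {
            running, err := checkVMIRunning(name)
            if err != nil {
                return err
            }
            if running {
                return nil
            }
            time.Sleep(interval)
        }
        return errors.New("timed out waiting for VirtualMachineInstance to start")
    }

    func main() {
        // Short timeout for demonstration; the suite above used 90s.
        if err := waitForRunning("testvmilzktn", 3*time.Second, 500*time.Millisecond); err != nil {
            fmt.Println(err)
        }
    }

When the VMI never reaches a running phase within the deadline (for example because its virt-launcher pod stays Pending, as in the runs above), a wait like this returns the timeout error and the enclosing test is marked as failed.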