+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/07/30 14:25:32 Waiting for host: 192.168.66.102:22
2018/07/30 14:25:35 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 14:25:43 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 14:25:51 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 14:25:59 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 14:26:04 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2018/07/30 14:26:07 Waiting for host: 192.168.66.101:22
2018/07/30 14:26:10 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 14:26:18 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 14:26:26 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/07/30 14:26:31 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused. Sleeping 5s
2018/07/30 14:26:36 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=2.43 ms
--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 2.430/2.430/2.430/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable
--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms
+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap]
****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) PLAY [Ensure there are new_nodes] ********************************************** TASK [fail] ******************************************************************** skipping: [localhost] TASK [fail] ******************************************************************** skipping: [localhost] PLAY [Initialization Checkpoint Start] ***************************************** TASK [Set install initialization 'In Progress'] ******************************** ok: [node01] PLAY [Populate config host groups] ********************************************* TASK [Load group name mapping variables] *************************************** ok: [localhost] TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* skipping: [localhost] TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* skipping: [localhost] TASK [Evaluate groups - g_lb_hosts required] *********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts required] ********************************** skipping: [localhost] TASK [Evaluate groups - g_nfs_hosts is single host] **************************** skipping: [localhost] TASK [Evaluate groups - g_glusterfs_hosts required] **************************** skipping: [localhost] TASK [Evaluate oo_all_hosts] *************************************************** ok: [localhost] => (item=node01) ok: [localhost] => (item=node02) TASK [Evaluate oo_masters] ***************************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_master] ************************************************ ok: [localhost] TASK [Evaluate oo_new_etcd_to_config] ****************************************** TASK [Evaluate oo_masters_to_config] ******************************************* ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_to_config] ********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_first_etcd] ************************************************** ok: [localhost] TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_nodes_to_config] ********************************************* ok: [localhost] => (item=node02) TASK [Evaluate oo_nodes_to_bootstrap] ****************************************** ok: [localhost] => (item=node02) TASK [Add masters to oo_nodes_to_bootstrap] ************************************ ok: [localhost] => (item=node01) TASK [Evaluate oo_lb_to_config] ************************************************ TASK [Evaluate oo_nfs_to_config] *********************************************** ok: [localhost] => (item=node01) TASK [Evaluate oo_glusterfs_to_config] ***************************************** 
TASK [Evaluate oo_etcd_to_migrate] ********************************************* ok: [localhost] => (item=node01) [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config PLAY [Ensure that all non-node hosts are accessible] *************************** TASK [Gathering Facts] ********************************************************* ok: [node01] PLAY [Initialize basic host facts] ********************************************* TASK [Gathering Facts] ********************************************************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02 TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] *** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : debug] ************************************ skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_stats] ******************************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] *** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02 included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02 TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Standardize on latest variable names] ***** ok: [node01] ok: [node02] TASK [openshift_sanitize_inventory : Normalize openshift_release] ************** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : include_tasks] **************************** included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02 TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : set_fact] ********************************* skipping: [node01] => (item=openshift_hosted_etcd_storage_kind) skipping: [node02] => (item=openshift_hosted_etcd_storage_kind) TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK 
[openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] *** skipping: [node01] skipping: [node02] TASK [openshift_sanitize_inventory : At least one master is schedulable] ******* skipping: [node01] skipping: [node02] TASK [Detecting Operating System from ostree_booted] *************************** ok: [node02] ok: [node01] TASK [set openshift_deployment_type if unset] ********************************** skipping: [node01] skipping: [node02] TASK [check for node already bootstrapped] ************************************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_bootstrapped] ********************* ok: [node01] ok: [node02] TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] *** ok: [node01] ok: [node02] TASK [Determine Atomic Host Docker Version] ************************************ skipping: [node01] skipping: [node02] TASK [assert atomic host docker version is 1.12 or later] ********************** skipping: [node01] skipping: [node02] PLAY [Retrieve existing master configs and validate] *************************** TASK [openshift_control_plane : stat] ****************************************** ok: [node01] TASK [openshift_control_plane : slurp] ***************************************** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] *** ok: [node01] TASK [openshift_control_plane : set_fact] ************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Initialize special first-master variables] ******************************* TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] TASK [set_fact] **************************************************************** ok: [node01] PLAY [Disable web console if required] ***************************************** TASK [set_fact] **************************************************************** skipping: [node01] PLAY [Setup yum repositories for all hosts] ************************************ TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Install Red Hat Subscription manager] ******************* skipping: [node02] TASK [rhel_subscribe : Is host already registered?] 
**************************** skipping: [node02] TASK [rhel_subscribe : Register host] ****************************************** skipping: [node02] TASK [rhel_subscribe : fail] *************************************************** skipping: [node02] TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] *********** skipping: [node02] TASK [rhel_subscribe : Attach to OpenShift Pool] ******************************* skipping: [node02] TASK [rhel_subscribe : Satellite preparation] ********************************** skipping: [node02] TASK [openshift_repos : openshift_repos detect ostree] ************************* ok: [node02] TASK [openshift_repos : Ensure libselinux-python is installed] ***************** ok: [node02] TASK [openshift_repos : Remove openshift_additional.repo file] ***************** ok: [node02] TASK [openshift_repos : Create any additional repos that are defined] ********** TASK [openshift_repos : include_tasks] ***************************************** skipping: [node02] TASK [openshift_repos : include_tasks] ***************************************** included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02 TASK [openshift_repos : Configure origin gpg keys] ***************************** ok: [node02] TASK [openshift_repos : Configure correct origin release repository] *********** ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2) TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] *** changed: [node02] => { "msg": "First run of openshift_repos" } TASK [openshift_repos : Record that openshift_repos already ran] *************** ok: [node02] RUNNING HANDLER [openshift_repos : refresh cache] ****************************** changed: [node02] PLAY [Install packages necessary for installer] ******************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [Determine if chrony is installed] **************************************** [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm. If you need to use command because yum, dnf or zypper is insufficient you can add warn=False to this command task or set command_warnings=False in ansible.cfg to get rid of this message. 
changed: [node02]

TASK [Install ntp package] *****************************************************
skipping: [node02]

TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]

TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=python-docker)

PLAY [Initialize cluster facts] ************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]

TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]

TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node01]
ok: [node02]

TASK [Gather Cluster facts] ****************************************************
changed: [node02]
changed: [node01]

TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]

TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]

PLAY [Initialize etcd host variables] ******************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [include_role : openshift_version] ****************************************

TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]

TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0" }

TASK [openshift_version : set_fact] ********************************************
ok: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => { "changed": false, "msg": "All assertions passed" }

TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => { "changed": false, "msg": "All assertions passed" }

TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_release": "3.10" }

TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" }

TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_pkg_version": "-3.10.0*" }

TASK [openshift_version : debug] ***********************************************
ok: [node01] => { "openshift_version": "3.10.0" }

TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Verify Requirements] *****************************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [Run variable sanity checks] **********************************************
ok: [node01]

TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]

PLAY [Initialization Checkpoint End] *******************************************

TASK [Set install initialization 'Complete'] ***********************************
ok: [node01]

PLAY [Validate node hostnames] *************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [Query DNS for IP address of node02] **************************************
ok: [node02]

TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]

TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]

PLAY [Configure os_firewall] ***************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]

TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]

TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]

TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]

TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)

TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]

TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]

TASK
[os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** skipping: [node02] TASK [os_firewall : Restart polkitd] ******************************************* skipping: [node02] TASK [os_firewall : Wait for polkit action to have been created] *************** skipping: [node02] TASK [os_firewall : Ensure firewalld service is not enabled] ******************* ok: [node02] TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** skipping: [node02] TASK [os_firewall : Install iptables packages] ********************************* ok: [node02] => (item=iptables) ok: [node02] => (item=iptables-services) TASK [os_firewall : Start and enable iptables service] ************************* ok: [node02 -> node02] => (item=node02) TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** skipping: [node02] PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [container_runtime : Setup the docker-storage for overlay] **************** skipping: [node02] TASK [container_runtime : Create file system on extra volume device] *********** TASK [container_runtime : Create mount entry for extra volume] ***************** PLAY [oo_nodes_to_config] ****************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** ok: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* ok: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ****************** skipping: [node02] TASK [container_runtime : Add https_proxy to /etc/atomic.conf] ***************** skipping: [node02] TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ******************** skipping: [node02] TASK [container_runtime : Get current installed Docker version] **************** ok: [node02] TASK [container_runtime : Error out if Docker pre-installed but too old] ******* skipping: [node02] TASK [container_runtime : Error out if requested Docker is too old] ************ skipping: [node02] TASK [container_runtime : Install Docker] ************************************** skipping: [node02] TASK [container_runtime : Ensure 
docker.service.d directory exists] ************ ok: [node02] TASK [container_runtime : Configure Docker service unit file] ****************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Set registry params] ********************************* skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** skipping: [node02] TASK [container_runtime : Set Proxy Settings] ********************************** skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) TASK [container_runtime : Set various Docker options] ************************** ok: [node02] TASK [container_runtime : stat] ************************************************ ok: [node02] TASK [container_runtime : Configure Docker Network OPTIONS] ******************** ok: [node02] TASK [container_runtime : Detect if docker is already started] ***************** ok: [node02] TASK [container_runtime : Start the Docker service] **************************** ok: [node02] TASK [container_runtime : set_fact] ******************************************** ok: [node02] TASK [container_runtime : Check for docker_storage_path/overlay2] ************** ok: [node02] TASK [container_runtime : Fixup SELinux permissions for docker] **************** changed: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* ok: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** ok: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** ok: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] TASK [container_runtime : Fail if Atomic Host since this is an rpm request] **** skipping: [node02] TASK [container_runtime : Getting current systemd-udevd exec command] ********** skipping: [node02] TASK [container_runtime : Assure systemd-udevd.service.d directory exists] ***** skipping: [node02] TASK [container_runtime : Create 
systemd-udevd override file] ****************** skipping: [node02] TASK [container_runtime : Add enterprise registry, if necessary] *************** skipping: [node02] TASK [container_runtime : Check that overlay is in the kernel] ***************** skipping: [node02] TASK [container_runtime : Add overlay to modprobe.d] *************************** skipping: [node02] TASK [container_runtime : Manually modprobe overlay into the kernel] *********** skipping: [node02] TASK [container_runtime : Enable and start systemd-modules-load] *************** skipping: [node02] TASK [container_runtime : Install cri-o] *************************************** skipping: [node02] TASK [container_runtime : Remove CRI-O default configuration files] ************ skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf) skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf) TASK [container_runtime : Create the CRI-O configuration] ********************** skipping: [node02] TASK [container_runtime : Ensure CNI configuration directory exists] *********** skipping: [node02] TASK [container_runtime : Add iptables allow rules] **************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove iptables rules] ******************************* TASK [container_runtime : Add firewalld allow rules] *************************** skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'}) TASK [container_runtime : Remove firewalld allow rules] ************************ TASK [container_runtime : Configure the CNI network] *************************** skipping: [node02] TASK [container_runtime : Create /etc/sysconfig/crio-network] ****************** skipping: [node02] TASK [container_runtime : Start the CRI-O service] ***************************** skipping: [node02] TASK [container_runtime : Ensure /var/lib/containers exists] ******************* skipping: [node02] TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ****** skipping: [node02] TASK [container_runtime : Check for credentials file for registry auth] ******** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth] ***** skipping: [node02] TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] *** skipping: [node02] TASK [container_runtime : stat the docker data dir] **************************** skipping: [node02] TASK [container_runtime : stop the current running docker] ********************* skipping: [node02] TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] *** skipping: [node02] TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] *** skipping: [node02] TASK [container_runtime : restorecon the /var/lib/containers/docker] *********** skipping: [node02] TASK [container_runtime : Remove the old docker location] ********************** skipping: [node02] TASK [container_runtime : Setup the link] ************************************** skipping: [node02] TASK [container_runtime : start docker] **************************************** skipping: [node02] PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [node01] TASK [include_role : openshift_version] **************************************** TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] *** skipping: [node01] 
TASK [openshift_version : Set openshift_version to openshift_release if undefined] *** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : debug] *********************************************** skipping: [node01] TASK [openshift_version : set_fact] ******************************************** skipping: [node01] TASK [openshift_version : assert openshift_release in openshift_image_tag] ***** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : assert openshift_release in openshift_pkg_version] *** ok: [node01] => { "changed": false, "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_release": "3.10" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_image_tag": "v3.10.0-rc.0" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_pkg_version": "-3.10.0*" } TASK [openshift_version : debug] *********************************************** ok: [node01] => { "openshift_version": "3.10.0" } TASK [set openshift_version booleans (first master)] *************************** ok: [node01] PLAY [Set openshift_version for etcd, node, and master hosts] ****************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [set_fact] **************************************************************** ok: [node02] TASK [set openshift_version booleans (masters and nodes)] ********************** ok: [node02] PLAY [Node Preparation Checkpoint Start] *************************************** TASK [Set Node preparation 'In Progress'] ************************************** ok: [node01] PLAY [Only target nodes that have not yet been bootstrapped] ******************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [add_host] **************************************************************** skipping: [localhost] => (item=node02) ok: [localhost] => (item=node01) PLAY [Disable excluders] ******************************************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02 TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-docker-excluder version detected] 
************ skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Get available excluder version] ********************* skipping: [node02] TASK [openshift_excluder : Fail when excluder package is not found] ************ skipping: [node02] TASK [openshift_excluder : Set fact excluder_version] ************************** skipping: [node02] TASK [openshift_excluder : origin-excluder version detected] ******************* skipping: [node02] TASK [openshift_excluder : Printing upgrade target version] ******************** skipping: [node02] TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] *** skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : disable docker excluder] **************************** skipping: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : disable openshift excluder] ************************* changed: [node02] PLAY [Configure nodes] ********************************************************* TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_cloud_provider : Set cloud provider facts] ********************* skipping: [node02] TASK [openshift_cloud_provider : Create cloudprovider config dir] ************** skipping: [node02] TASK [openshift_cloud_provider : include the defined cloud provider files] ***** skipping: [node02] TASK [openshift_node : fail] *************************************************** skipping: [node02] TASK [openshift_node : Check for NetworkManager service] *********************** ok: [node02] TASK [openshift_node : Set fact using_network_manager] ************************* ok: [node02] TASK [openshift_node : Install dnsmasq] **************************************** ok: [node02] TASK [openshift_node : ensure origin/node directory exists] ******************** changed: [node02] => (item=/etc/origin) 
changed: [node02] => (item=/etc/origin/node) TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] *** skipping: [node02] TASK [openshift_node : Install network manager dispatch script] **************** skipping: [node02] TASK [openshift_node : Install dnsmasq configuration] ************************** ok: [node02] TASK [openshift_node : Deploy additional dnsmasq.conf] ************************* skipping: [node02] TASK [openshift_node : Enable dnsmasq] ***************************************** ok: [node02] TASK [openshift_node : Install network manager dispatch script] **************** ok: [node02] TASK [openshift_node : Add iptables allow rules] ******************************* ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove iptables rules] ********************************** TASK [openshift_node : Add firewalld allow rules] ****************************** skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'}) skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'}) skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'}) skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) TASK [openshift_node : Remove firewalld allow rules] *************************** TASK [openshift_node : Checking for journald.conf] ***************************** ok: [node02] TASK [openshift_node : Create journald persistence directories] **************** ok: [node02] TASK [openshift_node : Update journald setup] ********************************** ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'}) ok: [node02] => (item={u'var': u'Compress', u'val': True}) ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'}) ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000}) ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'}) ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'}) ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'}) ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'}) ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'}) ok: [node02] => (item={u'var': u'ForwardToSyslog', 
u'val': False}) ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False}) TASK [openshift_node : Restart journald] *************************************** skipping: [node02] TASK [openshift_node : Disable swap] ******************************************* ok: [node02] TASK [openshift_node : Install node, clients, and conntrack packages] ********** ok: [node02] => (item={u'name': u'origin-node-3.10.0*'}) ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'}) ok: [node02] => (item={u'name': u'conntrack-tools'}) TASK [openshift_node : Restart cri-o] ****************************************** skipping: [node02] TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] *** changed: [node02] TASK [openshift_node : sysctl] ************************************************* ok: [node02] TASK [openshift_node : Check for credentials file for registry auth] *********** skipping: [node02] TASK [openshift_node : Create credentials for registry auth] ******************* skipping: [node02] TASK [openshift_node : Create credentials for registry auth (alternative)] ***** skipping: [node02] TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] *** skipping: [node02] TASK [openshift_node : Check that node image is present] *********************** changed: [node02] TASK [openshift_node : Pre-pull node image] ************************************ skipping: [node02] TASK [openshift_node : Copy node script to the node] *************************** ok: [node02] TASK [openshift_node : Install Node service file] ****************************** ok: [node02] TASK [openshift_node : Ensure old system path is set] ************************** skipping: [node02] => (item=/etc/origin/openvswitch) skipping: [node02] => (item=/var/lib/kubelet) skipping: [node02] => (item=/opt/cni/bin) TASK [openshift_node : Check status of node image pre-pull] ******************** skipping: [node02] TASK [openshift_node : Copy node container image to ostree storage] ************ skipping: [node02] TASK [openshift_node : Install or Update node system container] **************** skipping: [node02] TASK [openshift_node : Restart network manager to ensure networking configuration is in place] *** skipping: [node02] TASK [openshift_node : Configure Node settings] ******************************** ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='}) ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'}) ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'}) TASK [openshift_node : Configure Proxy Settings] ******************************* skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='}) skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='}) skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'}) TASK [openshift_node : file] *************************************************** skipping: [node02] TASK [openshift_node : Create the Node config] ********************************* changed: [node02] TASK [openshift_node : Configure Node Environment Variables] ******************* TASK [openshift_node : Ensure the node static pod directory exists] ************ changed: [node02] TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : Check status of node image pre-pull] 
******************** skipping: [node02] TASK [openshift_node : Install NFS storage plugin dependencies] **************** ok: [node02] TASK [openshift_node : Check for existence of nfs sebooleans] ****************** ok: [node02] => (item=virt_use_nfs) ok: [node02] => (item=virt_sandbox_use_nfs) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:13.798539', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.008754', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 14:35:13.789785', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:15.321756', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.019364', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 14:35:15.302392', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:13.798539', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.008754', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 14:35:13.789785', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:15.321756', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.019364', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 
14:35:15.302392', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install GlusterFS storage plugin dependencies] ********** ok: [node02] TASK [openshift_node : Check for existence of fusefs sebooleans] *************** ok: [node02] => (item=virt_use_fusefs) ok: [node02] => (item=virt_sandbox_use_fusefs) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] *** ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:23.522798', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.008007', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 14:35:23.514791', '_ansible_ignore_errors': None, 'failed': False}) ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:25.034469', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.007143', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 14:35:25.027326', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] *** skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:23.522798', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.008007', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 14:35:23.514791', '_ansible_ignore_errors': None, 'failed': False}) skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-07-30 14:35:25.034469', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.007143', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, 
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-07-30 14:35:25.027326', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use the 
recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node02] changed: [node01] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the 
bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node01 -> node01] ok: [node02 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:03:58) Node Preparation : Complete (0:05:17) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5649 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 22d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 22d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
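The node check that just ran is a simple gate: list nodes without headers, fail the job if kubectl itself errors, then fail if any node still reports NotReady, and only then continue to cluster-sync. A minimal standalone sketch of that gate (a hypothetical helper, not a file from the repo; it assumes cluster/kubectl.sh is the wrapper the log uses):

#!/bin/bash
# Gate: proceed only when kubectl works and every node reports Ready.
set +e
nodes=$(cluster/kubectl.sh get nodes --no-headers)
rc=$?
if [ "$rc" -ne 0 ]; then
    echo "kubectl failed to list nodes"
    exit 1
fi
# Any remaining NotReady line means the cluster is not usable yet.
if echo "$nodes" | grep -q NotReady; then
    echo "Some nodes are still NotReady:"
    echo "$nodes" | grep NotReady
    exit 1
fi
set -e
echo 'Nodes are ready:'
cluster/kubectl.sh get nodes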
sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.39 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b00c84523b53 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> b76b8bd8cd39 Step 5/8 : USER 1001 ---> Using cache ---> b6d9ad9ed232 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> Using cache ---> 0ea00864fd81 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Using cache ---> f07dce66f60b Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-controller" '' ---> Running in d485df7ae867 ---> 8b441387a56b Removing intermediate container d485df7ae867 Successfully built 8b441387a56b Sending build context to Docker daemon 43.32 MB Step 1/9 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 945996802736 Step 3/9 : RUN dnf -y install socat genisoimage && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 1dcd22d08d0e Step 4/9 : COPY virt-launcher /usr/bin/virt-launcher ---> Using cache ---> 69903f0d132c Step 5/9 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Using cache ---> 50c73161648f Step 6/9 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Using cache ---> 1b1ce09bc3c8 Step 7/9 : COPY sock-connector /usr/share/kubevirt/virt-launcher/ ---> Using cache ---> 451cbbe9e65c Step 8/9 : ENTRYPOINT /usr/bin/virt-launcher ---> Using cache ---> 821627496907 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-launcher" '' ---> Running in 3b9804107577 ---> 7acf9196d409 Removing intermediate container 3b9804107577 Successfully built 7acf9196d409 Sending build context to Docker daemon 41.69 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> Using cache ---> 4f89e3d59178 Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Using cache ---> a5d0cb1b8900 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-handler" '' ---> Running in 7f5e1a43dae3 ---> e149bd17f143 Removing intermediate container 7f5e1a43dae3 Successfully built e149bd17f143 Sending build context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> ed1ebf600ee1 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 0769dad023e5 Step 5/8 : USER 1001 ---> Using cache ---> 0cb65afb0c2b Step 6/8 : COPY virt-api /usr/bin/virt-api ---> Using cache ---> 5a2835457266 Step 7/8 : 
ENTRYPOINT /usr/bin/virt-api ---> Using cache ---> f949a9d261c5 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-api" '' ---> Running in 291bcf5aa0b5 ---> 1e25dad25314 Removing intermediate container 291bcf5aa0b5 Successfully built 1e25dad25314 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/7 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 02134835a6aa Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> ec0843818da7 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 754029bb4bd2 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 6327b8256318 Successfully built 6327b8256318 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 207487abe7b2 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "vm-killer" '' ---> Using cache ---> 27cf5472530f Successfully built 27cf5472530f Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 5734d749eb5c Step 3/7 : ENV container docker ---> Using cache ---> f8775a77966f Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 1a40cf222a61 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 77b545d92fe7 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> dfe20d463305 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "registry-disk-v1alpha" '' ---> Using cache ---> 5efdf368e732 Successfully built 5efdf368e732 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33187/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 386f7e924456 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> f473a86e4d6a Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a4ca4c67d45c Successfully built a4ca4c67d45c Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33187/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> de1e81f43a59 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a5867eac6e05 Successfully built a5867eac6e05 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33187/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" 
---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 1083d820f9c8 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 11512d828b9c Successfully built 11512d828b9c Sending build context to Docker daemon 35.59 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 985fe391c056 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 3b2cae8ac543 Step 5/8 : USER 1001 ---> Using cache ---> 0c06e5b4a900 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> Using cache ---> 1ff475511c79 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Using cache ---> 5d72fd9a68d9 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "subresource-access-test" '' ---> Running in 5d048fb33d95 ---> 2a7fb8ff0785 Removing intermediate container 5d048fb33d95 Successfully built 2a7fb8ff0785 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/9 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> d3456b1644b1 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0ba81fddbba1 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 5d33abe3f819 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 783826523be1 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 711bc8d15952 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "winrmcli" '' ---> Using cache ---> fe40426b785b Successfully built fe40426b785b Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> e3238544ad97 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> Using cache ---> 70e6426eddc5 Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Using cache ---> 4fb262a4f5b4 Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Running in 5ccb5b0e0bd5 ---> 64f6014bb90a Removing intermediate container 5ccb5b0e0bd5 Successfully built 64f6014bb90a hack/build-docker.sh push The push refers to a repository [localhost:33187/kubevirt/virt-controller] 262257e8949b: Preparing aa89340cf7a8: Preparing 891e1e4ef82a: Preparing aa89340cf7a8: Pushed 262257e8949b: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:6b89e2bb50407ad93c74e179c46536affb3f9c03ee757a095d09aef8703a1899 size: 949 The push refers to a repository [localhost:33187/kubevirt/virt-launcher] 9453a84b3e04: Preparing a72ab3cfd46a: Preparing 08b8880e15dd: Preparing d1dbd1d0afad: Preparing af293cb2890d: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 5eefb9960a36: Preparing 891e1e4ef82a: Preparing b83399358a92: Waiting da38cf808aa5: Waiting fa6154170bf5: Waiting 891e1e4ef82a: Waiting a72ab3cfd46a: Pushed 9453a84b3e04: Pushed da38cf808aa5: Pushed b83399358a92: Pushed 
186d8b3e4fd8: Pushed fa6154170bf5: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 08b8880e15dd: Pushed af293cb2890d: Pushed d1dbd1d0afad: Pushed 5eefb9960a36: Pushed devel: digest: sha256:85a1b25cda9cbeb2b442304f420b30565438013e7099c5bc1267979b6c623316 size: 2620 The push refers to a repository [localhost:33187/kubevirt/virt-handler] 65ebe4d3cc49: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher 65ebe4d3cc49: Pushed devel: digest: sha256:67547b1056346914759973801cc8746d09c15c1cd036a823d9fb2dc90d413789 size: 741 The push refers to a repository [localhost:33187/kubevirt/virt-api] 134a6db7130c: Preparing 82fc744c99b4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 82fc744c99b4: Pushed 134a6db7130c: Pushed devel: digest: sha256:65249c63ea3b3c72f8080f1a2e69625607ff4b43cb0bac3db8ac845d0fc75f8b size: 948 The push refers to a repository [localhost:33187/kubevirt/disks-images-provider] 71ad31feb2c5: Preparing 21d4b721776e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 71ad31feb2c5: Pushed 21d4b721776e: Pushed devel: digest: sha256:5dc088106df85eb01f2ad0566624239b95b34986820107944e36d309183fd4cd size: 948 The push refers to a repository [localhost:33187/kubevirt/vm-killer] c4cfadeeaf5f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider c4cfadeeaf5f: Pushed devel: digest: sha256:39b817b79b1fbce75dbb476bc261b2752fd6466bf98d373208d5144579da22b0 size: 740 The push refers to a repository [localhost:33187/kubevirt/registry-disk-v1alpha] 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 661cce8d8e52: Pushed 41e0baba3077: Pushed 25edbec0eaea: Pushed devel: digest: sha256:0df707a55243af8792380fba68a76307017494c503e0e9071ed55d7d3c3611d4 size: 948 The push refers to a repository [localhost:33187/kubevirt/cirros-registry-disk-demo] f9f97de3966a: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 661cce8d8e52: Mounted from kubevirt/registry-disk-v1alpha f9f97de3966a: Pushed devel: digest: sha256:3f818f67105a36bdc42bdbfad87fc29d0028e39a0dceef92d12efbcf8e16e5ed size: 1160 The push refers to a repository [localhost:33187/kubevirt/fedora-cloud-registry-disk-demo] 24cdf3b545f2: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/cirros-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/cirros-registry-disk-demo 24cdf3b545f2: Pushed devel: digest: sha256:a6a571626690141c7da4cf0e1eb4fd75e5dd9ae427d5070c2729214cfbd6a192 size: 1161 The push refers to a repository [localhost:33187/kubevirt/alpine-registry-disk-demo] d8e356e905f4: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/fedora-cloud-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/fedora-cloud-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo d8e356e905f4: Pushed devel: digest: sha256:c27568048aa8e031860d98cdced0370763745ad80581e62432568dac45abf1fb size: 1160 The push refers to a repository [localhost:33187/kubevirt/subresource-access-test] b748933d2902: Preparing 25cb73590a9d: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 25cb73590a9d: Pushed b748933d2902: Pushed devel: 
digest: sha256:2f4f211fcd6f6fadf756e8f8697b033ba844d155a8bbdabad00dd46a8bc188c3 size: 948 The push refers to a repository [localhost:33187/kubevirt/winrmcli] f8083e002d0b: Preparing 53c709abc882: Preparing 9ca98a0f492b: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test f8083e002d0b: Pushed 9ca98a0f492b: Pushed 53c709abc882: Pushed devel: digest: sha256:4fe6c9666a841b61b962d7fb73ccb7cb0dabc3b56e1657cfdfd9005e1a36d38c size: 1165 The push refers to a repository [localhost:33187/kubevirt/example-hook-sidecar] 332f5e49c707: Preparing 39bae602f753: Preparing 332f5e49c707: Pushed 39bae602f753: Pushed devel: digest: sha256:bb0956aad7b06812fcc8f3b8f8a7f78f3ffafd48122dfbfe6ddb11db9b54fedb size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-165-g5c37f0a ++ KUBEVIRT_VERSION=v0.7.0-165-g5c37f0a + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ 
docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33187/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... + cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l 
kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete 
services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export 
KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-165-g5c37f0a ++ KUBEVIRT_VERSION=v0.7.0-165-g5c37f0a + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh 
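The ./cluster/clean.sh run that finished just above deletes every KubeVirt-labelled resource kind in each target namespace and treats "No resources found" as success. A condensed sketch of that loop (the kind list is abbreviated; the real script also covers apiservices, webhook configurations, CRDs and RBAC objects, and the KUBECONFIG path is the one printed in the log):

#!/bin/bash
export KUBECONFIG=cluster/os-3.10.0/.kubeconfig
namespaces=(default kube-system)
# Abbreviated resource list for illustration only.
kinds=(deployment rs services secrets pv pvc ds pods serviceaccounts)
for ns in "${namespaces[@]}"; do
    for kind in "${kinds[@]}"; do
        # An empty label match prints "No resources found" and is not an error.
        cluster/os-3.10.0/.kubectl -n "$ns" delete "$kind" -l kubevirt.io || true
    done
done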
+++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33187/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... + [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + 
cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-ctcjh 0/1 ContainerCreating 0 4s disks-images-provider-dqv4m 0/1 ContainerCreating 0 4s virt-api-7d79764579-8n6wn 0/1 ContainerCreating 0 6s virt-api-7d79764579-g72bg 0/1 ContainerCreating 0 6s virt-controller-7d57d96b65-g5k4q 0/1 ContainerCreating 0 6s virt-controller-7d57d96b65-vrqn2 0/1 ContainerCreating 0 6s virt-handler-c8dmh 0/1 ContainerCreating 0 7s virt-handler-rzrl5 0/1 ContainerCreating 0 6s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... 
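The wait that starts here is a bounded polling loop: every 30 seconds it re-lists the pods in the namespace, first until nothing is outside the Running phase and then until no container reports ready=false, giving up after 300 seconds. A minimal sketch of the first of those two loops, with the same timeout and sample interval the log uses:

#!/bin/bash
timeout=300
sample=30
current_time=0
# Poll until no pod in kube-system is outside the Running phase.
while true; do
    not_running=$(cluster/kubectl.sh get pods -n kube-system --no-headers | grep -v Running)
    if [ -z "$not_running" ]; then
        break
    fi
    echo 'Waiting for kubevirt pods to enter the Running state ...'
    echo "$not_running"
    sleep "$sample"
    current_time=$((current_time + sample))
    if [ "$current_time" -gt "$timeout" ]; then
        echo "Timed out waiting for pods to run"
        exit 1
    fi
done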
+ kubectl get pods -n kube-system --no-headers + grep -v Running + cluster/kubectl.sh get pods -n kube-system --no-headers disks-images-provider-ctcjh 0/1 ContainerCreating 0 5s disks-images-provider-dqv4m 0/1 ContainerCreating 0 5s virt-api-7d79764579-8n6wn 0/1 ContainerCreating 0 7s virt-api-7d79764579-g72bg 0/1 ContainerCreating 0 7s virt-controller-7d57d96b65-g5k4q 0/1 ContainerCreating 0 7s virt-controller-7d57d96b65-vrqn2 0/1 ContainerCreating 0 7s virt-handler-c8dmh 0/1 ContainerCreating 0 8s virt-handler-rzrl5 0/1 ContainerCreating 0 7s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n false ']' + echo 'Waiting for KubeVirt containers to become ready ...' Waiting for KubeVirt containers to become ready ... + kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + grep false + cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers false + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n '' ']' + kubectl get pods -n kube-system + cluster/kubectl.sh get pods -n kube-system NAME READY STATUS RESTARTS AGE disks-images-provider-ctcjh 1/1 Running 0 1m disks-images-provider-dqv4m 1/1 Running 0 1m master-api-node01 1/1 Running 1 22d master-controllers-node01 1/1 Running 2 22d master-etcd-node01 1/1 Running 1 22d virt-api-7d79764579-8n6wn 1/1 Running 1 1m virt-api-7d79764579-g72bg 1/1 Running 0 1m virt-controller-7d57d96b65-g5k4q 1/1 Running 0 1m virt-controller-7d57d96b65-vrqn2 1/1 Running 0 1m virt-handler-c8dmh 1/1 Running 0 1m virt-handler-rzrl5 1/1 Running 0 1m + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n default --no-headers ++ cluster/kubectl.sh get pods -n default --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false + '[' -n '' ']' + kubectl get pods -n default + cluster/kubectl.sh get pods -n default NAME READY STATUS RESTARTS AGE docker-registry-1-rl562 1/1 Running 1 22d registry-console-1-rw9zf 1/1 Running 1 22d router-1-6cch9 1/1 Running 1 22d + kubectl version + cluster/kubectl.sh version oc v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 features: Basic-Auth GSSAPI Kerberos SPNEGO Server https://127.0.0.1:33184 openshift v3.10.0-rc.0+c20e215 kubernetes v1.10.0+b81c8f8 + ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + [[ openshift-3.10-release =~ windows.* ]] + FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml' + make functest hack/dockerized "hack/build-func-tests.sh" 
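The functest target launched above runs the compiled Ginkgo suite inside the build container with the arguments assembled a few lines earlier; the windows-specific parameters are skipped because the job name does not match windows.*. A hedged sketch of the equivalent manual invocation (it assumes FUNC_TEST_ARGS is exported so make can pick it up; the junit path is the one printed in the log):

#!/bin/bash
# Same arguments the CI assembled into FUNC_TEST_ARGS above.
export FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
make functest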
sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1532962560 Will run 151 of 151 specs ••• ------------------------------ • [SLOW TEST:6.370 seconds] Templates /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42 Launching VMI from VM Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60 with given Fedora Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193 with given VM JSON from the Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152 with given VM from the VM JSON /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158 with given VMI from the VM /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163 should succeed to terminate the VMI using oc-patch command /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166 ------------------------------ 2018/07/30 10:57:12 read closing down: EOF 2018/07/30 10:58:06 read closing down: EOF Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 14:56:39 http: TLS handshake error from 10.129.0.1:54618: EOF level=info timestamp=2018-07-30T14:56:40.879082Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 14:56:49 http: TLS handshake error from 10.129.0.1:54630: EOF level=info timestamp=2018-07-30T14:56:58.883293Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:56:59 http: TLS handshake error from 10.129.0.1:54642: EOF 2018/07/30 14:57:09 http: TLS handshake error from 10.129.0.1:54654: EOF 2018/07/30 14:57:19 http: TLS handshake error from 10.129.0.1:54664: EOF level=info timestamp=2018-07-30T14:57:28.199584Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T14:57:29.240852Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:57:29 http: TLS handshake error from 10.129.0.1:54674: EOF level=info timestamp=2018-07-30T14:57:35.397695Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 14:57:39 http: TLS handshake error from 10.129.0.1:54698: EOF 2018/07/30 14:57:49 http: TLS handshake error from 10.129.0.1:54708: EOF level=info timestamp=2018-07-30T14:57:59.410537Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:57:59 http: TLS handshake error from 10.129.0.1:54718: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T14:57:16.400464Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:16.540659Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 
2018/07/30 14:57:22 http: TLS handshake error from 10.128.0.1:45448: EOF level=info timestamp=2018-07-30T14:57:26.776209Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:27.717600Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:57:32 http: TLS handshake error from 10.128.0.1:45506: EOF level=info timestamp=2018-07-30T14:57:37.354205Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:40.814509Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 14:57:42 http: TLS handshake error from 10.128.0.1:45558: EOF level=info timestamp=2018-07-30T14:57:45.007156Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:47.237108Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:47.566429Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 14:57:52 http: TLS handshake error from 10.128.0.1:45602: EOF level=info timestamp=2018-07-30T14:57:57.770006Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 14:58:02 http: TLS handshake error from 10.128.0.1:45652: EOF Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T14:54:39.655703Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-vrqn2 Pod phase: Running level=info timestamp=2018-07-30T14:56:18.892978Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance default/testvm" level=info timestamp=2018-07-30T14:56:18.907518Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T14:56:18.907637Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T14:56:18.907671Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T14:56:18.912922Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/default/testvm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b631a2a6-9408-11e8-82a2-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance default/testvm" level=info timestamp=2018-07-30T14:56:18.935486Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T14:56:18.935595Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T14:56:18.935631Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T14:56:20.824478Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:56:20.825030Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T14:56:20.931110Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibkmrq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibkmrq" level=info timestamp=2018-07-30T14:57:12.329993Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:57:12.335038Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T14:57:12.616050Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi84mpb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi84mpb" level=info timestamp=2018-07-30T14:57:12.642409Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi84mpb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi84mpb" Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T14:57:33.156061Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind=Domain uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T14:57:33.159750Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T14:57:33.159919Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi84mpb, existing: true\n" level=info timestamp=2018-07-30T14:57:33.159967Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T14:57:33.159993Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T14:57:33.160015Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T14:57:33.160085Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T14:57:33.238035Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T14:57:34.572249Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T14:57:34.572462Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi84mpb, existing: true\n" level=info timestamp=2018-07-30T14:57:34.572540Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T14:57:34.572607Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T14:57:34.572650Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T14:57:34.572905Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T14:57:34.617679Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T14:54:43.526335Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-30T14:54:43.533614Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-30T14:54:43.597880Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-30T14:54:43.637909Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-30T14:54:43.676431Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-30T14:54:43.693120Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmi84mpb-wsbp4 Pod phase: Running level=info timestamp=2018-07-30T14:57:32.815838Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:57:32.827316Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.085265Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:57:33.124284Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:57:33.140274Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T14:57:33.140772Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.140973Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:57:33.146134Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:57:33.161918Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:57:33.259142Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.823379Z pos=monitor.go:222 component=virt-launcher msg="Found PID for e9c5d4b6-d279-4afe-a999-db7ce68f7c5c: 168" level=info timestamp=2018-07-30T14:57:34.575653Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-30T14:57:34.576599Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-30T14:57:34.576698Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-30T14:57:34.593205Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmibkmrq-lb4qv Pod phase: Running level=info timestamp=2018-07-30T14:56:39.002613Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:56:39.207474Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.465289Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:56:39.500036Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:56:39.519193Z 
pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T14:56:39.520195Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.520353Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:56:39.531444Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:56:39.542400Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:56:39.551879Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.660245Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-30T14:56:39.664454Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-30T14:56:39.664603Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-30T14:56:39.669924Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:56:40.008655Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 4d4ff3b4-193a-4e53-90a6-06656dfd2914: 163" • Failure [107.776 seconds] Slirp /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39 should be able to /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 VirtualMachineInstance with slirp interface [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: : { Err: { s: "command terminated with exit code 126", }, Code: 126, } command terminated with exit code 126 not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:88 ------------------------------ level=info timestamp=2018-07-30T14:56:21.563042Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmibkmrq kind=VirtualMachineInstance uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmibkmrq-lb4qv" level=info timestamp=2018-07-30T14:56:38.404251Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmibkmrq kind=VirtualMachineInstance uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmibkmrq-lb4qv" level=info timestamp=2018-07-30T14:56:40.228754Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmibkmrq kind=VirtualMachineInstance uid=b82a155b-9408-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T14:56:40.699740Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmibkmrq kind=VirtualMachineInstance uid=b82a155b-9408-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
level=info timestamp=2018-07-30T14:57:13.170314Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi84mpb kind=VirtualMachineInstance uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi84mpb-wsbp4" level=info timestamp=2018-07-30T14:57:32.521910Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi84mpb kind=VirtualMachineInstance uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi84mpb-wsbp4" level=info timestamp=2018-07-30T14:57:35.182554Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi84mpb kind=VirtualMachineInstance uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T14:57:35.331884Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi84mpb kind=VirtualMachineInstance uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." STEP: have containerPort in the pod manifest STEP: start the virtual machine with slirp interface level=info timestamp=2018-07-30T14:58:07.086552Z pos=vmi_slirp_interface_test.go:87 component=tests msg= Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 14:56:39 http: TLS handshake error from 10.129.0.1:54618: EOF level=info timestamp=2018-07-30T14:56:40.879082Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 14:56:49 http: TLS handshake error from 10.129.0.1:54630: EOF level=info timestamp=2018-07-30T14:56:58.883293Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:56:59 http: TLS handshake error from 10.129.0.1:54642: EOF 2018/07/30 14:57:09 http: TLS handshake error from 10.129.0.1:54654: EOF 2018/07/30 14:57:19 http: TLS handshake error from 10.129.0.1:54664: EOF level=info timestamp=2018-07-30T14:57:28.199584Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T14:57:29.240852Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:57:29 http: TLS handshake error from 10.129.0.1:54674: EOF level=info timestamp=2018-07-30T14:57:35.397695Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 14:57:39 http: TLS handshake error from 10.129.0.1:54698: EOF 2018/07/30 14:57:49 http: TLS handshake error from 10.129.0.1:54708: EOF level=info timestamp=2018-07-30T14:57:59.410537Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:57:59 http: TLS handshake error from 10.129.0.1:54718: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T14:57:16.540659Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 14:57:22 http: TLS handshake error from 10.128.0.1:45448: EOF level=info timestamp=2018-07-30T14:57:26.776209Z 
pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:27.717600Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 14:57:32 http: TLS handshake error from 10.128.0.1:45506: EOF level=info timestamp=2018-07-30T14:57:37.354205Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:40.814509Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 14:57:42 http: TLS handshake error from 10.128.0.1:45558: EOF level=info timestamp=2018-07-30T14:57:45.007156Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:47.237108Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T14:57:47.566429Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 14:57:52 http: TLS handshake error from 10.128.0.1:45602: EOF level=info timestamp=2018-07-30T14:57:57.770006Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 14:58:02 http: TLS handshake error from 10.128.0.1:45652: EOF level=info timestamp=2018-07-30T14:58:07.897098Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T14:54:39.655703Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-vrqn2 Pod phase: Running level=info timestamp=2018-07-30T14:56:18.892978Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance default/testvm" level=info timestamp=2018-07-30T14:56:18.907518Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T14:56:18.907637Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T14:56:18.907671Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info 
timestamp=2018-07-30T14:56:18.912922Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/default/testvm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b631a2a6-9408-11e8-82a2-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance default/testvm" level=info timestamp=2018-07-30T14:56:18.935486Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T14:56:18.935595Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T14:56:18.935631Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T14:56:20.824478Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:56:20.825030Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T14:56:20.931110Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibkmrq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibkmrq" level=info timestamp=2018-07-30T14:57:12.329993Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:57:12.335038Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T14:57:12.616050Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi84mpb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi84mpb" level=info timestamp=2018-07-30T14:57:12.642409Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi84mpb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi84mpb" Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T14:57:33.156061Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind=Domain uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T14:57:33.159750Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default 
name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T14:57:33.159919Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi84mpb, existing: true\n" level=info timestamp=2018-07-30T14:57:33.159967Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T14:57:33.159993Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T14:57:33.160015Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T14:57:33.160085Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T14:57:33.238035Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T14:57:34.572249Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T14:57:34.572462Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi84mpb, existing: true\n" level=info timestamp=2018-07-30T14:57:34.572540Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T14:57:34.572607Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T14:57:34.572650Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T14:57:34.572905Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T14:57:34.617679Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T14:54:43.526335Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-30T14:54:43.533614Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-07-30T14:54:43.597880Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-30T14:54:43.637909Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-30T14:54:43.676431Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-30T14:54:43.693120Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: virt-launcher-testvmi84mpb-wsbp4 Pod phase: Running level=info timestamp=2018-07-30T14:57:32.815838Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:57:32.827316Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.085265Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:57:33.124284Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:57:33.140274Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T14:57:33.140772Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.140973Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:57:33.146134Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:57:33.161918Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:57:33.259142Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.823379Z pos=monitor.go:222 component=virt-launcher msg="Found PID for e9c5d4b6-d279-4afe-a999-db7ce68f7c5c: 168" level=info timestamp=2018-07-30T14:57:34.575653Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-30T14:57:34.576599Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-30T14:57:34.576698Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-30T14:57:34.593205Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmibkmrq-lb4qv Pod phase: Running level=info timestamp=2018-07-30T14:56:39.002613Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:56:39.207474Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.465289Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:56:39.500036Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:56:39.519193Z 
pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T14:56:39.520195Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.520353Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:56:39.531444Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:56:39.542400Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:56:39.551879Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.660245Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-30T14:56:39.664454Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-30T14:56:39.664603Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-30T14:56:39.669924Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:56:40.008655Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 4d4ff3b4-193a-4e53-90a6-06656dfd2914: 163" • Failure [1.784 seconds] Slirp /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39 should be able to /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 VirtualMachineInstance with slirp interface with custom MAC address [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: : { Err: { s: "command terminated with exit code 126", }, Code: 126, } command terminated with exit code 126 not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:88 ------------------------------ STEP: have containerPort in the pod manifest STEP: start the virtual machine with slirp interface level=info timestamp=2018-07-30T14:58:09.876469Z pos=vmi_slirp_interface_test.go:87 component=tests msg= 2018/07/30 10:58:55 read closing down: EOF Service cluster-ip-vmi successfully exposed for virtualmachineinstance testvmib8d7q • [SLOW TEST:51.186 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:68 Should expose a Cluster IP service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:71 ------------------------------ Service cluster-ip-target-vmi successfully exposed for virtualmachineinstance testvmib8d7q •Service node-port-vmi successfully exposed for virtualmachineinstance testvmib8d7q ------------------------------ • [SLOW TEST:9.409 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:61 Expose NodePort 
service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:124 Should expose a NodePort service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:129 ------------------------------ 2018/07/30 10:59:58 read closing down: EOF Service cluster-ip-udp-vmi successfully exposed for virtualmachineinstance testvmic9str • [SLOW TEST:53.058 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VMI /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166 Expose ClusterIP UDP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:173 Should expose a ClusterIP service on a VMI and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:177 ------------------------------ Service node-port-udp-vmi successfully exposed for virtualmachineinstance testvmic9str Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 14:59:49 http: TLS handshake error from 10.129.0.1:54838: EOF 2018/07/30 14:59:59 http: TLS handshake error from 10.129.0.1:54850: EOF 2018/07/30 15:00:09 http: TLS handshake error from 10.129.0.1:54860: EOF 2018/07/30 15:00:19 http: TLS handshake error from 10.129.0.1:54870: EOF level=info timestamp=2018-07-30T15:00:29.016446Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:00:29 http: TLS handshake error from 10.129.0.1:54880: EOF 2018/07/30 15:00:39 http: TLS handshake error from 10.129.0.1:54890: EOF level=error timestamp=2018-07-30T15:00:46.844949Z pos=subresource.go:91 component=virt-api reason="write tcp 10.129.0.3:8443->10.128.0.1:37988: write: connection reset by peer" msg="error ecountered reading from remote podExec stream" level=error timestamp=2018-07-30T15:00:46.845638Z pos=subresource.go:106 component=virt-api reason="write tcp 10.129.0.3:8443->10.128.0.1:37988: write: connection reset by peer" msg="Error in websocket proxy" 2018/07/30 15:00:46 http: response.WriteHeader on hijacked connection level=error timestamp=2018-07-30T15:00:46.846431Z pos=subresource.go:97 component=virt-api reason="websocket: close 1006 (abnormal closure): unexpected EOF" msg="error ecountered reading from websocket stream" level=info timestamp=2018-07-30T15:00:46.846730Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmibkmrq/console proto=HTTP/1.1 statusCode=500 contentLength=0 2018/07/30 15:00:49 http: TLS handshake error from 10.129.0.1:54900: EOF 2018/07/30 15:00:59 http: TLS handshake error from 10.129.0.1:54910: EOF 2018/07/30 15:01:09 http: TLS handshake error from 10.129.0.1:54920: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running 2018/07/30 15:00:22 http: TLS handshake error from 10.128.0.1:46380: EOF level=info timestamp=2018-07-30T15:00:31.224131Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:00:32 http: TLS handshake error from 10.128.0.1:46430: EOF level=info timestamp=2018-07-30T15:00:41.477242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:00:42 http: TLS handshake error from 10.128.0.1:46476: EOF level=info timestamp=2018-07-30T15:00:42.673933Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:00:46.726475Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:00:48.967104Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:00:51.857308Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:00:52 http: TLS handshake error from 10.128.0.1:46522: EOF level=info timestamp=2018-07-30T15:00:58.704185Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:01:02.101564Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:01:02 http: TLS handshake error from 10.128.0.1:46572: EOF level=info timestamp=2018-07-30T15:01:09.606250Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:01:09.610577Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T14:54:39.655703Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-vrqn2 Pod phase: Running level=info timestamp=2018-07-30T14:56:18.935486Z pos=vm.go:135 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T14:56:18.935595Z pos=vm.go:186 component=virt-controller service=http namespace=default name=testvm kind= uid=b53c7a47-9408-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T14:56:18.935631Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T14:56:20.824478Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:56:20.825030Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T14:56:20.931110Z pos=vmi.go:157 component=virt-controller service=http reason="Operation 
cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibkmrq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibkmrq" level=info timestamp=2018-07-30T14:57:12.329993Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:57:12.335038Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T14:57:12.616050Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi84mpb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi84mpb" level=info timestamp=2018-07-30T14:57:12.642409Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi84mpb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi84mpb" level=info timestamp=2018-07-30T14:58:10.478138Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib8d7q kind= uid=f979c082-9408-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:58:10.478527Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib8d7q kind= uid=f979c082-9408-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T14:58:10.820677Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmib8d7q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmib8d7q" level=info timestamp=2018-07-30T14:59:12.259696Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T14:59:12.261272Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T14:59:31.007153Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmic9str kind=Domain uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T14:59:31.024701Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T14:59:31.024876Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmic9str, existing: true\n" level=info timestamp=2018-07-30T14:59:31.024903Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T14:59:31.024967Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T14:59:31.024995Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T14:59:31.025065Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T14:59:31.037201Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T14:59:31.095096Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T14:59:31.095216Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmic9str, existing: true\n" level=info timestamp=2018-07-30T14:59:31.095239Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T14:59:31.095268Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T14:59:31.095286Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T14:59:31.095385Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T14:59:31.103516Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T14:54:43.526335Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-07-30T14:54:43.533614Z pos=vm.go:210 component=virt-handler msg="Starting virt-handler controller." level=info timestamp=2018-07-30T14:54:43.597880Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-07-30T14:54:43.637909Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-30T14:54:43.676431Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-30T14:54:43.693120Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" Pod name: netcat26v8q Pod phase: Succeeded ++ head -n 1 +++ nc -ul 31016 +++ echo +++ nc -up 31016 192.168.66.101 31017 -i 1 -w 1 Hello UDP World! succeeded + x='Hello UDP World!' + echo 'Hello UDP World!' + '[' 'Hello UDP World!' = 'Hello UDP World!' ']' + echo succeeded + exit 0 Pod name: netcat28d4s Pod phase: Succeeded ++ head -n 1 +++ nc 172.30.40.32 27017 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcat7q6l4 Pod phase: Succeeded ++ head -n 1 +++ nc -up 28016 172.30.63.49 28017 -i 1 -w 1 +++ nc -ul 28016 +++ echo + x='Hello UDP World!' + echo 'Hello UDP World!' Hello UDP World! 
+ '[' 'Hello UDP World!' = 'Hello UDP World!' ']' + echo succeeded succeeded + exit 0 Pod name: netcatcx4js Pod phase: Running ++ head -n 1 +++ nc -ul 31016 +++ nc -up 31016 192.168.66.102 31017 -i 1 -w 1 +++ echo Pod name: netcatkn87m Pod phase: Succeeded ++ head -n 1 +++ nc -ul 29016 +++ nc -up 29016 172.30.77.2 29017 -i 1 -w 1 +++ echo + x='Hello UDP World!' + echo 'Hello UDP World!' + '[' 'Hello UDP World!' = 'Hello UDP World!' ']' + echo succeeded + exit 0 Hello UDP World! succeeded Pod name: netcattk6d2 Pod phase: Succeeded ++ head -n 1 +++ nc 192.168.66.102 30017 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatvf8dg Pod phase: Succeeded ++ head -n 1 +++ nc 192.168.66.101 30017 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: virt-launcher-testvmi84mpb-wsbp4 Pod phase: Running level=info timestamp=2018-07-30T14:57:32.815838Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:57:32.827316Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.085265Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:57:33.124284Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:57:33.140274Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T14:57:33.140772Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.140973Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:57:33.146134Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:57:33.161918Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:57:33.259142Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:57:33.823379Z pos=monitor.go:222 component=virt-launcher msg="Found PID for e9c5d4b6-d279-4afe-a999-db7ce68f7c5c: 168" level=info timestamp=2018-07-30T14:57:34.575653Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-30T14:57:34.576599Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-30T14:57:34.576698Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-30T14:57:34.593205Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi84mpb kind= uid=d6c37aff-9408-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmib8d7q-47hxp Pod phase: Running level=info timestamp=2018-07-30T14:58:27.894645Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" 
level=info timestamp=2018-07-30T14:58:29.251190Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T14:58:29.265376Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 0ae67d83-e80a-406b-b220-f43ea32701b9" level=info timestamp=2018-07-30T14:58:29.268916Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:58:29.287258Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:58:29.541679Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:58:29.592401Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:58:29.610390Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:58:29.625430Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:58:29.684063Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmib8d7q kind= uid=f979c082-9408-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T14:58:29.719194Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib8d7q kind= uid=f979c082-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:58:29.728297Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:58:29.750552Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:58:29.890489Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmib8d7q kind= uid=f979c082-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:58:30.291450Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 0ae67d83-e80a-406b-b220-f43ea32701b9: 173" Pod name: virt-launcher-testvmibkmrq-lb4qv Pod phase: Running level=info timestamp=2018-07-30T14:56:39.002613Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:56:39.207474Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.465289Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:56:39.500036Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:56:39.519193Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T14:56:39.520195Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.520353Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:56:39.531444Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:56:39.542400Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:56:39.551879Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:56:39.660245Z pos=converter.go:535 component=virt-launcher msg="The network interface type of default was changed to e1000 due to unsupported interface type by qemu slirp network" level=info timestamp=2018-07-30T14:56:39.664454Z pos=converter.go:751 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-07-30T14:56:39.664603Z pos=converter.go:752 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-07-30T14:56:39.669924Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmibkmrq kind= uid=b82a155b-9408-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:56:40.008655Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 4d4ff3b4-193a-4e53-90a6-06656dfd2914: 163" Pod name: virt-launcher-testvmic9str-rfxvg Pod phase: Running level=info timestamp=2018-07-30T14:59:29.602421Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T14:59:30.455385Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T14:59:30.487582Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:59:30.637898Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a59d387a-5cf4-4e92-ad4c-b2e8557a307e" level=info timestamp=2018-07-30T14:59:30.638858Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T14:59:30.984045Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T14:59:31.001921Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:59:31.008131Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T14:59:31.008323Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:59:31.008415Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T14:59:31.012565Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:59:31.035245Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T14:59:31.037682Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T14:59:31.101188Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmic9str kind= uid=1e4e6b87-9409-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T14:59:31.645623Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a59d387a-5cf4-4e92-ad4c-b2e8557a307e: 167" • Failure [66.842 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose UDP service on a VMI /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:166 Expose NodePort UDP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:205 Should expose a NodePort service on a VMI and connect to it [It] /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:210 Timed out after 60.000s. Expected : Running to equal : Succeeded /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:247 ------------------------------ STEP: Exposing the service via virtctl command STEP: Getting back the cluster IP given for the service STEP: Starting a pod which tries to reach the VMI via ClusterIP STEP: Getting the node IP from all nodes STEP: Starting a pod which tries to reach the VMI via NodePort STEP: Waiting for the pod to report a successful connection attempt STEP: Starting a pod which tries to reach the VMI via NodePort STEP: Waiting for the pod to report a successful connection attempt 2018/07/30 11:02:02 read closing down: EOF 2018/07/30 11:02:12 read closing down: EOF Service cluster-ip-vmirs successfully exposed for vmirs replicaset6vdzq • [SLOW TEST:66.776 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on a VMI replica set /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:253 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:286 Should create a ClusterIP service on VMRS and connect to it /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:290 ------------------------------ Service cluster-ip-vm successfully exposed for virtualmachine testvmihg7pc VM testvmihg7pc was scheduled to start 2018/07/30 11:03:04 read closing down: EOF • [SLOW TEST:51.723 seconds] Expose /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:53 Expose service on an VM /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:318 Expose ClusterIP service /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:362 Connect to ClusterIP services that was set when VM was offline /root/go/src/kubevirt.io/kubevirt/tests/expose_test.go:363 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.010 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to start a vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133 Skip Windows tests that requires PVC disk-windows 
/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.012 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to stop a running vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.008 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have correct UUID /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.017 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have pod IP /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.010 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to start a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.016 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to stop a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384 ------------------------------ •• ------------------------------ • [SLOW TEST:21.576 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should update VirtualMachine once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:195 ------------------------------ •• ------------------------------ • [SLOW TEST:74.083 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if it gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:245 ------------------------------ • [SLOW TEST:126.771 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:265 ------------------------------ • [SLOW TEST:67.865 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given 
/root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should stop VirtualMachineInstance if running set to false /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:325 ------------------------------ • [SLOW TEST:405.026 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should start and stop VirtualMachineInstance multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:333 ------------------------------ • [SLOW TEST:94.361 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should not update the VirtualMachineInstance spec if Running /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:346 ------------------------------ Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 15:19:09 http: TLS handshake error from 10.129.0.1:56014: EOF 2018/07/30 15:19:19 http: TLS handshake error from 10.129.0.1:56024: EOF 2018/07/30 15:19:29 http: TLS handshake error from 10.129.0.1:56034: EOF 2018/07/30 15:19:39 http: TLS handshake error from 10.129.0.1:56044: EOF 2018/07/30 15:19:49 http: TLS handshake error from 10.129.0.1:56054: EOF 2018/07/30 15:19:59 http: TLS handshake error from 10.129.0.1:56064: EOF 2018/07/30 15:20:09 http: TLS handshake error from 10.129.0.1:56074: EOF 2018/07/30 15:20:19 http: TLS handshake error from 10.129.0.1:56084: EOF level=info timestamp=2018-07-30T15:20:29.159557Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:20:29 http: TLS handshake error from 10.129.0.1:56094: EOF 2018/07/30 15:20:39 http: TLS handshake error from 10.129.0.1:56104: EOF 2018/07/30 15:20:49 http: TLS handshake error from 10.129.0.1:56114: EOF level=info timestamp=2018-07-30T15:20:59.008798Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:20:59 http: TLS handshake error from 10.129.0.1:56124: EOF 2018/07/30 15:21:09 http: TLS handshake error from 10.129.0.1:56136: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running 2018/07/30 15:20:32 http: TLS handshake error from 10.128.0.1:52208: EOF level=info timestamp=2018-07-30T15:20:42.025410Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:20:42 http: TLS handshake error from 10.128.0.1:52254: EOF 2018/07/30 15:20:52 http: TLS handshake error from 10.128.0.1:52300: EOF level=info timestamp=2018-07-30T15:20:52.139018Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:20:52.607447Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:20:52.615038Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 
contentLength=19 level=info timestamp=2018-07-30T15:20:57.569625Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:20:58.361990Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:20:58.483675Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:21:02 http: TLS handshake error from 10.128.0.1:52350: EOF level=info timestamp=2018-07-30T15:21:02.285496Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:21:08.935341Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:21:08.957378Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:21:09.008168Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T14:54:39.655703Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-vrqn2 Pod phase: Running level=info timestamp=2018-07-30T15:16:26.304716Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T15:16:26.305604Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:16:26.305808Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T15:16:26.388149Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:16:26.388295Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T15:16:26.436640Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizsj78\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing 
VirtualMachineInstance kubevirt-test-default/testvmizsj78" level=info timestamp=2018-07-30T15:16:26.455946Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:16:26.456028Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T15:16:26.463226Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizsj78\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizsj78" level=info timestamp=2018-07-30T15:16:43.187013Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:16:43.187656Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T15:16:45.341203Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:16:45.341439Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: true" level=info timestamp=2018-07-30T15:16:45.412265Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:16:45.413565Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsj78 kind= uid=869c7355-940b-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: true" Pod name: virt-handler-c8dmh Pod phase: Running level=error timestamp=2018-07-30T15:17:17.000137Z pos=vm.go:424 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=VirtualMachineInstance uid= reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-30T15:17:17.000367Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmitrbvs" level=info timestamp=2018-07-30T15:17:20.462201Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-30T15:17:20.462563Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitrbvs, existing: false\n" level=info timestamp=2018-07-30T15:17:20.462626Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:17:20.462889Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-30T15:17:20.464636Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:17:20.465326Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitrbvs, existing: false\n" level=info timestamp=2018-07-30T15:17:20.465466Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:17:20.465604Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:17:20.465880Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:17:37.481337Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmitrbvs, existing: false\n" level=info timestamp=2018-07-30T15:17:37.482516Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:17:37.483276Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:17:37.484149Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitrbvs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=error timestamp=2018-07-30T15:03:25.010395Z pos=vm.go:424 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-07-30T15:03:25.010536Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9jlm84kmcx" level=info timestamp=2018-07-30T15:03:28.601426Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-30T15:03:28.604300Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9jlm84kmcx, existing: false\n" level=info timestamp=2018-07-30T15:03:28.606212Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:03:28.610609Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:03:28.612556Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:03:28.614649Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9jlm84kmcx, existing: false\n" level=info timestamp=2018-07-30T15:03:28.614850Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:03:28.616258Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-30T15:03:28.616504Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:03:35.257383Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9jlm84kmcx, existing: false\n" level=info timestamp=2018-07-30T15:03:35.280161Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:03:35.288219Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:03:35.288468Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmizsj78-xktwp Pod phase: Running level=info timestamp=2018-07-30T15:16:44.481902Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:16:44.483888Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID c5e057cc-c155-5821-8294-264c61919d96" level=info timestamp=2018-07-30T15:16:44.491130Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:16:44.501715Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:16:45.211689Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:16:45.230606Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:16:45.254530Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:16:45.255994Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:16:45.255713Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmizsj78 kind= uid=86a7079f-940b-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T15:16:45.271140Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmizsj78 kind= uid=86a7079f-940b-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:16:45.274785Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:16:45.281753Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:16:45.408101Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmizsj78 kind= uid=86a7079f-940b-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:16:45.419769Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmizsj78 kind= uid=86a7079f-940b-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:16:45.499070Z pos=monitor.go:222 component=virt-launcher msg="Found PID for c5e057cc-c155-5821-8294-264c61919d96: 166" 2018/07/30 11:21:11 read closing down: EOF • Failure [285.704 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 should survive guest shutdown, multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:387 Timed out after 240.000s. No new VirtualMachineInstance instance showed up Expected : false to be true /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:429 ------------------------------ STEP: Creating new VMI, not running STEP: Starting the VirtualMachineInstance STEP: VMI has the running condition STEP: Getting the running VirtualMachineInstance STEP: Obtaining the serial console STEP: Guest shutdown STEP: waiting for the controller to replace the shut-down vmi with a new instance VM testvmix77v2 was scheduled to start • [SLOW TEST:21.892 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should start a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 ------------------------------ VM testvmidwpfn was scheduled to stop • [SLOW TEST:62.580 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:47 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:115 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:435 should stop a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:467 ------------------------------ Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 15:24:49 http: TLS handshake error from 10.129.0.1:56358: EOF level=info timestamp=2018-07-30T15:24:59.174563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:24:59 http: TLS handshake error from 10.129.0.1:56368: EOF 2018/07/30 15:25:09 http: TLS handshake error from 10.129.0.1:56378: EOF 2018/07/30 15:25:19 http: TLS handshake error from 10.129.0.1:56388: EOF 2018/07/30 15:25:29 http: TLS handshake error from 10.129.0.1:56398: EOF 2018/07/30 15:25:39 http: TLS handshake error from 10.129.0.1:56408: EOF 2018/07/30 
15:25:49 http: TLS handshake error from 10.129.0.1:56418: EOF level=info timestamp=2018-07-30T15:25:59.203141Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:25:59 http: TLS handshake error from 10.129.0.1:56428: EOF 2018/07/30 15:26:09 http: TLS handshake error from 10.129.0.1:56438: EOF 2018/07/30 15:26:19 http: TLS handshake error from 10.129.0.1:56448: EOF 2018/07/30 15:26:29 http: TLS handshake error from 10.129.0.1:56458: EOF 2018/07/30 15:26:39 http: TLS handshake error from 10.129.0.1:56468: EOF 2018/07/30 15:26:49 http: TLS handshake error from 10.129.0.1:56478: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T15:26:01.796599Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:26:02 http: TLS handshake error from 10.128.0.1:53774: EOF level=info timestamp=2018-07-30T15:26:09.523975Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:26:12 http: TLS handshake error from 10.128.0.1:53820: EOF level=info timestamp=2018-07-30T15:26:19.797957Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:26:22 http: TLS handshake error from 10.128.0.1:53866: EOF level=info timestamp=2018-07-30T15:26:28.859371Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:26:30.059081Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:26:30.744007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:26:31.998149Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:26:32 http: TLS handshake error from 10.128.0.1:53916: EOF level=info timestamp=2018-07-30T15:26:32.255099Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:26:40.302516Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:26:42 http: TLS handshake error from 10.128.0.1:53962: EOF level=info timestamp=2018-07-30T15:26:50.533382Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T14:54:39.655703Z 
pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-vrqn2 Pod phase: Running level=info timestamp=2018-07-30T15:22:35.506049Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:22:35.506551Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T15:22:35.609063Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:22:35.609270Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T15:22:35.645027Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:22:35.645353Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T15:22:35.645571Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T15:22:35.685078Z pos=vm.go:135 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Started processing VM" level=info timestamp=2018-07-30T15:22:35.685392Z pos=vm.go:186 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidwpfn kind= uid=3ddf2ddc-940c-11e8-82a2-525500d15501 msg="Creating or the VirtualMachineInstance: false" level=info timestamp=2018-07-30T15:22:35.685479Z pos=vm.go:262 component=virt-controller service=http msg="vmi is nil" level=info timestamp=2018-07-30T15:22:36.077939Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixjzdr kind= uid=6312f5da-940c-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:22:36.078728Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixjzdr kind= uid=6312f5da-940c-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:22:37.057295Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixjzdr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixjzdr" level=info timestamp=2018-07-30T15:23:51.496213Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixjzdr kind= uid=90060af8-940c-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:23:51.497215Z pos=preset.go:171 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmixjzdr kind= uid=90060af8-940c-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T15:23:50.667713Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:24:07.696690Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmixjzdr, existing: false\n" level=info timestamp=2018-07-30T15:24:07.697414Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:24:07.697778Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:24:07.698141Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:24:08.881086Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmixjzdr, existing: true\n" level=info timestamp=2018-07-30T15:24:08.881700Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T15:24:08.882089Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:24:08.882729Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmixjzdr kind= uid=90060af8-940c-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:24:08.921804Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmixjzdr kind= uid=90060af8-940c-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:24:08.930615Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmixjzdr, existing: true\n" level=info timestamp=2018-07-30T15:24:08.932203Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T15:24:08.934718Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:24:08.935465Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmixjzdr kind= uid=90060af8-940c-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:24:08.936030Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmixjzdr kind= uid=90060af8-940c-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=error timestamp=2018-07-30T15:03:25.010395Z pos=vm.go:424 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-30T15:03:25.010536Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi9jlm84kmcx" level=info timestamp=2018-07-30T15:03:28.601426Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-30T15:03:28.604300Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9jlm84kmcx, existing: false\n" level=info timestamp=2018-07-30T15:03:28.606212Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:03:28.610609Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:03:28.612556Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:03:28.614649Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9jlm84kmcx, existing: false\n" level=info timestamp=2018-07-30T15:03:28.614850Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:03:28.616258Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:03:28.616504Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:03:35.257383Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi9jlm84kmcx, existing: false\n" level=info timestamp=2018-07-30T15:03:35.280161Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:03:35.288219Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:03:35.288468Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi9jlm84kmcx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmixjzdr-8zhf4 Pod phase: Running level=info timestamp=2018-07-30T15:23:57.176083Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-30T15:23:57.176450Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-30T15:23:57.181481Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-30T15:24:07.193434Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-30T15:24:07.334857Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmixjzdr" level=info timestamp=2018-07-30T15:24:07.339068Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-30T15:24:07.339428Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [256.484 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting and stopping the same VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:90 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:91 should success multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:92 Timed out after 180.013s. Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting the VirtualMachineInstance level=info timestamp=2018-07-30T15:22:37.443104Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmixjzdr-j8c4g" level=info timestamp=2018-07-30T15:22:54.971023Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmixjzdr-j8c4g" level=info timestamp=2018-07-30T15:22:56.868301Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T15:22:56.923657Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
STEP: Stopping the VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance is gone STEP: Starting the VirtualMachineInstance level=info timestamp=2018-07-30T15:23:52.303235Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmixjzdr-j8c4g" level=info timestamp=2018-07-30T15:23:52.303435Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmixjzdr-j8c4g" level=info timestamp=2018-07-30T15:23:52.303897Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T15:23:52.304131Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmixjzdr kind=VirtualMachineInstance uid=6312f5da-940c-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." • [SLOW TEST:20.616 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:111 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:112 should not modify the spec on status update /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:113 ------------------------------ • [SLOW TEST:36.425 seconds] RegistryDisk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:41 Starting multiple VMIs /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:129 with ephemeral registry disk /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:130 should success /root/go/src/kubevirt.io/kubevirt/tests/registry_disk_test.go:131 ------------------------------ volumedisk0 compute • [SLOW TEST:53.926 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 2018/07/30 11:28:43 read closing down: EOF VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with 3 CPU cores /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56 should report 3 cpu cores under guest OS /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62 ------------------------------ • ------------------------------ • [SLOW TEST:20.233 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-2Mi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] [0.304 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-1Gi [It] 
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-1Gi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:216 ------------------------------ •2018/07/30 11:31:01 read closing down: EOF ------------------------------ • [SLOW TEST:114.804 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:340 should report defined CPU model /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:341 ------------------------------ 2018/07/30 11:33:24 read closing down: EOF • [SLOW TEST:142.206 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model equals to passthrough /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:368 should report exactly the same model as node CPU /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:369 ------------------------------ 2018/07/30 11:35:39 read closing down: EOF • [SLOW TEST:134.970 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model not defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:392 should report CPU model from libvirt capabilities /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:393 ------------------------------ • [SLOW TEST:44.571 seconds] 2018/07/30 11:36:23 read closing down: EOF Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 New VirtualMachineInstance with all supported drives /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:413 should have all the device nodes /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:436 ------------------------------ • [SLOW TEST:42.732 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance 2018/07/30 11:37:06 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 should have cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 ------------------------------ • [SLOW TEST:102.437 seconds] CloudInit UserData 2018/07/30 11:38:48 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:81 with injected ssh-key /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:92 should have ssh-key under authorized keys /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:93 ------------------------------ 2018/07/30 11:39:29 read closing down: EOF • [SLOW TEST:51.241 seconds] CloudInit UserData 2018/07/30 11:39:40 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 with cloudInitNoCloud userData source 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:118 should process provided cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:119 ------------------------------ 2018/07/30 11:40:21 read closing down: EOF • [SLOW TEST:40.989 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:80 should take user-data from k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:162 ------------------------------ • [SLOW TEST:21.774 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:21.222 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given an vm /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.694 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi preset /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:20.588 seconds] User Access /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:33 With default kubevirt service accounts /root/go/src/kubevirt.io/kubevirt/tests/access_test.go:41 should verify permissions are correct for view, edit, and admin /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 given a vmi replica set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 15:41:39 http: TLS handshake error from 10.129.0.1:57384: EOF 2018/07/30 15:41:49 http: TLS handshake error from 10.129.0.1:57394: EOF 2018/07/30 15:41:59 http: TLS handshake error from 10.129.0.1:57404: EOF level=info timestamp=2018-07-30T15:42:00.007509Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:42:05.012570Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 15:42:09 http: TLS handshake error from 10.129.0.1:57416: EOF 2018/07/30 15:42:19 http: TLS handshake error from 10.129.0.1:57426: EOF 2018/07/30 15:42:29 http: TLS handshake error 
from 10.129.0.1:57438: EOF level=error timestamp=2018-07-30T15:42:30.431920Z pos=subresource.go:85 component=virt-api msg= 2018/07/30 15:42:30 http: response.WriteHeader on hijacked connection level=error timestamp=2018-07-30T15:42:30.433069Z pos=subresource.go:97 component=virt-api reason="read tcp 10.129.0.3:8443->10.128.0.1:51320: use of closed network connection" msg="error ecountered reading from websocket stream" level=info timestamp=2018-07-30T15:42:30.433514Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi76cpq/console proto=HTTP/1.1 statusCode=200 contentLength=0 2018/07/30 15:42:39 http: TLS handshake error from 10.129.0.1:57448: EOF 2018/07/30 15:42:49 http: TLS handshake error from 10.129.0.1:57458: EOF 2018/07/30 15:42:59 http: TLS handshake error from 10.129.0.1:57468: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T15:42:29.022175Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:42:29.876089Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:42:32 http: TLS handshake error from 10.128.0.1:58670: EOF level=info timestamp=2018-07-30T15:42:39.158220Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:42:42 http: TLS handshake error from 10.128.0.1:58716: EOF level=info timestamp=2018-07-30T15:42:49.553571Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:42:50.282784Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:42:50.570149Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:42:52 http: TLS handshake error from 10.128.0.1:58762: EOF level=info timestamp=2018-07-30T15:42:53.990327Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:42:55.722426Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:42:55.741022Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:42:59.712254Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:42:59.940627Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:43:02 http: TLS handshake error from 10.128.0.1:58812: EOF Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T14:54:39.655703Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-vrqn2 Pod phase: Running level=info timestamp=2018-07-30T15:35:39.470118Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivd6zq kind= uid=3603a591-940e-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:35:39.470975Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivd6zq kind= uid=3603a591-940e-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:35:39.938152Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmivd6zq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmivd6zq" level=info timestamp=2018-07-30T15:36:23.684533Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim4jtx kind= uid=505ebdf9-940e-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:36:23.685438Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim4jtx kind= uid=505ebdf9-940e-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:37:06.303734Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirblhs kind= uid=69c6ad4e-940e-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:37:06.304159Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirblhs kind= uid=69c6ad4e-940e-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:37:06.543077Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirblhs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirblhs" level=info timestamp=2018-07-30T15:38:48.644456Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivpqj9 kind= uid=a6c8964f-940e-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:38:48.645272Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmivpqj9 kind= uid=a6c8964f-940e-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:39:40.007253Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizfk4r kind= uid=c5649adc-940e-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:39:40.008214Z pos=preset.go:171 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmizfk4r kind= uid=c5649adc-940e-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:39:40.237742Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizfk4r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizfk4r" level=info timestamp=2018-07-30T15:41:45.081999Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:41:45.082767Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T15:42:04.568714Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmi76cpq kind=Domain uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T15:42:04.635105Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T15:42:04.650065Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:42:04.650303Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi76cpq, existing: true\n" level=info timestamp=2018-07-30T15:42:04.650382Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T15:42:04.650465Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:42:04.650538Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:42:04.650705Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:42:04.765558Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:42:04.765767Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi76cpq, existing: true\n" level=info timestamp=2018-07-30T15:42:04.765917Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T15:42:04.766013Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:42:04.766064Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:42:04.766299Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T15:42:04.787628Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T15:36:28.612616Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:28.614005Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:36:28.614321Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:36:29.491016Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilqjrz, existing: false\n" level=info timestamp=2018-07-30T15:36:29.492736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:29.493226Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilqjrz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:36:29.493559Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilqjrz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:36:29.527212Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmifrjtt, existing: false\n" level=info timestamp=2018-07-30T15:36:29.528407Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:29.529441Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifrjtt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:36:29.530639Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmifrjtt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:36:29.907556Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmimbzhs, existing: false\n" level=info timestamp=2018-07-30T15:36:29.907668Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:29.907788Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:36:29.907927Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi76cpq-lmsgs Pod phase: Running level=info timestamp=2018-07-30T15:42:03.099060Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Domain defined." 
level=info timestamp=2018-07-30T15:42:03.858565Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:42:03.905142Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:42:03.915300Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID ee1e855f-b82e-4cbd-a9d0-3bf99a6ed830" level=info timestamp=2018-07-30T15:42:03.921243Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:42:04.521064Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:42:04.562088Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:42:04.576130Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:42:04.582625Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:42:04.585155Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:42:04.609245Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:42:04.615687Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:42:04.637601Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:42:04.777874Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi76cpq kind= uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:42:04.932928Z pos=monitor.go:222 component=virt-launcher msg="Found PID for ee1e855f-b82e-4cbd-a9d0-3bf99a6ed830: 164" 2018/07/30 11:43:06 read closing down: EOF • Failure [81.532 seconds] Health Monitoring /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37 A VirtualMachineInstance with a watchdog device /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56 should be shut down when the watchdog expires [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57 Timed out after 40.011s. Expected : Running to equal : Failed /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:85 ------------------------------ STEP: Starting a VirtualMachineInstance level=info timestamp=2018-07-30T15:41:46.104642Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi76cpq kind=VirtualMachineInstance uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi76cpq-lmsgs" level=info timestamp=2018-07-30T15:42:03.310185Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi76cpq kind=VirtualMachineInstance uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi76cpq-lmsgs" level=info timestamp=2018-07-30T15:42:05.272797Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi76cpq kind=VirtualMachineInstance uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." 
level=info timestamp=2018-07-30T15:42:05.345996Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi76cpq kind=VirtualMachineInstance uid=0febf1ee-940f-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." STEP: Expecting the VirtualMachineInstance console STEP: Killing the watchdog device STEP: Checking that the VirtualMachineInstance has Failed status • [SLOW TEST:39.606 seconds] LeaderElection /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43 Start a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53 when the controller pod is not running /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54 should success /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55 ------------------------------ •••••••••••2018/07/30 11:44:33 read closing down: EOF ------------------------------ • [SLOW TEST:41.483 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a cirros image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67 should return that we are running cirros /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68 ------------------------------ • [SLOW TEST:45.888 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance 2018/07/30 11:45:19 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a fedora image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77 should return that we are running fedora /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78 ------------------------------ 2018/07/30 11:45:51 read closing down: EOF 2018/07/30 11:45:51 read closing down: EOF 2018/07/30 11:45:52 read closing down: EOF 2018/07/30 11:45:53 read closing down: EOF 2018/07/30 11:45:53 read closing down: EOF • [SLOW TEST:34.672 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should be able to reconnect to console multiple times /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87 ------------------------------ • [SLOW TEST:19.014 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should wait until the virtual machine is in running state and return a stream interface /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103 ------------------------------ • [SLOW TEST:30.546 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should fail waiting for the virtual machine instance to be running /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111 ------------------------------ • [SLOW TEST:30.542 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance 
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should fail waiting for the expecter /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134 ------------------------------ Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running level=info timestamp=2018-07-30T15:47:06.410351Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8fcw5/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-07-30T15:47:08.742372Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:47:08.742916Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8fcw5/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/07/30 15:47:09 http: TLS handshake error from 10.129.0.1:57644: EOF level=error timestamp=2018-07-30T15:47:09.880033Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:47:09.880255Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8fcw5/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-07-30T15:47:13.407539Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:47:13.407798Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8fcw5/console proto=HTTP/1.1 statusCode=400 contentLength=90 level=error timestamp=2018-07-30T15:47:14.498437Z pos=subresource.go:144 component=virt-api reason="VirtualMachineInstance testvmi8fcw5 in namespace kubevirt-test-default not found." msg="Failed to gather remote exec info for subresource request." 
level=info timestamp=2018-07-30T15:47:14.498596Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8fcw5/console proto=HTTP/1.1 statusCode=404 contentLength=81 2018/07/30 15:47:19 http: TLS handshake error from 10.129.0.1:57650: EOF 2018/07/30 15:47:29 http: TLS handshake error from 10.129.0.1:57656: EOF level=info timestamp=2018-07-30T15:47:32.640007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:47:39 http: TLS handshake error from 10.129.0.1:57662: EOF 2018/07/30 15:47:49 http: TLS handshake error from 10.129.0.1:57668: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=error timestamp=2018-07-30T15:47:11.834345Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:47:11.834579Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8fcw5/console proto=HTTP/1.1 statusCode=400 contentLength=90 2018/07/30 15:47:12 http: TLS handshake error from 10.128.0.1:60480: EOF level=info timestamp=2018-07-30T15:47:15.213904Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:47:22 http: TLS handshake error from 10.128.0.1:60534: EOF level=info timestamp=2018-07-30T15:47:23.276841Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:47:23.280479Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:47:25.419380Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:47:26.471653Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:47:27.764111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T15:47:27.831400Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:47:32 http: TLS handshake error from 10.128.0.1:60596: EOF level=info timestamp=2018-07-30T15:47:36.428731Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:47:42 http: TLS handshake error from 10.128.0.1:60648: EOF level=info timestamp=2018-07-30T15:47:46.551962Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T15:43:10.344661Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:46:43.586457Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8fcw5 kind= uid=c1de5976-940f-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:46:43.794045Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8fcw5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8fcw5" level=info timestamp=2018-07-30T15:46:43.821331Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8fcw5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8fcw5" level=info timestamp=2018-07-30T15:47:13.690226Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8fcw5\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8fcw5, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: c1de5976-940f-11e8-82a2-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8fcw5" level=info timestamp=2018-07-30T15:47:13.812564Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilhnlp kind= uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:47:13.813099Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilhnlp kind= uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:47:13.835058Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimrfbx kind= uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:47:13.835341Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimrfbx kind= uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:47:13.897235Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin8dpc kind= uid=d3eac6fb-940f-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:47:13.897516Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin8dpc kind= uid=d3eac6fb-940f-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:47:13.907975Z pos=preset.go:142 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmizzkfq kind= uid=d3f1d3a2-940f-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:47:13.908187Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizzkfq kind= uid=d3f1d3a2-940f-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:47:14.354974Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizzkfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizzkfq" level=info timestamp=2018-07-30T15:47:14.360526Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin8dpc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin8dpc" level=info timestamp=2018-07-30T15:47:14.875860Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizzkfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizzkfq" Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T15:47:46.406488Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T15:47:46.406519Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:47:46.406539Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:47:46.406617Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmizzkfq kind= uid=d3f1d3a2-940f-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:47:47.488539Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmin8dpc kind= uid=d3eac6fb-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:47:47.753231Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T15:47:47.868284Z pos=vm.go:756 component=virt-handler namespace=kubevirt-test-default name=testvmimrfbx kind=Domain uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T15:47:47.875403Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmizzkfq kind= uid=d3f1d3a2-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:47:48.712152Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmimrfbx kind= uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T15:47:48.712270Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmimrfbx, existing: true\n" level=info timestamp=2018-07-30T15:47:48.712298Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T15:47:48.712326Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:47:48.712344Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:47:48.712395Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmimrfbx kind= uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:47:51.012408Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T15:36:28.612616Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:28.614005Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:36:28.614321Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:36:29.491016Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilqjrz, existing: false\n" level=info timestamp=2018-07-30T15:36:29.492736Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:29.493226Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilqjrz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:36:29.493559Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilqjrz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:36:29.527212Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmifrjtt, existing: false\n" level=info timestamp=2018-07-30T15:36:29.528407Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:29.529441Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifrjtt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:36:29.530639Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmifrjtt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:36:29.907556Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmimbzhs, existing: false\n" level=info timestamp=2018-07-30T15:36:29.907668Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:36:29.907788Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-30T15:36:29.907927Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmimbzhs kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmilhnlp-wj478 Pod phase: Running level=info timestamp=2018-07-30T15:47:37.938725Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:47:38.528377Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:47:38.535763Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:38.888185Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID db174e22-c00c-4608-be40-e2586990feb9" level=info timestamp=2018-07-30T15:47:38.889538Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:47:39.214423Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:47:39.410213Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:39.441144Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmilhnlp kind= uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:47:39.461426Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilhnlp kind= uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:47:39.468888Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:39.469142Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:47:39.512126Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:39.853267Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:39.914281Z pos=monitor.go:222 component=virt-launcher msg="Found PID for db174e22-c00c-4608-be40-e2586990feb9: 165" level=info timestamp=2018-07-30T15:47:40.468958Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmilhnlp kind= uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmimrfbx-8dv9t Pod phase: Running level=info timestamp=2018-07-30T15:47:42.985037Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmimrfbx kind= uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="Domain defined." 
level=info timestamp=2018-07-30T15:47:42.991710Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:47:44.815380Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:47:44.852419Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 6ae7e1e4-31a9-4d8e-b5ca-64e9fb2de63a" level=info timestamp=2018-07-30T15:47:44.853222Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:47:45.902632Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 6ae7e1e4-31a9-4d8e-b5ca-64e9fb2de63a: 170" level=info timestamp=2018-07-30T15:47:45.983357Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmimrfbx kind= uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:47:45.985358Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmimrfbx kind= uid=d3e65d71-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:47:46.155871Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:46.156109Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:47:46.214536Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:47.758279Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:47.758688Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:47:47.795072Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:51.013017Z pos=client.go:145 component=virt-launcher msg="processed event" Pod name: virt-launcher-testvmin8dpc-bfc6s Pod phase: Running level=info timestamp=2018-07-30T15:47:39.481200Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmin8dpc kind= uid=d3eac6fb-940f-11e8-82a2-525500d15501 msg="Domain defined." level=info timestamp=2018-07-30T15:47:39.481770Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:47:40.641524Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:47:40.671773Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 9a59c88f-15c8-4f37-80b5-97be6d6997cc" level=info timestamp=2018-07-30T15:47:40.672983Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:47:40.825956Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:41.058208Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:47:41.094997Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:41.104516Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmin8dpc kind= uid=d3eac6fb-940f-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T15:47:41.109135Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmin8dpc kind= uid=d3eac6fb-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:47:41.455227Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:41.455356Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:47:41.499534Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:41.711962Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 9a59c88f-15c8-4f37-80b5-97be6d6997cc: 167" level=info timestamp=2018-07-30T15:47:42.843842Z pos=client.go:145 component=virt-launcher msg="processed event" Pod name: virt-launcher-testvmizzkfq-2xdw4 Pod phase: Running level=info timestamp=2018-07-30T15:47:39.686741Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:47:40.754169Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:47:40.799010Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID a2c43a87-c919-4df7-8629-5b0bb590cd91" level=info timestamp=2018-07-30T15:47:40.799641Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:47:40.864046Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:41.412196Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:47:41.454270Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:41.476061Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmizzkfq kind= uid=d3f1d3a2-940f-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T15:47:41.479362Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmizzkfq kind= uid=d3f1d3a2-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:47:41.837464Z pos=monitor.go:222 component=virt-launcher msg="Found PID for a2c43a87-c919-4df7-8629-5b0bb590cd91: 173" level=info timestamp=2018-07-30T15:47:42.759115Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:42.759266Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:47:42.812257Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:47:45.151352Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:47:51.554077Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmizzkfq kind= uid=d3f1d3a2-940f-11e8-82a2-525500d15501 msg="Synced vmi" • Failure in Spec Setup (BeforeEach) [38.349 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Expected error: <*errors.errorString | 0xc420920780>: { s: "Timeout trying to connect to the virtual machine instance", } Timeout trying to connect to the virtual machine instance not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1102 ------------------------------ level=info timestamp=2018-07-30T15:47:14.909319Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilhnlp kind=VirtualMachineInstance uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmilhnlp-wj478" level=info timestamp=2018-07-30T15:47:37.774219Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilhnlp kind=VirtualMachineInstance uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmilhnlp-wj478" level=info timestamp=2018-07-30T15:47:40.587494Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilhnlp kind=VirtualMachineInstance uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T15:47:41.163827Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilhnlp kind=VirtualMachineInstance uid=d3e40a7b-940f-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
2018/07/30 11:49:13 read closing down: EOF 2018/07/30 11:49:24 read closing down: EOF 2018/07/30 11:49:35 read closing down: EOF 2018/07/30 11:49:45 read closing down: EOF 2018/07/30 11:49:46 read closing down: EOF 2018/07/30 11:49:49 read closing down: EOF 2018/07/30 11:49:51 read closing down: EOF 2018/07/30 11:49:51 read closing down: EOF • [SLOW TEST:118.638 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with pod network connectivity explicitly set /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] [0.016 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the Inbound VirtualMachineInstance with custom MAC address [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Openshift detected: Custom MAC addresses on pod networks are not suppored /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1570 ------------------------------ 2018/07/30 11:49:54 read closing down: EOF •2018/07/30 11:49:54 read closing down: EOF 2018/07/30 11:49:54 read closing down: EOF ------------------------------ • [SLOW TEST:5.329 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be reachable via the propagated IP from a Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 on the same node from Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:6.262 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be reachable via the propagated IP from a Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 on a different node from Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:5.511 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a service matching the vmi exposed /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:272 should be able to reach the vmi based on labels specified on the vmi /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:292 ------------------------------ • ------------------------------ • [SLOW TEST:5.623 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 with a subdomain and a headless service given /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:319 should be able to reach the vmi via its unique fully qualified domain name /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:342 ------------------------------ 2018/07/30 11:53:45 read closing down: EOF Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 15:51:53 http: TLS handshake error from 10.129.0.1:40910: EOF 2018/07/30 15:52:03 http: TLS handshake error from 
10.129.0.1:40914: EOF 2018/07/30 15:52:13 http: TLS handshake error from 10.129.0.1:40920: EOF 2018/07/30 15:52:23 http: TLS handshake error from 10.129.0.1:40926: EOF level=info timestamp=2018-07-30T15:52:30.252616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:52:33 http: TLS handshake error from 10.129.0.1:40932: EOF 2018/07/30 15:52:43 http: TLS handshake error from 10.129.0.1:40936: EOF 2018/07/30 15:52:53 http: TLS handshake error from 10.129.0.1:40942: EOF level=info timestamp=2018-07-30T15:53:00.118732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:53:03 http: TLS handshake error from 10.129.0.1:40948: EOF 2018/07/30 15:53:13 http: TLS handshake error from 10.129.0.1:40954: EOF 2018/07/30 15:53:23 http: TLS handshake error from 10.129.0.1:40960: EOF level=info timestamp=2018-07-30T15:53:30.258994Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 15:53:33 http: TLS handshake error from 10.129.0.1:40968: EOF 2018/07/30 15:53:43 http: TLS handshake error from 10.129.0.1:40974: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T15:53:01.090296Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:53:01.118213Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:53:02 http: TLS handshake error from 10.128.0.1:34192: EOF level=info timestamp=2018-07-30T15:53:02.967087Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:53:12 http: TLS handshake error from 10.128.0.1:34242: EOF level=info timestamp=2018-07-30T15:53:13.305859Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:53:22 http: TLS handshake error from 10.128.0.1:34292: EOF level=info timestamp=2018-07-30T15:53:23.397459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:53:31.252410Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:53:31.395765Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T15:53:31.423814Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:53:32 http: TLS handshake error from 10.128.0.1:34346: 
EOF level=info timestamp=2018-07-30T15:53:33.766456Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 15:53:42 http: TLS handshake error from 10.128.0.1:34396: EOF level=info timestamp=2018-07-30T15:53:43.907049Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T15:43:10.344661Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 level=info timestamp=2018-07-30T15:51:06.790737Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-30T15:51:06.793743Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-30T15:51:06.793824Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-30T15:51:06.793894Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-30T15:51:06.793939Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer" level=info timestamp=2018-07-30T15:51:06.793988Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-30T15:51:06.794036Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-07-30T15:51:06.794081Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-30T15:51:06.832459Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-30T15:51:06.844040Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-30T15:51:06.844361Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-30T15:51:06.844555Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-30T15:51:06.851359Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." 
Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:51:40.384681Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T15:51:29.812115Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:51:29.812347Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:51:29.812477Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:51:29.835648Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2sfx7 kind=VirtualMachineInstance uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:51:29.857494Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi2sfx7, existing: true\n" level=info timestamp=2018-07-30T15:51:29.857600Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T15:51:29.857653Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:51:29.857807Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:51:29.857896Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:51:29.873248Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi22vvl kind=VirtualMachineInstance uid=46f4b824-9410-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:51:29.873383Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi22vvl, existing: true\n" level=info timestamp=2018-07-30T15:51:29.873424Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T15:51:29.873470Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:51:29.873585Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi22vvl kind= uid=46f4b824-9410-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:51:29.873660Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi22vvl kind= uid=46f4b824-9410-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T15:48:27.404333Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T15:48:27.505824Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T15:48:27.506263Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6bt4k, existing: true\n" level=info timestamp=2018-07-30T15:48:27.509275Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T15:48:27.509594Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:48:27.509898Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:48:27.510222Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T15:48:27.518976Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:48:27.537009Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:48:27.538445Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi25vsv, existing: true\n" level=info timestamp=2018-07-30T15:48:27.538834Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T15:48:27.539664Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:48:27.543678Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:48:27.552315Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T15:48:27.595746Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: netcatdlf86 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatgphzt Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatq7r45 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcatrmqts Pod phase: Failed Unable to retrieve container logs for docker://4f357b0c6c677016330c1f3f651c0f251a604afb8814ad750677f148aada5013 Pod name: netcatt8p7n Pod phase: Succeeded ++ head -n 1 +++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcattj2q9 Pod phase: Succeeded Unable to retrieve container logs for docker://36a19b8cdfaadb5a38b7b74487c1bf2902e49d9523234550f716325b998c9029 Pod name: netcatxb6z9 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' 
']' + echo succeeded + exit 0 Pod name: virt-launcher-testvmi22vvl-vlx5t Pod phase: Running level=info timestamp=2018-07-30T15:51:45.282834Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-30T15:51:45.283038Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-30T15:51:45.287696Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-30T15:51:55.443946Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-30T15:51:55.517514Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi22vvl" level=info timestamp=2018-07-30T15:51:55.520173Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-30T15:51:55.520642Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmi25vsv-fk8ph Pod phase: Running level=info timestamp=2018-07-30T15:48:24.087740Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:48:24.672921Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:24.690987Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 40e3a900-477f-414a-897d-ffae06d4c944" level=info timestamp=2018-07-30T15:48:24.692928Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:24.917624Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:25.744273Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 40e3a900-477f-414a-897d-ffae06d4c944: 176" level=info timestamp=2018-07-30T15:48:26.686933Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:26.722863Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:26.725194Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T15:48:26.729590Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:27.372913Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:27.373161Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:27.399364Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:27.407468Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:27.592632Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmi2sfx7-dmk7c Pod phase: Failed level=info timestamp=2018-07-30T15:48:33.147936Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Domain defined." level=info timestamp=2018-07-30T15:48:33.723478Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:33.737410Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 38e32161-61c0-4429-a9ed-1f9501f9e8cd" level=info timestamp=2018-07-30T15:48:33.738126Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:33.745781Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.083978Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.105662Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.108149Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.128112Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.142254Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T15:48:34.145141Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:34.146253Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.222731Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.775505Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 38e32161-61c0-4429-a9ed-1f9501f9e8cd: 177" level=info timestamp=2018-07-30T15:48:57.876699Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmi6bt4k-rhm96 Pod phase: Running level=info timestamp=2018-07-30T15:48:24.177635Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:48:25.001813Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:25.021551Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 8977bb50-7e69-4dbc-b70e-fdd5db20eaca" level=info timestamp=2018-07-30T15:48:25.021823Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:25.022769Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:26.031020Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 8977bb50-7e69-4dbc-b70e-fdd5db20eaca: 177" level=info timestamp=2018-07-30T15:48:26.611334Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:26.654183Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:26.662734Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:26.673095Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:48:26.673610Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:26.679797Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:26.693044Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:27.319749Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:27.517579Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmitgd9k-fmg84 Pod phase: Failed level=info timestamp=2018-07-30T15:48:33.165433Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Domain defined." 
level=info timestamp=2018-07-30T15:48:33.876230Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:33.912258Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID c61d2226-92c9-4867-b89c-c696f2c4250c" level=info timestamp=2018-07-30T15:48:33.913561Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:33.981926Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.494981Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.545507Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.557436Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.557564Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.575933Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:48:34.594976Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:34.603318Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.635633Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.928904Z pos=monitor.go:222 component=virt-launcher msg="Found PID for c61d2226-92c9-4867-b89c-c696f2c4250c: 180" level=info timestamp=2018-07-30T15:48:57.869316Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Synced vmi" • Failure [201.017 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom interface model /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:368 should expose the right device type to the guest [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:369 Expected error: : 180000000000 expect: timer expired after 180 seconds not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1102 ------------------------------ STEP: checking the device vendor in /sys/class level=info timestamp=2018-07-30T15:50:28.051697Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi22vvl kind=VirtualMachineInstance uid=46f4b824-9410-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi22vvl-vlx5t" level=info timestamp=2018-07-30T15:50:44.168839Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi22vvl kind=VirtualMachineInstance uid=46f4b824-9410-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi22vvl-vlx5t" level=info timestamp=2018-07-30T15:50:45.599385Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi22vvl kind=VirtualMachineInstance uid=46f4b824-9410-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." 
level=info timestamp=2018-07-30T15:50:45.673305Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi22vvl kind=VirtualMachineInstance uid=46f4b824-9410-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." level=info timestamp=2018-07-30T15:53:45.883114Z pos=utils.go:1291 component=tests namespace=kubevirt-test-default name=testvmi22vvl kind=VirtualMachineInstance uid=46f4b824-9410-11e8-82a2-525500d15501 msg="Login: [{2 \r\nISOLINUX 6.04 6.04-pre1 Copyright (C) 1994-2015 H. Peter Anvin et al\r\nboot: \u001b[?7h\r\n []}]" Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running level=error timestamp=2018-07-30T15:54:03.262162Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:03.262593Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:05.609319Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:05.611158Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:08.041714Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:08.042047Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:10.246563Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:10.246823Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 2018/07/30 15:54:13 http: TLS handshake error from 10.129.0.1:40992: EOF level=error timestamp=2018-07-30T15:54:14.811724Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." 
level=info timestamp=2018-07-30T15:54:14.812000Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:16.017989Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:16.018185Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:17.216275Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:17.217101Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=error timestamp=2018-07-30T15:54:04.281544Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:04.285002Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=info timestamp=2018-07-30T15:54:04.428250Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=error timestamp=2018-07-30T15:54:06.713092Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:06.713284Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:08.932792Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." 
level=info timestamp=2018-07-30T15:54:08.932957Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:11.262436Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:11.262680Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 2018/07/30 15:54:12 http: TLS handshake error from 10.128.0.1:34594: EOF level=error timestamp=2018-07-30T15:54:12.345167Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:12.345360Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=error timestamp=2018-07-30T15:54:13.455140Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Failed instead of Running" msg="Failed to gather remote exec info for subresource request." level=info timestamp=2018-07-30T15:54:13.455596Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmitgd9k/console proto=HTTP/1.1 statusCode=400 contentLength=86 level=info timestamp=2018-07-30T15:54:14.674946Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T15:43:10.344661Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 level=info timestamp=2018-07-30T15:51:06.790737Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-07-30T15:51:06.793743Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-07-30T15:51:06.793824Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-07-30T15:51:06.793894Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-07-30T15:51:06.793939Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer" level=info timestamp=2018-07-30T15:51:06.793988Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-07-30T15:51:06.794036Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer 
kubeVirtPodInformer" level=info timestamp=2018-07-30T15:51:06.794081Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-07-30T15:51:06.832459Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-07-30T15:51:06.844040Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-07-30T15:51:06.844361Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-07-30T15:51:06.844555Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-07-30T15:51:06.851359Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:51:40.384681Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T15:51:29.812115Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:51:29.812347Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:51:29.812477Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:51:29.835648Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2sfx7 kind=VirtualMachineInstance uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:51:29.857494Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi2sfx7, existing: true\n" level=info timestamp=2018-07-30T15:51:29.857600Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T15:51:29.857653Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:51:29.857807Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:51:29.857896Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:51:29.873248Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi22vvl kind=VirtualMachineInstance uid=46f4b824-9410-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T15:51:29.873383Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi22vvl, existing: true\n" level=info timestamp=2018-07-30T15:51:29.873424Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T15:51:29.873470Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:51:29.873585Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmi22vvl kind= uid=46f4b824-9410-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T15:51:29.873660Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi22vvl kind= uid=46f4b824-9410-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T15:48:27.404333Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-07-30T15:48:27.505824Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:48:27.506263Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi6bt4k, existing: true\n" level=info timestamp=2018-07-30T15:48:27.509275Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T15:48:27.509594Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:48:27.509898Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:48:27.510222Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T15:48:27.518976Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:48:27.537009Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:48:27.538445Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi25vsv, existing: true\n" level=info timestamp=2018-07-30T15:48:27.538834Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T15:48:27.539664Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T15:48:27.543678Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T15:48:27.552315Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T15:48:27.595746Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: netcatdlf86 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' 
']' + echo succeeded + exit 0 Pod name: netcatgphzt Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcatq7r45 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Hello World! succeeded Pod name: netcatrmqts Pod phase: Failed Unable to retrieve container logs for docker://4f357b0c6c677016330c1f3f651c0f251a604afb8814ad750677f148aada5013 Pod name: netcatt8p7n Pod phase: Succeeded ++ head -n 1 +++ nc myservice.kubevirt-test-default 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: netcattj2q9 Pod phase: Succeeded Unable to retrieve container logs for docker://36a19b8cdfaadb5a38b7b74487c1bf2902e49d9523234550f716325b998c9029 Pod name: netcatxb6z9 Pod phase: Succeeded ++ head -n 1 +++ nc 10.129.0.67 1500 -i 1 -w 1 Hello World! succeeded + x='Hello World!' + echo 'Hello World!' + '[' 'Hello World!' = 'Hello World!' ']' + echo succeeded + exit 0 Pod name: virt-launcher-testvmi22vvl-vlx5t Pod phase: Running level=info timestamp=2018-07-30T15:51:45.282834Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-30T15:51:45.283038Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-30T15:51:45.287696Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-30T15:51:55.443946Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-30T15:51:55.517514Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi22vvl" level=info timestamp=2018-07-30T15:51:55.520173Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-30T15:51:55.520642Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmi25vsv-fk8ph Pod phase: Running level=info timestamp=2018-07-30T15:48:24.087740Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:48:24.672921Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:24.690987Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 40e3a900-477f-414a-897d-ffae06d4c944" level=info timestamp=2018-07-30T15:48:24.692928Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:24.917624Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:25.744273Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 40e3a900-477f-414a-897d-ffae06d4c944: 176" level=info timestamp=2018-07-30T15:48:26.686933Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:26.722863Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:26.725194Z 
pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:48:26.729590Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:27.372913Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:27.373161Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:27.399364Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:27.407468Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:27.592632Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi25vsv kind= uid=eb3149ec-940f-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmi2sfx7-dmk7c Pod phase: Failed level=info timestamp=2018-07-30T15:48:33.147936Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Domain defined." level=info timestamp=2018-07-30T15:48:33.723478Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:33.737410Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 38e32161-61c0-4429-a9ed-1f9501f9e8cd" level=info timestamp=2018-07-30T15:48:33.738126Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:33.745781Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.083978Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.105662Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.108149Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.128112Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.142254Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T15:48:34.145141Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:34.146253Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.222731Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.775505Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 38e32161-61c0-4429-a9ed-1f9501f9e8cd: 177" level=info timestamp=2018-07-30T15:48:57.876699Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi2sfx7 kind= uid=eb248dc3-940f-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmi6bt4k-rhm96 Pod phase: Running level=info timestamp=2018-07-30T15:48:24.177635Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 0 received" level=info timestamp=2018-07-30T15:48:25.001813Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:25.021551Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 8977bb50-7e69-4dbc-b70e-fdd5db20eaca" level=info timestamp=2018-07-30T15:48:25.021823Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:25.022769Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:26.031020Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 8977bb50-7e69-4dbc-b70e-fdd5db20eaca: 177" level=info timestamp=2018-07-30T15:48:26.611334Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:26.654183Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:26.662734Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:26.673095Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:48:26.673610Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:26.679797Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:26.693044Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:27.319749Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:27.517579Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi6bt4k kind= uid=eb294cc0-940f-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: virt-launcher-testvmitgd9k-fmg84 Pod phase: Failed level=info timestamp=2018-07-30T15:48:33.165433Z pos=manager.go:158 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Domain defined." 
level=info timestamp=2018-07-30T15:48:33.876230Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T15:48:33.912258Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID c61d2226-92c9-4867-b89c-c696f2c4250c" level=info timestamp=2018-07-30T15:48:33.913561Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T15:48:33.981926Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.494981Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.545507Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.557436Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.557564Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T15:48:34.575933Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Domain started." level=info timestamp=2018-07-30T15:48:34.594976Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T15:48:34.603318Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T15:48:34.635633Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T15:48:34.928904Z pos=monitor.go:222 component=virt-launcher msg="Found PID for c61d2226-92c9-4867-b89c-c696f2c4250c: 180" level=info timestamp=2018-07-30T15:48:57.869316Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmitgd9k kind= uid=eb1da33f-940f-11e8-82a2-525500d15501 msg="Synced vmi" • Failure [32.447 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with default interface model /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:382 should expose the right device type to the guest [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:383 Expected error: <*errors.errorString | 0xc4206690e0>: { s: "Timeout trying to connect to the virtual machine instance", } Timeout trying to connect to the virtual machine instance not to have occurred /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:365 ------------------------------ STEP: checking the device vendor in /sys/class 2018/07/30 11:54:48 read closing down: EOF • [SLOW TEST:28.632 seconds] 2018/07/30 11:54:49 read closing down: EOF Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:402 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:403 ------------------------------ 2018/07/30 11:55:18 read closing down: EOF • [SLOW TEST:29.903 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 2018/07/30 11:55:19 read closing down: EOF VirtualMachineInstance with custom MAC address in non-conventional format /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:414 should configure custom MAC address 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:415 ------------------------------ 2018/07/30 11:55:50 read closing down: EOF • [SLOW TEST:32.104 seconds] Networking 2018/07/30 11:55:51 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with custom MAC address and slirp interface /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:427 should configure custom MAC address /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:428 ------------------------------ 2018/07/30 11:56:45 read closing down: EOF • [SLOW TEST:54.884 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 VirtualMachineInstance with disabled automatic attachment of interfaces /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:440 should not configure any external interfaces /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:441 2018/07/30 11:56:46 read closing down: EOF ------------------------------ • [SLOW TEST:23.489 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should successfully start with hook sidecar annotation /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 ------------------------------ • [SLOW TEST:18.978 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 ------------------------------ • [SLOW TEST:21.332 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should update domain XML with SM BIOS properties /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83 ------------------------------ • [SLOW TEST:17.920 seconds] VNC /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54 with VNC connection /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62 should allow accessing the VNC device /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64 ------------------------------ ••• ------------------------------ • [SLOW TEST:33.042 seconds] Storage 2018/07/30 11:58:42 read closing down: EOF /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/07/30 11:59:15 read closing down: EOF • [SLOW TEST:33.070 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance 
/root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 15:59:43 http: TLS handshake error from 10.129.0.1:41204: EOF 2018/07/30 15:59:53 http: TLS handshake error from 10.129.0.1:41210: EOF 2018/07/30 16:00:03 http: TLS handshake error from 10.129.0.1:41216: EOF 2018/07/30 16:00:13 http: TLS handshake error from 10.129.0.1:41224: EOF 2018/07/30 16:00:23 http: TLS handshake error from 10.129.0.1:41228: EOF 2018/07/30 16:00:33 http: TLS handshake error from 10.129.0.1:41238: EOF 2018/07/30 16:00:43 http: TLS handshake error from 10.129.0.1:41242: EOF 2018/07/30 16:00:53 http: TLS handshake error from 10.129.0.1:41248: EOF level=info timestamp=2018-07-30T16:01:00.160504Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 16:01:03 http: TLS handshake error from 10.129.0.1:41256: EOF 2018/07/30 16:01:13 http: TLS handshake error from 10.129.0.1:41260: EOF 2018/07/30 16:01:23 http: TLS handshake error from 10.129.0.1:41266: EOF 2018/07/30 16:01:33 http: TLS handshake error from 10.129.0.1:41272: EOF 2018/07/30 16:01:43 http: TLS handshake error from 10.129.0.1:41278: EOF 2018/07/30 16:01:53 http: TLS handshake error from 10.129.0.1:41284: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T16:01:27.493121Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:01:30.080954Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 16:01:32 http: TLS handshake error from 10.128.0.1:36984: EOF level=info timestamp=2018-07-30T16:01:35.822233Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:01:37.688655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:01:37.689636Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:01:37.705396Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:01:42 http: TLS handshake error from 10.128.0.1:37034: EOF level=info timestamp=2018-07-30T16:01:47.799967Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 
username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:01:48.160106Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:01:48.175077Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:01:48.188044Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:01:52 http: TLS handshake error from 10.128.0.1:37086: EOF level=info timestamp=2018-07-30T16:01:57.951638Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:02:00.010872Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T15:58:08.224085Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidrtpk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidrtpk" level=info timestamp=2018-07-30T15:58:08.291209Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidrtpk\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmidrtpk, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5a100dc7-9411-11e8-82a2-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidrtpk" level=info timestamp=2018-07-30T15:58:08.417740Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7qjn9 kind= uid=5a3b4336-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:58:08.417901Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7qjn9 kind= uid=5a3b4336-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:58:08.565237Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7qjn9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7qjn9" level=info timestamp=2018-07-30T15:58:41.637153Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7zhxp kind= uid=6e06e582-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:58:41.638840Z pos=preset.go:171 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmi7zhxp kind= uid=6e06e582-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:58:41.805136Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7zhxp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7zhxp" level=info timestamp=2018-07-30T15:58:41.997131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7zhxp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7zhxp" level=info timestamp=2018-07-30T15:59:14.965817Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:59:14.976455Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:00:29.522922Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:00:29.523658Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:00:29.839350Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqfmr6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqfmr6" level=info timestamp=2018-07-30T16:00:29.870104Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqfmr6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqfmr6" Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:51:40.384681Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T15:58:55.537096Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:58:55.537201Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:58:55.537738Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T15:58:55.538247Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi25hd6, existing: false\n" level=info timestamp=2018-07-30T15:58:55.538371Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:58:55.538498Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:58:55.538679Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:58:58.647578Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi25hd6, existing: false\n" level=info timestamp=2018-07-30T15:58:58.648545Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:58:58.649100Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:58:58.649490Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:59:00.754859Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmifr9fm, existing: false\n" level=info timestamp=2018-07-30T15:59:00.755286Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:59:00.755863Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifr9fm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:59:00.756148Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmifr9fm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T16:00:28.784344Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:00:44.633132Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiqfmr6, existing: false\n" level=info timestamp=2018-07-30T16:00:44.633653Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:00:44.635645Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:00:44.636239Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T16:00:45.667310Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiqfmr6, existing: true\n" level=info timestamp=2018-07-30T16:00:45.667520Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T16:00:45.667600Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:00:45.667938Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T16:00:45.705409Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:00:45.705532Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiqfmr6, existing: true\n" level=info timestamp=2018-07-30T16:00:45.705557Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T16:00:45.705583Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:00:45.705667Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T16:00:45.708051Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiqfmr6-pppqt Pod phase: Running level=info timestamp=2018-07-30T16:00:34.460101Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-30T16:00:34.461063Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-30T16:00:34.467459Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-30T16:00:44.478540Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-30T16:00:44.567994Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqfmr6" level=info timestamp=2018-07-30T16:00:44.570372Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-30T16:00:44.571400Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [166.134 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.007s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting and stopping the VirtualMachineInstance number of times STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T15:59:16.165369Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiqfmr6-c6ht8" level=info timestamp=2018-07-30T15:59:32.650832Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiqfmr6-c6ht8" level=info timestamp=2018-07-30T15:59:34.071807Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T15:59:34.095577Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T16:00:30.627394Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiqfmr6-c6ht8" level=info timestamp=2018-07-30T16:00:30.627581Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiqfmr6-c6ht8" level=info timestamp=2018-07-30T16:00:30.628263Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T16:00:30.628731Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqfmr6 kind=VirtualMachineInstance uid=81e29b14-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
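The failure above is a Gomega timeout raised from tests/utils.go:1065 while waiting for the VMI to reach the Running phase; the bare "Expected false to equal true" is the boolean phase check inside that wait. Below is a minimal sketch of a wait of this shape, assuming a hypothetical getVMIPhase helper and the 90s/1s polling implied by the log — this is not the actual utils.go code.

// Minimal sketch, not KubeVirt's actual tests/utils.go: a Gomega Eventually that
// waits for a VMI to enter the Running phase. getVMIPhase is a hypothetical
// stand-in for a real client lookup of the VMI's status.phase.
package sketch

import (
	"time"

	. "github.com/onsi/gomega"
)

// getVMIPhase is assumed to return the current phase ("Pending", "Scheduled",
// "Running", "Failed", ...) of the named VMI; it is left abstract here.
var getVMIPhase func(namespace, name string) string

func waitForVMIRunning(namespace, name string) {
	// Poll once per second for up to 90 seconds; when the deadline passes the
	// assertion fails with a message matching the log's
	// "Expected : false to equal : true".
	Eventually(func() bool {
		return getVMIPhase(namespace, name) == "Running"
	}, 90*time.Second, 1*time.Second).Should(Equal(true),
		"Timed out waiting for VMI to enter Running phase")
}
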
Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 16:02:33 http: TLS handshake error from 10.129.0.1:41308: EOF 2018/07/30 16:02:43 http: TLS handshake error from 10.129.0.1:41314: EOF 2018/07/30 16:02:53 http: TLS handshake error from 10.129.0.1:41322: EOF level=info timestamp=2018-07-30T16:03:00.142582Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 16:03:03 http: TLS handshake error from 10.129.0.1:41326: EOF 2018/07/30 16:03:13 http: TLS handshake error from 10.129.0.1:41332: EOF 2018/07/30 16:03:23 http: TLS handshake error from 10.129.0.1:41340: EOF 2018/07/30 16:03:33 http: TLS handshake error from 10.129.0.1:41348: EOF 2018/07/30 16:03:43 http: TLS handshake error from 10.129.0.1:41352: EOF 2018/07/30 16:03:53 http: TLS handshake error from 10.129.0.1:41358: EOF 2018/07/30 16:04:03 http: TLS handshake error from 10.129.0.1:41366: EOF 2018/07/30 16:04:13 http: TLS handshake error from 10.129.0.1:41372: EOF 2018/07/30 16:04:23 http: TLS handshake error from 10.129.0.1:41378: EOF 2018/07/30 16:04:33 http: TLS handshake error from 10.129.0.1:41382: EOF 2018/07/30 16:04:43 http: TLS handshake error from 10.129.0.1:41388: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T16:04:07.265678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:04:10.020424Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:04:10.029958Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:04:11.474010Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:04:12 http: TLS handshake error from 10.128.0.1:37810: EOF level=info timestamp=2018-07-30T16:04:21.753988Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:04:22 http: TLS handshake error from 10.128.0.1:37860: EOF level=info timestamp=2018-07-30T16:04:30.022384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T16:04:32.000745Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:04:32 http: TLS handshake error from 10.128.0.1:37914: EOF level=info timestamp=2018-07-30T16:04:37.445151Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 
statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:04:40.304341Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:04:40.304784Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:04:42 http: TLS handshake error from 10.128.0.1:37964: EOF level=info timestamp=2018-07-30T16:04:42.085373Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T15:58:08.565237Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7qjn9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7qjn9" level=info timestamp=2018-07-30T15:58:41.637153Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7zhxp kind= uid=6e06e582-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:58:41.638840Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7zhxp kind= uid=6e06e582-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T15:58:41.805136Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7zhxp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7zhxp" level=info timestamp=2018-07-30T15:58:41.997131Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi7zhxp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi7zhxp" level=info timestamp=2018-07-30T15:59:14.965817Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T15:59:14.976455Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=81e29b14-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:00:29.522922Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:00:29.523658Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqfmr6 kind= uid=ae51d64d-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:00:29.839350Z pos=vmi.go:157 
component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqfmr6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqfmr6" level=info timestamp=2018-07-30T16:00:29.870104Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqfmr6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqfmr6" level=info timestamp=2018-07-30T16:02:00.674946Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilwmwg kind= uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:02:00.677166Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilwmwg kind= uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:03:14.741918Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilwmwg kind= uid=10cdc114-9412-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:03:14.743434Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilwmwg kind= uid=10cdc114-9412-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:51:40.384681Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T15:58:55.537096Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:58:55.537201Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:58:55.537738Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:58:55.538247Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi25hd6, existing: false\n" level=info timestamp=2018-07-30T15:58:55.538371Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:58:55.538498Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:58:55.538679Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T15:58:58.647578Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi25hd6, existing: false\n" level=info timestamp=2018-07-30T15:58:58.648545Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:58:58.649100Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:58:58.649490Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi25hd6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T15:59:00.754859Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmifr9fm, existing: false\n" level=info timestamp=2018-07-30T15:59:00.755286Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T15:59:00.755863Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmifr9fm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T15:59:00.756148Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmifr9fm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=info timestamp=2018-07-30T16:03:13.691642Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:03:30.147505Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilwmwg, existing: false\n" level=info timestamp=2018-07-30T16:03:30.165638Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:03:30.169575Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:03:30.170414Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:03:30.644267Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilwmwg, existing: true\n" level=info timestamp=2018-07-30T16:03:30.644434Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-07-30T16:03:30.644465Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:03:30.644647Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmilwmwg kind= uid=10cdc114-9412-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T16:03:30.710341Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilwmwg kind= uid=10cdc114-9412-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T16:03:30.711776Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmilwmwg, existing: true\n" level=info timestamp=2018-07-30T16:03:30.712624Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T16:03:30.716306Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:03:30.716802Z pos=vm.go:419 component=virt-handler namespace=kubevirt-test-default name=testvmilwmwg kind= uid=10cdc114-9412-11e8-82a2-525500d15501 msg="No update processing required" level=info timestamp=2018-07-30T16:03:30.717134Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmilwmwg kind= uid=10cdc114-9412-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmilwmwg-rvt76 Pod phase: Running level=info timestamp=2018-07-30T16:03:20.003148Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-07-30T16:03:20.003681Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-07-30T16:03:20.008128Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-07-30T16:03:30.015823Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-07-30T16:03:30.080785Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmilwmwg" level=info timestamp=2018-07-30T16:03:30.082487Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-07-30T16:03:30.082996Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure [165.209 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Timed out after 90.025s. 
Timed out waiting for VMI to enter Running phase Expected : false to equal : true /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 ------------------------------ STEP: Starting and stopping the VirtualMachineInstance number of times STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T16:02:01.949257Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmilwmwg-6d44j" level=info timestamp=2018-07-30T16:02:17.501432Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmilwmwg-6d44j" level=info timestamp=2018-07-30T16:02:18.937892Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T16:02:19.000221Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." STEP: Starting a VirtualMachineInstance STEP: Waiting until the VirtualMachineInstance will start level=info timestamp=2018-07-30T16:03:15.801872Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmilwmwg-6d44j" level=info timestamp=2018-07-30T16:03:15.802067Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmilwmwg-6d44j" level=info timestamp=2018-07-30T16:03:15.802816Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T16:03:15.803009Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilwmwg kind=VirtualMachineInstance uid=e4ab50bc-9411-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
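For context on the STEP lines above ("Starting and stopping the VirtualMachineInstance number of times"), the two failing table entries exercise a start/wait/delete loop: in both cases the first cycle reaches Running, while on the second attempt virt-handler reports "vmi is in phase: Failed" and the 90s wait expires. A rough sketch of that loop shape follows; startVMI, deleteVMI and the wait helpers are all hypothetical, not the code in storage_test.go.

// Rough sketch only, not the actual storage_test.go: the shape of a test that
// starts and stops the same VMI several times. All helpers are hypothetical;
// waitForVMIRunning could be the Eventually-based wait sketched earlier.
package sketch

import "github.com/onsi/ginkgo"

var (
	startVMI          func(namespace, name string) // create the VMI object
	deleteVMI         func(namespace, name string) // delete it again
	waitForVMIRunning func(namespace, name string) // block until phase == Running
	waitForVMIGone    func(namespace, name string) // block until it is removed
)

func startAndStopVMI(namespace, name string, times int) {
	ginkgo.By("Starting and stopping the VirtualMachineInstance number of times")
	for i := 0; i < times; i++ {
		ginkgo.By("Starting a VirtualMachineInstance")
		startVMI(namespace, name)

		ginkgo.By("Waiting until the VirtualMachineInstance will start")
		// This is the step that times out in the failures above: the second
		// iteration's VMI never leaves the Failed phase.
		waitForVMIRunning(namespace, name)

		deleteVMI(namespace, name)
		waitForVMIGone(namespace, name)
	}
}
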
2018/07/30 12:05:25 read closing down: EOF • [SLOW TEST:39.242 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113 should create a writeable emptyDisk with the right capacity /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115 ------------------------------ • [SLOW TEST:40.455 seconds] 2018/07/30 12:06:06 read closing down: EOF Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined and a specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163 should create a writeable emptyDisk with the specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165 ------------------------------ • [SLOW TEST:33.084 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 2018/07/30 12:06:39 read closing down: EOF With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should be successfully started /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207 ------------------------------ 2018/07/30 12:08:33 read closing down: EOF • [SLOW TEST:114.430 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 2018/07/30 12:08:33 read closing down: EOF Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should not persist data /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218 ------------------------------ 2018/07/30 12:12:45 read closing down: EOF • [SLOW TEST:251.990 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With VirtualMachineInstance with two PVCs /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266 should start vmi multiple times /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278 ------------------------------ • ------------------------------ • [SLOW TEST:18.499 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 should start it /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:80 ------------------------------ • [SLOW TEST:19.048 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 should attach virt-launcher to it /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:86 ------------------------------ ••••2018/07/30 12:13:58 read closing down: EOF ------------------------------ • [SLOW TEST:33.960 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Alpine as 
first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ 2018/07/30 12:14:22 read closing down: EOF • [SLOW TEST:24.058 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:174 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Cirros as first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:17.413 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206 should retry starting the VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:207 ------------------------------ • [SLOW TEST:20.259 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:205 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:206 should log warning and proceed once the secret is there /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:237 ------------------------------ • [SLOW TEST:40.642 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-launcher crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:285 should be stopped and have Failed phase /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:286 ------------------------------ Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 16:14:53 http: TLS handshake error from 10.129.0.1:41768: EOF 2018/07/30 16:15:03 http: TLS handshake error from 10.129.0.1:41776: EOF 2018/07/30 16:15:13 http: TLS handshake error from 10.129.0.1:41782: EOF 2018/07/30 16:15:23 http: TLS handshake error from 10.129.0.1:41786: EOF 2018/07/30 16:15:33 http: TLS handshake error from 10.129.0.1:41796: EOF 2018/07/30 16:15:43 http: TLS handshake error from 10.129.0.1:41800: EOF 2018/07/30 16:15:53 http: TLS handshake error from 10.129.0.1:41808: EOF 2018/07/30 16:16:03 http: TLS handshake error from 10.129.0.1:41814: EOF 2018/07/30 16:16:13 http: TLS handshake error from 10.129.0.1:41820: EOF 2018/07/30 16:16:23 http: TLS handshake error from 10.129.0.1:41826: EOF level=info timestamp=2018-07-30T16:16:30.184001Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 16:16:33 http: TLS handshake error from 10.129.0.1:41834: EOF 2018/07/30 16:16:43 http: TLS handshake error from 10.129.0.1:41840: EOF 
2018/07/30 16:16:53 http: TLS handshake error from 10.129.0.1:41846: EOF 2018/07/30 16:17:03 http: TLS handshake error from 10.129.0.1:41852: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T16:16:29.682561Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:16:32 http: TLS handshake error from 10.128.0.1:41688: EOF level=info timestamp=2018-07-30T16:16:40.031227Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:16:42 http: TLS handshake error from 10.128.0.1:41738: EOF level=info timestamp=2018-07-30T16:16:43.640851Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:16:49.868440Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:16:49.899336Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:16:50.116187Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:16:52 http: TLS handshake error from 10.128.0.1:41788: EOF level=info timestamp=2018-07-30T16:16:54.744765Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:16:54.797340Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:16:54.816750Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:17:00.152637Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T16:17:00.510436Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:17:02 http: TLS handshake error from 10.128.0.1:41844: EOF Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T16:13:24.566881Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76kq2 kind= uid=7c4d27bb-9413-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:13:24.567575Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi76kq2 kind= 
uid=7c4d27bb-9413-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:13:24.867016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi76kq2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi76kq2" level=info timestamp=2018-07-30T16:13:58.421869Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimdc8f kind= uid=907b5534-9413-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:13:58.424574Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimdc8f kind= uid=907b5534-9413-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:14:22.490390Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi887cs kind= uid=9ed28fd0-9413-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:14:22.491999Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi887cs kind= uid=9ed28fd0-9413-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:14:22.851846Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi887cs\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi887cs" level=info timestamp=2018-07-30T16:14:40.042942Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxjzr kind= uid=a945dca2-9413-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:14:40.046275Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirxjzr kind= uid=a945dca2-9413-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:14:40.606349Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirxjzr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirxjzr" level=info timestamp=2018-07-30T16:15:00.566900Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis79gp kind= uid=b58370d3-9413-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:15:00.567141Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis79gp kind= uid=b58370d3-9413-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:15:40.802000Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi755lk kind= uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:15:40.802865Z pos=preset.go:171 component=virt-controller service=http 
namespace=kubevirt-test-default name=testvmi755lk kind= uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:51:40.384681Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-c8dmh Pod phase: Running level=info timestamp=2018-07-30T16:16:05.571578Z pos=cache.go:121 component=virt-handler msg="List domains from sock /var/run/kubevirt/sockets/kubevirt-test-default_testvmi22vvl_sock" level=error timestamp=2018-07-30T16:16:05.572063Z pos=cache.go:124 component=virt-handler reason="dial unix /var/run/kubevirt/sockets/kubevirt-test-default_testvmi22vvl_sock: connect: connection refused" msg="failed to connect to cmd client socket" level=info timestamp=2018-07-30T16:16:05.572384Z pos=cache.go:121 component=virt-handler msg="List domains from sock /var/run/kubevirt/sockets/kubevirt-test-default_testvmi755lk_sock" level=info timestamp=2018-07-30T16:16:05.610323Z pos=cache.go:121 component=virt-handler msg="List domains from sock /var/run/kubevirt/sockets/kubevirt-test-default_testvmi887cs_sock" level=error timestamp=2018-07-30T16:16:05.611051Z pos=cache.go:124 component=virt-handler reason="dial unix /var/run/kubevirt/sockets/kubevirt-test-default_testvmi887cs_sock: connect: connection refused" msg="failed to connect to cmd client socket" level=info timestamp=2018-07-30T16:16:05.611683Z pos=vm.go:725 component=virt-handler namespace=kubevirt-test-default name=testvmi755lk kind=Domain uid= msg="Domain is in state Running reason Unknown" level=info timestamp=2018-07-30T16:16:05.672093Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-07-30T16:16:05.715386Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-07-30T16:16:05.836580Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" level=info timestamp=2018-07-30T16:16:06.073752Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi755lk, existing: true\n" level=info timestamp=2018-07-30T16:16:06.073875Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T16:16:06.073906Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T16:16:06.073926Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T16:16:06.074061Z pos=vm.go:416 component=virt-handler namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Processing vmi update" level=info timestamp=2018-07-30T16:16:06.385344Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." Pod name: virt-handler-rzrl5 Pod phase: Running level=error timestamp=2018-07-30T16:15:40.630339Z pos=vm.go:424 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=VirtualMachineInstance uid= reason="connection is shut down" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-07-30T16:15:40.630404Z pos=vm.go:251 component=virt-handler reason="connection is shut down" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirxjzr" level=info timestamp=2018-07-30T16:15:43.600184Z pos=vm.go:746 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-07-30T16:15:43.602471Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirxjzr, existing: false\n" level=info timestamp=2018-07-30T16:15:43.603381Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:15:43.604217Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:15:43.605514Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:15:43.608391Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirxjzr, existing: false\n" level=info timestamp=2018-07-30T16:15:43.609161Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:15:43.609392Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:15:43.609585Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:15:50.871902Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmirxjzr, existing: false\n" level=info timestamp=2018-07-30T16:15:50.872620Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:15:50.873331Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:15:50.875231Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmirxjzr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi755lk-58ssn Pod phase: Running level=info timestamp=2018-07-30T16:15:58.467257Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID f2818a2c-afce-4e50-893e-4c0f35041abc" level=info timestamp=2018-07-30T16:15:58.471986Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T16:15:58.475111Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T16:15:58.497652Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:15:59.052463Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T16:15:59.077579Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T16:15:59.091320Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmi755lk kind= uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T16:15:59.093229Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi755lk kind= uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T16:15:59.114420Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:15:59.114600Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T16:15:59.146915Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T16:15:59.158104Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:15:59.243875Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi755lk kind= uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T16:15:59.484697Z pos=monitor.go:222 component=virt-launcher msg="Found PID for f2818a2c-afce-4e50-893e-4c0f35041abc: 160" level=info timestamp=2018-07-30T16:16:06.224435Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Synced vmi" Pod name: vmi-killerd5kcb Pod phase: Succeeded Pod name: vmi-killers8sft Pod phase: Succeeded • Failure [84.443 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:309 should recover and continue management [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:310 Expected : Running to equal : Failed /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:336 ------------------------------ level=info timestamp=2018-07-30T16:15:41.919041Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi755lk-58ssn" level=info timestamp=2018-07-30T16:15:57.834973Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi755lk-58ssn" level=info timestamp=2018-07-30T16:15:59.771846Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T16:15:59.824077Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
STEP: Crashing the virt-handler STEP: Killing the VirtualMachineInstance level=info timestamp=2018-07-30T16:16:05.187000Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi755lk-58ssn" level=info timestamp=2018-07-30T16:16:05.187206Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi755lk-58ssn" level=info timestamp=2018-07-30T16:16:05.187958Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T16:16:05.188204Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." level=info timestamp=2018-07-30T16:16:07.069891Z pos=utils.go:256 component=tests namespace=kubevirt-test-default name=testvmi755lk kind=VirtualMachineInstance uid=cd80ce4b-9413-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." STEP: Checking that VirtualMachineInstance has 'Failed' phase • [SLOW TEST:47.146 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler is responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:340 should indicate that a node is ready for vmis /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:341 ------------------------------ • [SLOW TEST:77.584 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 when virt-handler is not responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:371 the node controller should react /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:410 ------------------------------ • [SLOW TEST:23.378 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with node tainted /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:463 the vmi with tolerations should be scheduled /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:485 ------------------------------ • ------------------------------ • [SLOW TEST:69.865 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-default /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:75.128 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:535 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-alternative /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.161 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should enable emulation in virt-launcher [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:604 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.124 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should be reflected in domain XML [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:641 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.205 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:74 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:592 should request a TUN device but not KVM [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:685 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:600 ------------------------------ •••• ------------------------------ • [SLOW TEST:65.954 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Delete a VirtualMachineInstance's Pod /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:837 should result in the VirtualMachineInstance moving to a finalized state /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:838 ------------------------------ • [SLOW TEST:51.912 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869 with an active pod. 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:870 should result in pod being terminated /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:871 ------------------------------ 2018/07/30 12:24:42 read closing down: EOF Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running 2018/07/30 16:23:33 http: TLS handshake error from 10.129.0.1:42090: EOF 2018/07/30 16:23:43 http: TLS handshake error from 10.129.0.1:42098: EOF 2018/07/30 16:23:53 http: TLS handshake error from 10.129.0.1:42104: EOF level=info timestamp=2018-07-30T16:24:00.440684Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 16:24:03 http: TLS handshake error from 10.129.0.1:42108: EOF 2018/07/30 16:24:13 http: TLS handshake error from 10.129.0.1:42116: EOF level=info timestamp=2018-07-30T16:24:20.517748Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 16:24:23 http: TLS handshake error from 10.129.0.1:42124: EOF level=info timestamp=2018-07-30T16:24:30.648354Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/07/30 16:24:33 http: TLS handshake error from 10.129.0.1:42130: EOF level=error timestamp=2018-07-30T16:24:42.447450Z pos=subresource.go:85 component=virt-api msg= 2018/07/30 16:24:42 http: response.WriteHeader on hijacked connection level=error timestamp=2018-07-30T16:24:42.450220Z pos=subresource.go:97 component=virt-api reason="read tcp 10.129.0.79:8443->10.128.0.1:36902: use of closed network connection" msg="error ecountered reading from websocket stream" level=info timestamp=2018-07-30T16:24:42.451057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmicjhmz/console proto=HTTP/1.1 statusCode=200 contentLength=0 2018/07/30 16:24:43 http: TLS handshake error from 10.129.0.1:42136: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running 2018/07/30 16:24:02 http: TLS handshake error from 10.128.0.1:44026: EOF level=info timestamp=2018-07-30T16:24:09.831203Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:24:12 http: TLS handshake error from 10.128.0.1:44076: EOF level=info timestamp=2018-07-30T16:24:18.076629Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:24:20.140536Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:24:22 http: TLS handshake error from 10.128.0.1:44132: EOF level=info timestamp=2018-07-30T16:24:26.367927Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:24:26.440784Z 
pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:24:30.607917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:24:32 http: TLS handshake error from 10.128.0.1:44186: EOF level=info timestamp=2018-07-30T16:24:35.938262Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:24:35.967375Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:24:35.991216Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:24:40.715635Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:24:42 http: TLS handshake error from 10.128.0.1:44236: EOF Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T16:22:02.361011Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8gc94 kind= uid=b0ed67e7-9414-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:22:02.361516Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8gc94 kind= uid=b0ed67e7-9414-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:22:02.490186Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8gc94\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8gc94" level=info timestamp=2018-07-30T16:22:02.556797Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8gc94\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8gc94" level=info timestamp=2018-07-30T16:22:02.589777Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8gc94\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8gc94, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b0ed67e7-9414-11e8-82a2-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8gc94" level=info timestamp=2018-07-30T16:22:03.206475Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikqglk kind= 
uid=b16f1afa-9414-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:22:03.206956Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikqglk kind= uid=b16f1afa-9414-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:22:03.358140Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikqglk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikqglk" level=info timestamp=2018-07-30T16:22:03.400370Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikqglk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikqglk" level=info timestamp=2018-07-30T16:23:09.265436Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiknzs2 kind= uid=d8cd12a2-9414-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:23:09.270131Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiknzs2 kind= uid=d8cd12a2-9414-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:23:09.925114Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiknzs2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiknzs2" level=info timestamp=2018-07-30T16:24:01.312043Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:24:01.314379Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:24:01.534094Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicjhmz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicjhmz" Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:51:40.384681Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-84297 Pod phase: Running level=info timestamp=2018-07-30T16:21:59.555205Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:21:59.561178Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T16:21:59.561853Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigjfdc, existing: true\n" level=info timestamp=2018-07-30T16:21:59.562524Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T16:21:59.563248Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:21:59.563965Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:21:59.564924Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:21:59.796326Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigjfdc, existing: false\n" level=info timestamp=2018-07-30T16:21:59.796533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:21:59.802882Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:21:59.803216Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:22:14.523550Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigjfdc, existing: false\n" level=info timestamp=2018-07-30T16:22:14.527581Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:22:14.528907Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:22:14.530773Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-fcwk4 Pod phase: Running level=info timestamp=2018-07-30T16:24:23.515747Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiknzs2, existing: false\n" level=info timestamp=2018-07-30T16:24:23.516047Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:24:23.516222Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiknzs2 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:24:23.516562Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiknzs2 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:24:23.652017Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmiknzs2, existing: false\n" level=info timestamp=2018-07-30T16:24:23.652168Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:24:23.652357Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmiknzs2 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-30T16:24:23.652542Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmiknzs2 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:24:41.898810Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicjhmz, existing: true\n" level=info timestamp=2018-07-30T16:24:41.899003Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T16:24:41.899094Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T16:24:41.899135Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T16:24:41.899285Z pos=vm.go:370 component=virt-handler namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T16:24:41.899333Z pos=vm.go:407 component=virt-handler namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T16:24:41.900467Z pos=vm.go:556 component=virt-handler namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Grace period expired, killing deleted VirtualMachineInstance testvmicjhmz" Pod name: virt-launcher-testvmicjhmz-gdtc9 Pod phase: Running level=info timestamp=2018-07-30T16:24:19.259376Z pos=client.go:119 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-07-30T16:24:19.267623Z pos=virt-launcher.go:214 component=virt-launcher msg="Detected domain with UUID 4c0a0b50-9477-4739-8c46-c8179bd52884" level=info timestamp=2018-07-30T16:24:19.268422Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-07-30T16:24:19.276544Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:24:19.996029Z pos=client.go:136 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-07-30T16:24:20.026835Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T16:24:20.030910Z pos=manager.go:189 component=virt-launcher namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Domain started." 
level=info timestamp=2018-07-30T16:24:20.034945Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T16:24:20.041590Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:24:20.041876Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T16:24:20.065378Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T16:24:20.071476Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:24:20.238641Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T16:24:20.277605Z pos=monitor.go:222 component=virt-launcher msg="Found PID for 4c0a0b50-9477-4739-8c46-c8179bd52884: 165" level=info timestamp=2018-07-30T16:24:41.977737Z pos=monitor.go:266 component=virt-launcher msg="Received signal 15." • Failure [46.320 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869 with ACPI and 0 grace period seconds /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:895 should result in vmi status failed [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:896 Timed out after 5.000s. Expected : Running to equal : Failed /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:917 ------------------------------ STEP: Creating the VirtualMachineInstance level=info timestamp=2018-07-30T16:24:02.484906Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmicjhmz-gdtc9" level=info timestamp=2018-07-30T16:24:18.504849Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmicjhmz-gdtc9" level=info timestamp=2018-07-30T16:24:20.740040Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T16:24:20.815157Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
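This failure is the zero-grace-period deletion case: the VMI is deleted with a zero grace period in effect and its phase is expected to become Failed within 5 seconds (how the test applies the grace period is not visible in this log); here it was still Running when the poll gave up, even though virt-handler had already logged "Grace period expired, killing deleted VirtualMachineInstance testvmicjhmz". A hand-run equivalent might look like the sketch below; names come from this run, and the exact flag combination depends on the kubectl version.

  # Delete the VMI and poll its phase; some kubectl versions also require --force
  # when --grace-period=0 is requested
  kubectl delete vmi testvmicjhmz -n kubevirt-test-default --grace-period=0
  kubectl get vmi testvmicjhmz -n kubevirt-test-default -o jsonpath='{.status.phase}{"\n"}'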
STEP: Deleting the VirtualMachineInstance STEP: Verifying VirtualMachineInstance's status is Failed 2018/07/30 12:25:32 read closing down: EOF Pod name: disks-images-provider-ctcjh Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-dqv4m Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-8n6wn Pod phase: Running level=error timestamp=2018-07-30T16:24:42.450220Z pos=subresource.go:97 component=virt-api reason="read tcp 10.129.0.79:8443->10.128.0.1:36902: use of closed network connection" msg="error ecountered reading from websocket stream" level=info timestamp=2018-07-30T16:24:42.451057Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmicjhmz/console proto=HTTP/1.1 statusCode=200 contentLength=0 2018/07/30 16:24:43 http: TLS handshake error from 10.129.0.1:42136: EOF 2018/07/30 16:24:53 http: TLS handshake error from 10.129.0.1:42144: EOF 2018/07/30 16:25:03 http: TLS handshake error from 10.129.0.1:42150: EOF level=info timestamp=2018-07-30T16:25:07.442067Z pos=subresource.go:75 component=virt-api msg="Websocket connection upgraded" 2018/07/30 16:25:13 http: TLS handshake error from 10.129.0.1:42156: EOF 2018/07/30 16:25:23 http: TLS handshake error from 10.129.0.1:42164: EOF level=error timestamp=2018-07-30T16:25:32.540514Z pos=subresource.go:97 component=virt-api reason="websocket: close 1006 (abnormal closure): unexpected EOF" msg="error ecountered reading from websocket stream" level=error timestamp=2018-07-30T16:25:32.541413Z pos=subresource.go:106 component=virt-api reason="websocket: close 1006 (abnormal closure): unexpected EOF" msg="Error in websocket proxy" 2018/07/30 16:25:32 http: response.WriteHeader on hijacked connection level=info timestamp=2018-07-30T16:25:32.542461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi8wnr2/console proto=HTTP/1.1 statusCode=500 contentLength=0 2018/07/30 16:25:33 http: TLS handshake error from 10.129.0.1:42168: EOF level=error timestamp=2018-07-30T16:25:33.573110Z pos=subresource.go:91 component=virt-api reason="tls: use of closed connection" msg="error ecountered reading from remote podExec stream" 2018/07/30 16:25:43 http: TLS handshake error from 10.129.0.1:42174: EOF Pod name: virt-api-7d79764579-g72bg Pod phase: Running level=info timestamp=2018-07-30T16:25:00.586631Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T16:25:01.005254Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:25:02 http: TLS handshake error from 10.128.0.1:44340: EOF level=info timestamp=2018-07-30T16:25:11.256597Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:25:12 http: TLS handshake error from 10.128.0.1:44396: EOF level=info timestamp=2018-07-30T16:25:18.579984Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:25:21.555242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:25:22 http: TLS handshake error from 10.128.0.1:44446: EOF level=info timestamp=2018-07-30T16:25:27.366412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:25:27.368033Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-07-30T16:25:30.599103Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-07-30T16:25:31.770307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/07/30 16:25:32 http: TLS handshake error from 10.128.0.1:44500: EOF 2018/07/30 16:25:42 http: TLS handshake error from 10.128.0.1:44550: EOF level=info timestamp=2018-07-30T16:25:42.143469Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-controller-7d57d96b65-68vs2 Pod phase: Running level=info timestamp=2018-07-30T16:22:02.556797Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8gc94\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8gc94" level=info timestamp=2018-07-30T16:22:02.589777Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8gc94\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8gc94, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: b0ed67e7-9414-11e8-82a2-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8gc94" level=info timestamp=2018-07-30T16:22:03.206475Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikqglk kind= uid=b16f1afa-9414-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:22:03.206956Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikqglk kind= uid=b16f1afa-9414-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:22:03.358140Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikqglk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikqglk" level=info 
timestamp=2018-07-30T16:22:03.400370Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikqglk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikqglk" level=info timestamp=2018-07-30T16:23:09.265436Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiknzs2 kind= uid=d8cd12a2-9414-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:23:09.270131Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiknzs2 kind= uid=d8cd12a2-9414-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:23:09.925114Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiknzs2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiknzs2" level=info timestamp=2018-07-30T16:24:01.312043Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:24:01.314379Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmicjhmz kind= uid=f7d2e556-9414-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:24:01.534094Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmicjhmz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmicjhmz" level=info timestamp=2018-07-30T16:24:47.582367Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-07-30T16:24:47.593092Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-07-30T16:24:47.907669Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8wnr2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi8wnr2" Pod name: virt-controller-7d57d96b65-g5k4q Pod phase: Running level=info timestamp=2018-07-30T15:51:40.384681Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-84297 Pod phase: Running level=info timestamp=2018-07-30T16:21:59.555205Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-07-30T16:21:59.561178Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:21:59.561853Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigjfdc, existing: true\n" level=info timestamp=2018-07-30T16:21:59.562524Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-07-30T16:21:59.563248Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:21:59.563965Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:21:59.564924Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind= uid=83a06718-9414-11e8-82a2-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:21:59.796326Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigjfdc, existing: false\n" level=info timestamp=2018-07-30T16:21:59.796533Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:21:59.802882Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:21:59.803216Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:22:14.523550Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmigjfdc, existing: false\n" level=info timestamp=2018-07-30T16:22:14.527581Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:22:14.528907Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:22:14.530773Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-alternative name=testvmigjfdc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-fcwk4 Pod phase: Running level=info timestamp=2018-07-30T16:25:38.508213Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:25:38.508426Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:25:38.509140Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-07-30T16:25:38.509592Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmicjhmz, existing: false\n" level=info timestamp=2018-07-30T16:25:38.509849Z pos=vm.go:329 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-07-30T16:25:38.510084Z pos=vm.go:413 component=virt-handler namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-07-30T16:25:38.510293Z pos=vm.go:440 component=virt-handler namespace=kubevirt-test-default name=testvmicjhmz kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-07-30T16:25:42.626208Z pos=vm.go:313 component=virt-handler msg="Processing vmi testvmi8wnr2, existing: true\n" level=info timestamp=2018-07-30T16:25:42.626457Z pos=vm.go:315 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-07-30T16:25:42.626569Z pos=vm.go:329 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-07-30T16:25:42.626654Z pos=vm.go:331 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-07-30T16:25:42.626954Z pos=vm.go:344 component=virt-handler namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Shutting down due to graceful shutdown signal." level=info timestamp=2018-07-30T16:25:42.627150Z pos=vm.go:370 component=virt-handler namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Shutting down domain for VirtualMachineInstance with deletion timestamp." level=info timestamp=2018-07-30T16:25:42.627355Z pos=vm.go:407 component=virt-handler namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Processing shutdown." level=info timestamp=2018-07-30T16:25:42.629450Z pos=vm.go:556 component=virt-handler namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Grace period expired, killing deleted VirtualMachineInstance testvmi8wnr2" Pod name: virt-launcher-testvmi8wnr2-vkgst Pod phase: Running level=info timestamp=2018-07-30T16:25:06.999092Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:25:06.999256Z pos=client.go:136 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-07-30T16:25:07.021721Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T16:25:07.043717Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:25:07.092506Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Synced vmi" level=info timestamp=2018-07-30T16:25:07.684326Z pos=monitor.go:222 component=virt-launcher msg="Found PID for cfbd8f86-90d7-4d4d-8e32-17ec81ba80c0: 166" level=info timestamp=2018-07-30T16:25:32.432247Z pos=manager.go:255 component=virt-launcher namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Signaled graceful shutdown for testvmi8wnr2" level=info timestamp=2018-07-30T16:25:32.451606Z pos=monitor.go:266 component=virt-launcher msg="Received signal 15." 
level=info timestamp=2018-07-30T16:25:32.580631Z pos=client.go:136 component=virt-launcher msg="Libvirt event 0 with reason 1 received" level=info timestamp=2018-07-30T16:25:32.585808Z pos=server.go:118 component=virt-launcher namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Signaled vmi shutdown" level=info timestamp=2018-07-30T16:25:32.596210Z pos=client.go:119 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-07-30T16:25:32.600184Z pos=client.go:145 component=virt-launcher msg="processed event" level=info timestamp=2018-07-30T16:25:32.610171Z pos=server.go:118 component=virt-launcher namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Signaled vmi shutdown" level=info timestamp=2018-07-30T16:25:32.624991Z pos=server.go:118 component=virt-launcher namespace=kubevirt-test-default name=testvmi8wnr2 kind= uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Signaled vmi shutdown" level=info timestamp=2018-07-30T16:25:42.633407Z pos=client.go:136 component=virt-launcher msg="Libvirt event 6 with reason 1 received" • Failure [60.572 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:52 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:869 with ACPI and some grace period seconds /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:920 should result in vmi status succeeded [It] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:921 Timed out after 15.000s. Expected : Running to equal : Succeeded /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:942 ------------------------------ STEP: Creating the VirtualMachineInstance level=info timestamp=2018-07-30T16:24:48.763648Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi8wnr2 kind=VirtualMachineInstance uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi8wnr2-vkgst" level=info timestamp=2018-07-30T16:25:05.986981Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi8wnr2 kind=VirtualMachineInstance uid=13643bd2-9415-11e8-82a2-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi8wnr2-vkgst" level=info timestamp=2018-07-30T16:25:07.658871Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi8wnr2 kind=VirtualMachineInstance uid=13643bd2-9415-11e8-82a2-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-07-30T16:25:07.723984Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi8wnr2 kind=VirtualMachineInstance uid=13643bd2-9415-11e8-82a2-525500d15501 msg="VirtualMachineInstance started." 
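The second ACPI case expects a graceful, in-guest shutdown, so the VMI should end in Succeeded rather than Failed; the launcher did log "Signaled graceful shutdown for testvmi8wnr2", but the phase was still Running after the 15-second wait. Polling for a terminal phase by hand could look like this minimal bash loop (VMI name and namespace taken from this run; the 1-second interval is arbitrary):

  # Wait until the kubevirt.io API reports a terminal phase for the VMI
  until phase=$(kubectl get vmi testvmi8wnr2 -n kubevirt-test-default -o jsonpath='{.status.phase}') \
        && { [ "$phase" = "Succeeded" ] || [ "$phase" = "Failed" ]; }; do
    sleep 1
  done
  echo "terminal phase: $phase"

In this run neither terminal phase was reached in time, which is consistent with the virt-handler log above still processing the shutdown when the expectation fired.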
STEP: Deleting the VirtualMachineInstance STEP: Verifying VirtualMachineInstance's status is Succeeded panic: test timed out after 1h30m0s goroutine 9378 [running]: testing.(*M).startAlarm.func1() /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc created by time.goFunc /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44 goroutine 1 [chan receive, 90 minutes]: testing.(*T).Run(0xc42042be00, 0x139e775, 0x9, 0x1430c98, 0x4801e6) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301 testing.runTests.func1(0xc42042bd10) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64 testing.tRunner(0xc42042bd10, 0xc420671df8) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 testing.runTests(0xc4208a2120, 0x1d32a50, 0x1, 0x1, 0x412009) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4 testing.(*M).Run(0xc4208b8c00, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171 main.main() _testmain.go:44 +0x151 goroutine 20 [chan receive]: kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1d5e280) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0 /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203 goroutine 21 [syscall, 90 minutes]: os/signal.signal_recv(0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6 os/signal.loop() /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22 created by os/signal.init.0 /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41 goroutine 8 [select]: kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch(0xc420f7cf90, 0xc4208d26c0) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:285 +0x579 kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).WaitFor(0xc420f7cf90, 0x139a511, 0x6, 0x11e3660, 0x14b11b0, 0x0) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:295 +0xba kubevirt.io/kubevirt/tests.waitForVMIStart(0x14ba800, 0xc42109f400, 0x5a, 0x0, 0x0, 0x1d7c901) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1051 +0x4ea kubevirt.io/kubevirt/tests.WaitForSuccessfulVMIStart(0x14ba800, 0xc42109f400, 0x1d7c938, 0x0) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088 +0x43 kubevirt.io/kubevirt/tests_test.glob..func16.6.4.1() /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:971 +0x759 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc4208f0960, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113 +0x9c kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc4208f0960, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:64 +0x13e kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*ItNode).Run(0xc4208e06c0, 0x14b6ca0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...) 
/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go:26 +0x7f kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc4205841e0, 0x0, 0x14b6ca0, 0xc4200ff480) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:203 +0x648 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc4205841e0, 0x14b6ca0, 0xc4200ff480) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:138 +0xff kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc420146500, 0xc4205841e0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:200 +0x10d kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc420146500, 0x1) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:170 +0x329 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc420146500, 0xb) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:66 +0x11b kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc4200fcaf0, 0x7f7152b3fda0, 0xc42042be00, 0x13a0d58, 0xb, 0xc4208a2160, 0x2, 0x2, 0x14d35a0, 0xc4200ff480, ...) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x14b7d00, 0xc42042be00, 0x13a0d58, 0xb, 0xc4208a2140, 0x2, 0x2, 0x2) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:221 +0x258 kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x14b7d00, 0xc42042be00, 0x13a0d58, 0xb, 0xc4203d7a60, 0x1, 0x1, 0x1) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:209 +0xab kubevirt.io/kubevirt/tests_test.TestTests(0xc42042be00) /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa testing.tRunner(0xc42042be00, 0x1430c98) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 created by testing.(*T).Run /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0 goroutine 9 [chan receive, 90 minutes]: kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc420146500, 0xc4204ee300) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:223 +0xd1 created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:60 +0x88 goroutine 10 [select, 90 minutes, locked to thread]: runtime.gopark(0x1432e70, 0x0, 0x139b297, 0x6, 0x18, 0x1) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a runtime.selectgo(0xc42047ff50, 0xc4204ee420) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50 runtime.ensureSigM.func1() /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4 runtime.goexit() /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1 goroutine 15 [IO wait]: internal/poll.runtime_pollWait(0x7f7152b8af00, 0x72, 0xc421381850) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57 internal/poll.(*pollDesc).wait(0xc4203f3818, 0x72, 
0xffffffffffffff00, 0x14b8ec0, 0x1c497d0) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b internal/poll.(*pollDesc).waitRead(0xc4203f3818, 0xc420a7c000, 0x8000, 0x8000) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d internal/poll.(*FD).Read(0xc4203f3800, 0xc420a7c000, 0x8000, 0x8000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d net.(*netFD).Read(0xc4203f3800, 0xc420a7c000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f net.(*conn).Read(0xc4200fadb0, 0xc420a7c000, 0x8000, 0x8000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a crypto/tls.(*block).readFromUntil(0xc42041e780, 0x7f7152b400d0, 0xc4200fadb0, 0x5, 0xc4200fadb0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96 crypto/tls.(*Conn).readRecord(0xc4207ee700, 0x1432f17, 0xc4207ee820, 0x20) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0 crypto/tls.(*Conn).Read(0xc4207ee700, 0xc4208d8000, 0x1000, 0x1000, 0x0, 0x0, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100 bufio.(*Reader).Read(0xc420578000, 0xc420680f18, 0x9, 0x9, 0xc420bcda28, 0x1, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238 io.ReadAtLeast(0x14b5aa0, 0xc420578000, 0xc420680f18, 0x9, 0x9, 0x9, 0xc421316120, 0x43f2c1, 0xc421047800) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86 io.ReadFull(0x14b5aa0, 0xc420578000, 0xc420680f18, 0x9, 0x9, 0x1432fb8, 0xc421381d10, 0x462d33) /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58 kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc420680f18, 0x9, 0x9, 0x14b5aa0, 0xc420578000, 0x0, 0xc400000000, 0x7efb60, 0xc420d8dba8) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc420680ee0, 0xc420814600, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4 kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc421381fb0, 0x1431bf0, 0xc420098fb0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc4202c56c0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76 created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651 goroutine 6021 [chan send, 28 minutes]: kubevirt.io/kubevirt/tests_test.glob..func23.1.2.1.1(0x14f12a0, 0xc420767500, 0xc4200fa1b8, 0xc420764e40, 0xc4200fac30, 0xc4200fac70) /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:81 +0x138 created by kubevirt.io/kubevirt/tests_test.glob..func23.1.2.1 /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:73 +0x386 goroutine 4084 [chan receive, 39 minutes]: kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback(0xc420669720, 0xc420d72500, 0xc420690090, 0x0, 0x0, 0x18, 0xc420f49ec8) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:163 +0x32b kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback-fm(0xc420d72500, 0xc420690090, 0x0, 0x0, 0xc420d72500, 0xc420690090) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:313 +0x52 
kubevirt.io/kubevirt/pkg/kubecli.(*WebsocketRoundTripper).RoundTrip(0xc420669c60, 0xc42105ec00, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:142 +0xab kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper.func1(0x14b5f80, 0xc420669c60, 0xc42105ec00, 0xc42079af60) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:328 +0x56 created by kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:326 +0x33a goroutine 7600 [chan send, 17 minutes]: kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4207aa6c0) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114 created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8 goroutine 9365 [chan receive]: kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch.func3(0x14bfd00, 0xc42087ede0, 0xc420921170, 0xc420bb37a0) /root/go/src/kubevirt.io/kubevirt/tests/utils.go:276 +0x93 created by kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch /root/go/src/kubevirt.io/kubevirt/tests/utils.go:274 +0x4ae goroutine 5427 [chan receive, 38 minutes]: kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback(0xc4202bc760, 0xc420882c80, 0xc420aeae10, 0x0, 0x0, 0x18, 0xc4213f5ec8) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:163 +0x32b kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback-fm(0xc420882c80, 0xc420aeae10, 0x0, 0x0, 0xc420882c80, 0xc420aeae10) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:313 +0x52 kubevirt.io/kubevirt/pkg/kubecli.(*WebsocketRoundTripper).RoundTrip(0xc4203dc3e0, 0xc420d4a300, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:142 +0xab kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper.func1(0x14b5f80, 0xc4203dc3e0, 0xc420d4a300, 0xc420dcfaa0) /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:328 +0x56 created by kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:326 +0x33a goroutine 9364 [semacquire]: sync.runtime_notifyListWait(0xc420d8dbc0, 0xc400000001) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sema.go:510 +0x10b sync.(*Cond).Wait(0xc420d8dbb0) /gimme/.gimme/versions/go1.10.linux.amd64/src/sync/cond.go:56 +0x80 kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*pipe).Read(0xc420d8dba8, 0xc420add201, 0x5ff, 0x5ff, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/pipe.go:64 +0x8f kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.transportResponseBody.Read(0xc420d8db80, 0xc420add201, 0x5ff, 0x5ff, 0x0, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1674 +0xa1 encoding/json.(*Decoder).refill(0xc4202001e0, 0x835c0a, 0x9) /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:159 +0x132 encoding/json.(*Decoder).readValue(0xc4202001e0, 0x0, 0x0, 0x11f55c0) /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:134 +0x23d encoding/json.(*Decoder).Decode(0xc4202001e0, 0x1211500, 0xc420ac8f40, 0x14bdac0, 0xc42109f680) /gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:63 +0x78 kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer.(*jsonFrameReader).Read(0xc42087edb0, 0xc420ae2800, 0x400, 0x400, 0xc420767ec0, 0x40, 0x38) 
/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer/framer.go:150 +0x295 kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming.(*decoder).Decode(0xc420165310, 0x0, 0x14bf780, 0xc420767ec0, 0x380, 0x14bdac0, 0xc420bb3798, 0x456ae0, 0xc420bb3740) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming/streaming.go:77 +0x95 kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch.(*Decoder).Decode(0xc42092de80, 0xc42137dfa8, 0x5, 0x14bdac0, 0xc42109f680, 0x0, 0x0) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch/decoder.go:49 +0x7c kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42087ede0) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:93 +0x12e created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8 goroutine 6211 [chan send, 26 minutes]: kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4206c7260) /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114 created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8 make: *** [functest] Error 2 + make cluster-down ./cluster/down.sh
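The run ends with go test's own 1h30m deadline firing while a spec was still blocked in waitForVMIStart (goroutine 8 in the trace above), so the whole suite panics, make reports "Error 2" for the functest target, and the job proceeds to make cluster-down. When iterating on the failing lifecycle specs locally, one plausible approach is to focus just those specs and raise the go test deadline; this is a sketch only, since KubeVirt's functional tests are normally driven through the repo's make targets and the test binary also needs the usual cluster/kubeconfig arguments for the provider.

  # Hypothetical focused re-run with a longer timeout; -timeout and -ginkgo.focus are
  # standard go test / ginkgo flags, all cluster-specific arguments are omitted here
  go test ./tests/ -timeout 2h -ginkgo.focus='Delete a VirtualMachineInstance'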