+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/08/02 10:18:07 Waiting for host: 192.168.66.102:22
2018/08/02 10:18:10 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:18:18 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:18:26 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:18:34 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:18:39 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3'
+ '[' -n '2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3' ']'
+ docker rm -f 2cfbef31c987 e183c40c07dc 861f604efed4 12902ad26342 028539b1f68b bd6f07c1906c d1f95a33a226 c43f96b6da26 e007e5cfd226 b42e2bceca6e 00531aec6f9a e4ad39ba6cef 504c3df6bbf4 eb1ec0b445ce b8955b91e8e5 f739ed8f3e59 07668d85ab3a a6045d125d7b 2ce17110e009 b45f64ab28ef 3a15945be9e1 2a0af99ae1d1 0ece927846d7 0202d5f5dfae 8ce743769d8f 2efb36567bd8 96b65c0493c5 e9ce89fa30e3
2cfbef31c987
e183c40c07dc
861f604efed4
12902ad26342
028539b1f68b
bd6f07c1906c
d1f95a33a226
c43f96b6da26
e007e5cfd226
b42e2bceca6e
00531aec6f9a
e4ad39ba6cef
504c3df6bbf4
eb1ec0b445ce
b8955b91e8e5
f739ed8f3e59
07668d85ab3a
a6045d125d7b
2ce17110e009
b45f64ab28ef
3a15945be9e1
2a0af99ae1d1
0ece927846d7
0202d5f5dfae
8ce743769d8f
2efb36567bd8
96b65c0493c5
e9ce89fa30e3
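[editor's note] The teardown above only calls docker rm -f when docker ps -q -a actually returned IDs, so a host with no leftover containers does not fail the script. A minimal sketch of that guard (assuming a local Docker daemon; this is not the repo's exact cleanup script):

# Sketch of the guarded container cleanup seen in the trace above.
containers=$(docker ps -q -a)        # every container ID, running or exited
if [ -n "$containers" ]; then        # docker rm fails with no arguments, so guard first
    docker rm -f $containers         # word-splitting is intentional: IDs are space-separated
fi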
2018/08/02 10:18:42 Waiting for host: 192.168.66.101:22
2018/08/02 10:18:45 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:18:53 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:19:01 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:19:09 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/02 10:19:14 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=1.30 ms
--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 1.307/1.307/1.307/0.000 ms
Found node02. Adding it to the inventory.
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable
--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms
+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml
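[editor's note] The trace above scans candidate node IPs in order and stops at the first address that does not answer a single ping; each reachable host is added to /etc/hosts and to the [new_nodes] inventory group. A minimal reconstruction of that loop (the >> redirection targets are inferred from context, since redirections do not appear in a set -x trace):

# Reconstructed node-discovery loop; redirection targets are assumptions.
inventory_file=/root/inventory
nodes_found=false
for i in $(seq 2 100); do
    node=$(printf node%02d "$i")                # node02, node03, ...
    node_ip=$(printf 192.168.66.1%02d "$i")     # 192.168.66.102, .103, ...
    set +e
    ping "$node_ip" -c 1                        # reachable => a VM exists for this slot
    rc=$?
    set -e
    if [ "$rc" -ne 0 ]; then
        break                                   # first unreachable slot ends the scan
    fi
    nodes_found=true
    echo "$node_ip $node" >> /etc/hosts
    echo "Found $node. Adding it to the inventory."
    echo "$node openshift_node_group_name=\"node-config-compute\" openshift_schedulable=true openshift_ip=$node_ip" >> "$inventory_file"
done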
PLAY [Populate config host groups] *********************************************
TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)
PLAY [Ensure there are new_nodes] **********************************************
TASK [fail] ********************************************************************
skipping: [localhost]
TASK [fail] ********************************************************************
skipping: [localhost]
PLAY [Initialization Checkpoint Start] *****************************************
TASK [Set install initialization 'In Progress'] ********************************
ok: [node01]
PLAY [Populate config host groups] *********************************************
TASK [Load group name mapping variables] ***************************************
ok: [localhost]
TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]
TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]
TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]
TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]
TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]
TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)
TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]
TASK [Evaluate oo_new_etcd_to_config] ******************************************
TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]
TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)
TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)
TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_lb_to_config] ************************************************
TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)
TASK [Evaluate oo_glusterfs_to_config] *****************************************
TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)
 [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config
PLAY [Ensure that all non-node hosts are accessible] ***************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
PLAY [Initialize basic host facts] *********************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02
TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] ***
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : debug] ************************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_stats] ********************************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] ***
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Standardize on latest variable names] *****
ok: [node01]
ok: [node02]
TASK [openshift_sanitize_inventory : Normalize openshift_release] **************
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02
TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : set_fact] *********************************
skipping: [node01] => (item=openshift_hosted_etcd_storage_kind)
skipping: [node02] => (item=openshift_hosted_etcd_storage_kind)
TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] ***
skipping: [node01]
skipping: [node02]
TASK [openshift_sanitize_inventory : At least one master is schedulable] *******
skipping: [node01]
skipping: [node02]
TASK [Detecting Operating System from ostree_booted] ***************************
ok: [node02]
ok: [node01]
TASK [set openshift_deployment_type if unset] **********************************
skipping: [node01]
skipping: [node02]
TASK [check for node already bootstrapped] *************************************
ok: [node02]
ok: [node01]
TASK [initialize_facts set fact openshift_is_bootstrapped] *********************
ok: [node01]
ok: [node02]
TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] ***
ok: [node01]
ok: [node02]
TASK [Determine Atomic Host Docker Version] ************************************
skipping: [node01]
skipping: [node02]
TASK [assert atomic host docker version is 1.12 or later] **********************
skipping: [node01]
skipping: [node02]
PLAY [Retrieve existing master configs and validate] ***************************
TASK [openshift_control_plane : stat] ******************************************
ok: [node01]
TASK [openshift_control_plane : slurp] *****************************************
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] ***
ok: [node01]
TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
skipping: [node01]
PLAY [Initialize special first-master variables] *******************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
PLAY [Disable web console if required] *****************************************
TASK [set_fact] ****************************************************************
skipping: [node01]
PLAY [Setup yum repositories for all hosts] ************************************
TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Install Red Hat Subscription manager] *******************
skipping: [node02]
TASK [rhel_subscribe : Is host already registered?] ****************************
skipping: [node02]
TASK [rhel_subscribe : Register host] ******************************************
skipping: [node02]
TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]
TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] ***********
skipping: [node02]
TASK [rhel_subscribe : Attach to OpenShift Pool] *******************************
skipping: [node02]
TASK [rhel_subscribe : Satellite preparation] **********************************
skipping: [node02]
TASK [openshift_repos : openshift_repos detect ostree] *************************
ok: [node02]
TASK [openshift_repos : Ensure libselinux-python is installed] *****************
ok: [node02]
TASK [openshift_repos : Remove openshift_additional.repo file] *****************
ok: [node02]
TASK [openshift_repos : Create any additional repos that are defined] **********
TASK [openshift_repos : include_tasks] *****************************************
skipping: [node02]
TASK [openshift_repos : include_tasks] *****************************************
included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02
TASK [openshift_repos : Configure origin gpg keys] *****************************
ok: [node02]
TASK [openshift_repos : Configure correct origin release repository] ***********
ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2)
TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] ***
changed: [node02] => {
    "msg": "First run of openshift_repos"
}
TASK [openshift_repos : Record that openshift_repos already ran] ***************
ok: [node02]
RUNNING HANDLER [openshift_repos : refresh cache] ******************************
changed: [node02]
PLAY [Install packages necessary for installer] ********************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Determine if chrony is installed] ****************************************
changed: [node02]
 [WARNING]: Consider using the yum, dnf or zypper module rather than running rpm.
If you need to use command because yum, dnf or zypper is insufficient you can add
warn=False to this command task or set command_warnings=False in ansible.cfg to
get rid of this message.
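[editor's note] The warning above fires because this task detects chrony by shelling out to rpm through the command module instead of using a package module. A sketch of a check of that shape (illustrative, not the playbook's literal task):

# rpm-based presence check of the kind that triggers the warning above.
if rpm -q chrony >/dev/null 2>&1; then
    echo "chrony is installed"
else
    echo "chrony is not installed"
fi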
TASK [Install ntp package] *****************************************************
skipping: [node02]
TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]
TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
ok: [node02] => (item=python-docker)
PLAY [Initialize cluster facts] ************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]
TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]
TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node02]
ok: [node01]
TASK [Gather Cluster facts] ****************************************************
changed: [node02]
changed: [node01]
TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]
TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]
PLAY [Initialize etcd host variables] ******************************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
TASK [set_fact] ****************************************************************
ok: [node01]
PLAY [Determine openshift_version to configure on first master] ****************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0"
}
TASK [openshift_version : set_fact] ********************************************
ok: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]
PLAY [Set openshift_version for etcd, node, and master hosts] ******************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]
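[editor's note] The debug output above shows how the version facts relate: openshift_release (3.10) must be a substring of both the image tag (v3.10.0-rc.0) and the package version glob (-3.10.0*), which is what the two assert tasks verify. A shell sketch of the same containment check (values taken from the log; the case-pattern test is an illustration, not the role's implementation):

# Containment checks mirroring the assert tasks above.
release="3.10"
image_tag="v3.10.0-rc.0"
pkg_version="-3.10.0*"
case "$image_tag" in
    *"$release"*) ;;                                   # v3.10.0-rc.0 contains 3.10
    *) echo "openshift_release not in image tag" >&2; exit 1 ;;
esac
case "$pkg_version" in
    *"$release"*) echo "All assertions passed" ;;      # -3.10.0* contains 3.10
    *) echo "openshift_release not in pkg version" >&2; exit 1 ;;
esac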
PLAY [Verify Requirements] *****************************************************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [Run variable sanity checks] **********************************************
ok: [node01]
TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]
PLAY [Initialization Checkpoint End] *******************************************
TASK [Set install initialization 'Complete'] ***********************************
ok: [node01]
PLAY [Validate node hostnames] *************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [Query DNS for IP address of node02] **************************************
ok: [node02]
TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]
TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]
PLAY [Configure os_firewall] ***************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]
TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]
TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]
TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]
TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)
TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]
TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]
TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] ***
skipping: [node02]
TASK [os_firewall : Restart polkitd] *******************************************
skipping: [node02]
TASK [os_firewall : Wait for polkit action to have been created] ***************
skipping: [node02]
TASK [os_firewall : Ensure firewalld service is not enabled] *******************
ok: [node02]
TASK [os_firewall : Wait 10 seconds after disabling firewalld] *****************
skipping: [node02]
TASK [os_firewall : Install iptables packages] *********************************
ok: [node02] => (item=iptables)
ok: [node02] => (item=iptables-services)
TASK [os_firewall : Start and enable iptables service] *************************
ok: [node02 -> node02] => (item=node02)
TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] ***
skipping: [node02]
PLAY [oo_nodes_to_config] ******************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [container_runtime : Setup the docker-storage for overlay] ****************
skipping: [node02]
TASK [container_runtime : Create file system on extra volume device] ***********
TASK [container_runtime : Create mount entry for extra volume] *****************
PLAY [oo_nodes_to_config] ******************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
ok: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
ok: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ******************
skipping: [node02]
TASK [container_runtime : Add https_proxy to /etc/atomic.conf] *****************
skipping: [node02]
TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ********************
skipping: [node02]
TASK [container_runtime : Get current installed Docker version] ****************
ok: [node02]
TASK [container_runtime : Error out if Docker pre-installed but too old] *******
skipping: [node02]
TASK [container_runtime : Error out if requested Docker is too old] ************
skipping: [node02]
TASK [container_runtime : Install Docker] **************************************
skipping: [node02]
TASK [container_runtime : Ensure docker.service.d directory exists] ************
ok: [node02]
TASK [container_runtime : Configure Docker service unit file] ******************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Set registry params] *********************************
skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []})
TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] ***
skipping: [node02]
TASK [container_runtime : Set Proxy Settings] **********************************
skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''})
TASK [container_runtime : Set various Docker options] **************************
ok: [node02]
TASK [container_runtime : stat] ************************************************
ok: [node02]
TASK [container_runtime : Configure Docker Network OPTIONS] ********************
ok: [node02]
TASK [container_runtime : Detect if docker is already started] *****************
ok: [node02]
TASK [container_runtime : Start the Docker service] ****************************
ok: [node02]
TASK [container_runtime : set_fact] ********************************************
ok: [node02]
TASK [container_runtime : Check for docker_storage_path/overlay2] **************
ok: [node02]
TASK [container_runtime : Fixup SELinux permissions for docker] ****************
changed: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
ok: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
ok: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
ok: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]
TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
skipping: [node02]
TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]
TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]
TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]
TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]
TASK [container_runtime : Check that overlay is in the kernel] *****************
skipping: [node02]
TASK [container_runtime : Add overlay to modprobe.d] ***************************
skipping: [node02]
TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
skipping: [node02]
TASK [container_runtime : Enable and start systemd-modules-load] ***************
skipping: [node02]
TASK [container_runtime : Install cri-o] ***************************************
skipping: [node02]
TASK [container_runtime : Remove CRI-O default configuration files] ************
skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf)
skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf)
TASK [container_runtime : Create the CRI-O configuration] **********************
skipping: [node02]
TASK [container_runtime : Ensure CNI configuration directory exists] ***********
skipping: [node02]
TASK [container_runtime : Add iptables allow rules] ****************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove iptables rules] *******************************
TASK [container_runtime : Add firewalld allow rules] ***************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})
TASK [container_runtime : Remove firewalld allow rules] ************************
TASK [container_runtime : Configure the CNI network] ***************************
skipping: [node02]
TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
skipping: [node02]
TASK [container_runtime : Start the CRI-O service] *****************************
skipping: [node02]
TASK [container_runtime : Ensure /var/lib/containers exists] *******************
skipping: [node02]
TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
skipping: [node02]
TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]
TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]
TASK [container_runtime : stat the docker data dir] ****************************
skipping: [node02]
TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]
TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]
TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]
TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]
TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]
TASK [container_runtime : Setup the link] **************************************
skipping: [node02]
TASK [container_runtime : start docker] ****************************************
skipping: [node02]
PLAY [Determine openshift_version to configure on first master] ****************
TASK [Gathering Facts] *********************************************************
ok: [node01]
TASK [include_role : openshift_version] ****************************************
TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
skipping: [node01]
TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : debug] ***********************************************
skipping: [node01]
TASK [openshift_version : set_fact] ********************************************
skipping: [node01]
TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}
TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}
TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]
PLAY [Set openshift_version for etcd, node, and master hosts] ******************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [set_fact] ****************************************************************
ok: [node02]
TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]
PLAY [Node Preparation Checkpoint Start] ***************************************
TASK [Set Node preparation 'In Progress'] **************************************
ok: [node01]
PLAY [Only target nodes that have not yet been bootstrapped] *******************
TASK [Gathering Facts] *********************************************************
ok: [localhost]
TASK [add_host] ****************************************************************
skipping: [localhost] => (item=node02)
ok: [localhost] => (item=node01)
PLAY [Disable excluders] *******************************************************
TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]
TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_docker_excluder": true
}
TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}
TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]
TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]
TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-docker-excluder version detected] ************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]
TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]
TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]
TASK [openshift_excluder : origin-excluder version detected] *******************
skipping: [node02]
TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]
TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] ***
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]
TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]
TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]
TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]
TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]
TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]
TASK [openshift_excluder : disable docker excluder] ****************************
skipping: [node02]
TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]
TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]
PLAY [Configure nodes] *********************************************************
TASK [Gathering Facts] *********************************************************
ok: [node02]
TASK [openshift_cloud_provider : Set cloud provider facts] *********************
skipping: [node02]
TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
skipping: [node02]
TASK [openshift_cloud_provider : include the defined cloud provider files] *****
skipping: [node02]
TASK [openshift_node : fail] ***************************************************
skipping: [node02]
TASK [openshift_node : Check for NetworkManager service] ***********************
ok: [node02]
TASK [openshift_node : Set fact using_network_manager] *************************
ok: [node02]
TASK [openshift_node : Install dnsmasq] ****************************************
ok: [node02]
TASK [openshift_node : ensure origin/node directory exists] ********************
changed: [node02] => (item=/etc/origin)
changed: [node02] => (item=/etc/origin/node)
TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
skipping: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
skipping: [node02]
TASK [openshift_node : Install dnsmasq configuration] **************************
ok: [node02]
TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
skipping: [node02]
TASK [openshift_node : Enable dnsmasq] *****************************************
ok: [node02]
TASK [openshift_node : Install network manager dispatch script] ****************
ok: [node02]
TASK [openshift_node : Add iptables allow rules] *******************************
ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove iptables rules] **********************************
TASK [openshift_node : Add firewalld allow rules] ******************************
skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})
TASK [openshift_node : Remove firewalld allow rules] ***************************
TASK [openshift_node : Checking for journald.conf] *****************************
ok: [node02]
TASK [openshift_node : Create journald persistence directories] ****************
ok: [node02]
TASK [openshift_node : Update journald setup] **********************************
ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'})
ok: [node02] => (item={u'var': u'Compress', u'val': True})
ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000})
ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
ok: [node02] => (item={u'var': u'ForwardToSyslog', u'val': False})
ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False})
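[editor's note] Each item above is one key=value pair pushed into journald's configuration. A sketch of the replace-or-append pattern such a task boils down to (an assumed shell equivalent; the role itself manages these lines through Ansible, not shell):

# Replace-or-append for journald settings; an assumed equivalent of the task above.
set_journald_opt() {
    local key=$1 val=$2 conf=/etc/systemd/journald.conf
    if grep -q "^${key}=" "$conf"; then
        sed -i "s|^${key}=.*|${key}=${val}|" "$conf"    # update an existing line in place
    else
        echo "${key}=${val}" >> "$conf"                 # or append when the key is absent
    fi
}
set_journald_opt Storage persistent
set_journald_opt Compress yes
set_journald_opt SystemMaxUse 8G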
TASK [openshift_node : Restart journald] ***************************************
skipping: [node02]
TASK [openshift_node : Disable swap] *******************************************
ok: [node02]
TASK [openshift_node : Install node, clients, and conntrack packages] **********
ok: [node02] => (item={u'name': u'origin-node-3.10.0*'})
ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'})
ok: [node02] => (item={u'name': u'conntrack-tools'})
TASK [openshift_node : Restart cri-o] ******************************************
skipping: [node02]
TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
changed: [node02]
TASK [openshift_node : sysctl] *************************************************
ok: [node02]
TASK [openshift_node : Check for credentials file for registry auth] ***********
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth] *******************
skipping: [node02]
TASK [openshift_node : Create credentials for registry auth (alternative)] *****
skipping: [node02]
TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
skipping: [node02]
TASK [openshift_node : Check that node image is present] ***********************
changed: [node02]
TASK [openshift_node : Pre-pull node image] ************************************
skipping: [node02]
TASK [openshift_node : Copy node script to the node] ***************************
ok: [node02]
TASK [openshift_node : Install Node service file] ******************************
ok: [node02]
TASK [openshift_node : Ensure old system path is set] **************************
skipping: [node02] => (item=/etc/origin/openvswitch)
skipping: [node02] => (item=/var/lib/kubelet)
skipping: [node02] => (item=/opt/cni/bin)
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Copy node container image to ostree storage] ************
skipping: [node02]
TASK [openshift_node : Install or Update node system container] ****************
skipping: [node02]
TASK [openshift_node : Restart network manager to ensure networking configuration is in place] ***
skipping: [node02]
TASK [openshift_node : Configure Node settings] ********************************
ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='})
ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'})
ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'})
TASK [openshift_node : Configure Proxy Settings] *******************************
skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='})
skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='})
skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'})
TASK [openshift_node : file] ***************************************************
skipping: [node02]
TASK [openshift_node : Create the Node config] *********************************
changed: [node02]
TASK [openshift_node : Configure Node Environment Variables] *******************
TASK [openshift_node : Ensure the node static pod directory exists] ************
changed: [node02]
TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]
TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]
TASK [openshift_node : Install NFS storage plugin dependencies] ****************
ok: [node02]
TASK [openshift_node : Check for existence of nfs sebooleans] ******************
ok: [node02] => (item=virt_use_nfs)
ok: [node02] => (item=virt_sandbox_use_nfs)
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:49.325537', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.012428', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:49.313109', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:50.868913', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.010207', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:50.858706', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:49.325537', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.012428', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:49.313109', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:50.868913', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.010207', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:50.858706', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Install GlusterFS storage plugin dependencies] **********
ok: [node02]
TASK [openshift_node : Check for existence of fusefs sebooleans] ***************
ok: [node02] => (item=virt_use_fusefs)
ok: [node02] => (item=virt_sandbox_use_fusefs)
TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:58.310430', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.010475', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:58.299955', '_ansible_ignore_errors': None, 'failed': False})
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:59.811027', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.010330', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:59.800697', '_ansible_ignore_errors': None, 'failed': False})
TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:58.310430', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.010475', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:58.299955', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-02 10:27:59.811027', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.010330', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:59.800697', '_ansible_ignore_errors': None, 'failed': False})
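[editor's note] The seboolean tasks above first read each boolean with getsebool (the raw command is visible in the item dicts) and only flip it when it is off. A minimal shell sketch of that check-then-set pattern (an assumed reconstruction, not the role's actual implementation):

# Check-then-set pattern for SELinux booleans, as exercised above.
# getsebool prints lines of the form "virt_use_nfs --> on".
for sebool in virt_use_nfs virt_sandbox_use_nfs virt_use_fusefs virt_sandbox_use_fusefs; do
    if getsebool "$sebool" | grep -q -- '--> off'; then
        setsebool -P "$sebool" on      # -P makes the change persist across reboots
    fi
done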
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-02 10:27:59.800697', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1531032437.8490183}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1531032437.8490183, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1531032437.8490183, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Make tuned use the 
recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node01] changed: [node02] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump the 
bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=33 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:04:01) Node Preparation : Complete (0:05:07) Sending file modes: C0755 110489328 oc Sending file modes: C0600 5649 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 25d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 25d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... 
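
The `set +e` block in the trace above is a readiness gate on the freshly scaled-up cluster: it lists nodes without headers, checks the kubectl return code, and greps for NotReady before letting the job continue to `make cluster-sync`. A minimal bash sketch of that pattern follows; the enclosing retry loop and the 10s interval are illustrative assumptions, since the trace only records the final, successful iteration.

# Sketch of the readiness gate traced above; the retry loop and sleep
# interval are assumptions for illustration -- the log shows one pass.
while true; do
    set +e
    kubectl get nodes --no-headers
    kubectl_rc=$?
    notready=$(kubectl get nodes --no-headers | grep NotReady)
    set -e
    # Proceed once kubectl succeeds and no node reports NotReady.
    if [ "$kubectl_rc" -eq 0 ] && [ -z "$notready" ]; then
        break
    fi
    sleep 10   # assumed interval
done
echo 'Nodes are ready:'
kubectl get nodes
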
Untagged: localhost:33123/kubevirt/virt-controller:devel Untagged: localhost:33123/kubevirt/virt-controller@sha256:e2a11bf5a9df4966887a6a49feb695869bf15044ba59af3b660fb4bd00592cd1 Deleted: sha256:3e796ef03cc76eecba9f7ea0ffbb15bcb8ea32cb316604f8c0abe3e155d1795c Untagged: localhost:33123/kubevirt/virt-launcher:devel Untagged: localhost:33123/kubevirt/virt-launcher@sha256:1d2465426dd109134bb9a46db1cd978aff10378da5f1a19846eb6d67a1453aae Deleted: sha256:67c986ccc0e13c704c1ef7931ecbbbebd8c107aac0698116de8f5bc8d52a8e88 Untagged: localhost:33123/kubevirt/virt-handler:devel Untagged: localhost:33123/kubevirt/virt-handler@sha256:0269d71e09dcf1e123d48e412054f0dbbde9732bcd485c9686df21e7d52e72a7 Deleted: sha256:d1ad5d6a31caea8478ae0654e09189675e3e03652cc5463f8ca9cb5cbba27470 Untagged: localhost:33123/kubevirt/virt-api:devel Untagged: localhost:33123/kubevirt/virt-api@sha256:e31833c4aa11e322937f8e400416b0197f204c143ed8d29b4a3efbce26836ab6 Deleted: sha256:f5fff83cd9ade60566b35b31ec9006c1b9d8f6ec200fe4f7af85067c422ff736 Untagged: localhost:33123/kubevirt/subresource-access-test:devel Untagged: localhost:33123/kubevirt/subresource-access-test@sha256:92bf78a9705333bcb6cdf6e7c89c64e282c4ccc51cd2f53e57e2a58ee8e83730 Deleted: sha256:6746d8b1f5c45b89151b66c8c3cb898b0ab394901626f57e0100c7c2510548f6 Untagged: localhost:33123/kubevirt/example-hook-sidecar:devel Untagged: localhost:33123/kubevirt/example-hook-sidecar@sha256:e83c1ba64329cede32495223eafabaff15725e40ecbbd13d8c98d1eca853fc36 Deleted: sha256:3b0db870a1b1a75c316974b23ceba34bc3ab241cbab7861be2e77a0de3eb1248 sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200 go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... 
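
The Untagged:/Deleted: lines above are the previous run's devel-tagged images being dropped from the local registry prefix before the rebuild. A hypothetical one-liner with the same effect is sketched below; the registry port (33123 in this run) is ephemeral and changes between runs, and `docker rmi` produces exactly the Untagged/Deleted pairs seen here.

# Remove stale devel-tagged KubeVirt images; port 33123 is from this run.
docker images --format '{{.Repository}}:{{.Tag}}' \
    | grep '^localhost:33123/kubevirt/.*:devel$' \
    | xargs -r docker rmi
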
compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.39 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b00c84523b53 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> b76b8bd8cd39 Step 5/8 : USER 1001 ---> Using cache ---> b6d9ad9ed232 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> Using cache ---> 1fe9b2ea3d80 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Using cache ---> 12696670c79a Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-controller" '' ---> Running in af712c6308c1 ---> 23b2b9fee556 Removing intermediate container af712c6308c1 Successfully built 23b2b9fee556 Sending build context to Docker daemon 43.31 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 945996802736 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 672f9ab56316 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> Using cache ---> 55cd676cbbb2 Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> Using cache ---> d983f5a36dd5 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Using cache ---> a739c38539f9 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Using cache ---> e965885ad121 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> Using cache ---> 19e70c9eb5cc Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Using cache ---> 798fdb45c56c Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-launcher" '' ---> Running in 8cf2e316380b ---> 8b619fa3a54a Removing intermediate container 8cf2e316380b Successfully built 8b619fa3a54a Sending build context to Docker daemon 41.74 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> Using cache ---> 49501334b17e Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Using cache ---> 7aa34460126f Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-handler" '' ---> Running in 51d3b0a86146 ---> e2452feed4d7 Removing intermediate container 51d3b0a86146 Successfully built e2452feed4d7 Sending build context to Docker daemon 38.81 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> ed1ebf600ee1 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 0769dad023e5 Step 5/8 : USER 1001 ---> Using cache ---> 0cb65afb0c2b Step 6/8 : COPY virt-api /usr/bin/virt-api ---> Using cache ---> 0dd839df23c9 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Using cache ---> 6b1cf321b3e6 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "virt-api" '' ---> Running in b424fc27c6a9 ---> 2deaa57cc52f Removing intermediate container b424fc27c6a9 Successfully built 2deaa57cc52f Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" 
---> Using cache ---> bfe77d5699ed Step 3/7 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 02134835a6aa Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> ec0843818da7 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 754029bb4bd2 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 6327b8256318 Successfully built 6327b8256318 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 207487abe7b2 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "vm-killer" '' ---> Using cache ---> 27cf5472530f Successfully built 27cf5472530f Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 5734d749eb5c Step 3/7 : ENV container docker ---> Using cache ---> f8775a77966f Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 1a40cf222a61 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 77b545d92fe7 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> dfe20d463305 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "registry-disk-v1alpha" '' ---> Using cache ---> 5efdf368e732 Successfully built 5efdf368e732 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33482/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> 386f7e924456 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> f473a86e4d6a Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a4ca4c67d45c Successfully built a4ca4c67d45c Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33482/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl -g -L https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> de1e81f43a59 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> a5867eac6e05 Successfully built a5867eac6e05 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33482/kubevirt/registry-disk-v1alpha:devel ---> 5efdf368e732 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 369bca39dcc2 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 1083d820f9c8 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Using cache ---> 11512d828b9c Successfully built 11512d828b9c Sending build context to Docker daemon 35.59 
MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 985fe391c056 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 3b2cae8ac543 Step 5/8 : USER 1001 ---> Using cache ---> 0c06e5b4a900 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> Using cache ---> 545f7412d6d4 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Using cache ---> e6875a79ca11 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "subresource-access-test" '' ---> Running in 0e631d1ae78f ---> 1c9401dd4029 Removing intermediate container 0e631d1ae78f Successfully built 1c9401dd4029 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/9 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> d3456b1644b1 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0ba81fddbba1 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 5d33abe3f819 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 783826523be1 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 711bc8d15952 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release1" '' "winrmcli" '' ---> Using cache ---> fe40426b785b Successfully built fe40426b785b Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> e3238544ad97 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> Using cache ---> 1ca86501391b Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Using cache ---> 85753d9f4fe3 Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release1" '' ---> Running in 4380093bb05b ---> bcaa96a87eb4 Removing intermediate container 4380093bb05b Successfully built bcaa96a87eb4 hack/build-docker.sh push The push refers to a repository [localhost:33482/kubevirt/virt-controller] 4c814cfb0a8b: Preparing aa89340cf7a8: Preparing 891e1e4ef82a: Preparing aa89340cf7a8: Pushed 4c814cfb0a8b: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:8fe3f0279b3baebd7ed836edd76a2914d3a18dcddb926bcc097315d97dd93ea8 size: 949 The push refers to a repository [localhost:33482/kubevirt/virt-launcher] aa056233b0a2: Preparing b15857bd25aa: Preparing 59bdc69e2684: Preparing 43175215dfed: Preparing ce7996782210: Preparing 633427c64a24: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing fa6154170bf5: Preparing 5eefb9960a36: Preparing 633427c64a24: Waiting 891e1e4ef82a: Preparing da38cf808aa5: Waiting b83399358a92: Waiting 5eefb9960a36: Waiting b15857bd25aa: Pushed 43175215dfed: Pushed aa056233b0a2: Pushed da38cf808aa5: Pushed b83399358a92: Pushed fa6154170bf5: Pushed 186d8b3e4fd8: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller 59bdc69e2684: Pushed 633427c64a24: Pushed ce7996782210: Pushed 5eefb9960a36: Pushed devel: digest: sha256:7850de2a5fea9172b45e6cd38aaae49810889dcef81ba435b729ef9571dcbe18 size: 2828 The push refers to a repository [localhost:33482/kubevirt/virt-handler] 778f0be73b59: Preparing 
891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher 778f0be73b59: Pushed devel: digest: sha256:1369ce3b15e6699a3300b7f39249ae6151e6b67dd6edcf40a27b9171dd9789fe size: 741 The push refers to a repository [localhost:33482/kubevirt/virt-api] ac007035f4ea: Preparing 82fc744c99b4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 82fc744c99b4: Pushed ac007035f4ea: Pushed devel: digest: sha256:1fc94a503f9db5555c8596a34509b81ad805aba7494d812583221b69c7caacc0 size: 948 The push refers to a repository [localhost:33482/kubevirt/disks-images-provider] 71ad31feb2c5: Preparing 21d4b721776e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 71ad31feb2c5: Pushed 21d4b721776e: Pushed devel: digest: sha256:5dc088106df85eb01f2ad0566624239b95b34986820107944e36d309183fd4cd size: 948 The push refers to a repository [localhost:33482/kubevirt/vm-killer] c4cfadeeaf5f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider c4cfadeeaf5f: Pushed devel: digest: sha256:39b817b79b1fbce75dbb476bc261b2752fd6466bf98d373208d5144579da22b0 size: 740 The push refers to a repository [localhost:33482/kubevirt/registry-disk-v1alpha] 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 661cce8d8e52: Pushed 41e0baba3077: Pushed 25edbec0eaea: Pushed devel: digest: sha256:0df707a55243af8792380fba68a76307017494c503e0e9071ed55d7d3c3611d4 size: 948 The push refers to a repository [localhost:33482/kubevirt/cirros-registry-disk-demo] f9f97de3966a: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 661cce8d8e52: Mounted from kubevirt/registry-disk-v1alpha 41e0baba3077: Mounted from kubevirt/registry-disk-v1alpha f9f97de3966a: Pushed devel: digest: sha256:3f818f67105a36bdc42bdbfad87fc29d0028e39a0dceef92d12efbcf8e16e5ed size: 1160 The push refers to a repository [localhost:33482/kubevirt/fedora-cloud-registry-disk-demo] 24cdf3b545f2: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 41e0baba3077: Mounted from kubevirt/cirros-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/cirros-registry-disk-demo 24cdf3b545f2: Pushed devel: digest: sha256:a6a571626690141c7da4cf0e1eb4fd75e5dd9ae427d5070c2729214cfbd6a192 size: 1161 The push refers to a repository [localhost:33482/kubevirt/alpine-registry-disk-demo] d8e356e905f4: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/fedora-cloud-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/fedora-cloud-registry-disk-demo d8e356e905f4: Pushed devel: digest: sha256:c27568048aa8e031860d98cdced0370763745ad80581e62432568dac45abf1fb size: 1160 The push refers to a repository [localhost:33482/kubevirt/subresource-access-test] bae3696c44c1: Preparing 25cb73590a9d: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 25cb73590a9d: Pushed bae3696c44c1: Pushed devel: digest: sha256:54fe5a07cf23145953c5b07412005e79b037e03c658f6524aa7a8d2c11399724 size: 948 The push refers to a repository [localhost:33482/kubevirt/winrmcli] f8083e002d0b: Preparing 53c709abc882: Preparing 9ca98a0f492b: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test f8083e002d0b: Pushed 9ca98a0f492b: Pushed 53c709abc882: 
Pushed devel: digest: sha256:4fe6c9666a841b61b962d7fb73ccb7cb0dabc3b56e1657cfdfd9005e1a36d38c size: 1165 The push refers to a repository [localhost:33482/kubevirt/example-hook-sidecar] 5ea0f62707d3: Preparing 39bae602f753: Preparing 5ea0f62707d3: Pushed 39bae602f753: Pushed devel: digest: sha256:3213e12b3ed844449a2d305356458f4fb3cca3c022960ae3548f6cfa6fb12c2f size: 740 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-180-g2b4f231 ++ KUBEVIRT_VERSION=v0.7.0-180-g2b4f231 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ 
network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33482/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Cleaning up ...' Cleaning up ... + cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources 
found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ wc -l ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete 
apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 
'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release1 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release1 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.7.0-180-g2b4f231 ++ KUBEVIRT_VERSION=v0.7.0-180-g2b4f231 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:50a4b8ee3e07d592e7e4fbf3eb1401980a5947499dfdc3d847c085b5775aaa9a ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33482/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ 
test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace + echo 'Deploying ...' Deploying ... + [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller-cluster-admin" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created 
clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-7b7bs 0/1 ContainerCreating 0 5s disks-images-provider-g5zxr 0/1 ContainerCreating 0 6s virt-api-7d79764579-x6llj 0/1 ContainerCreating 0 8s virt-api-7d79764579-x6tjd 0/1 ContainerCreating 0 8s virt-handler-blk8f 0/1 ContainerCreating 0 8s virt-handler-zmfm7 0/1 ContainerCreating 0 8s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... + kubectl get pods -n kube-system --no-headers + cluster/kubectl.sh get pods -n kube-system --no-headers + grep -v Running disks-images-provider-7b7bs 0/1 ContainerCreating 0 6s disks-images-provider-g5zxr 0/1 ContainerCreating 0 7s virt-api-7d79764579-x6llj 0/1 ContainerCreating 0 9s virt-handler-blk8f 0/1 ContainerCreating 0 9s virt-handler-zmfm7 0/1 ContainerCreating 0 9s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ grep -v Running ++ cluster/kubectl.sh get pods -n kube-system --no-headers + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n false ']' + echo 'Waiting for KubeVirt containers to become ready ...' Waiting for KubeVirt containers to become ready ... 
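
The wait entered here polls per-container readiness rather than pod phase: the custom-columns query prints each pod's containerStatuses[*].ready flags, and the loop keeps sleeping while any flag is false. A condensed sketch of the loop visible in the trace, using the timeout=300 and sample=30 values set just above:

# Wait until no container in kube-system reports ready=false.
timeout=300; sample=30; current_time=0
while kubectl get pods -n kube-system \
        -ocustom-columns=status:status.containerStatuses[*].ready --no-headers \
        | grep -q false; do
    echo 'Waiting for KubeVirt containers to become ready ...'
    sleep "$sample"
    current_time=$((current_time + sample))
    if [ "$current_time" -gt "$timeout" ]; then
        echo 'Timed out waiting for KubeVirt containers' >&2
        exit 1
    fi
done
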
+ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ grep false
+ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
false
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n kube-system
+ cluster/kubectl.sh get pods -n kube-system
NAME                               READY     STATUS    RESTARTS   AGE
disks-images-provider-7b7bs        1/1       Running   0          1m
disks-images-provider-g5zxr        1/1       Running   0          1m
master-api-node01                  1/1       Running   1          25d
master-controllers-node01          1/1       Running   1          25d
master-etcd-node01                 1/1       Running   1          25d
virt-api-7d79764579-x6llj          1/1       Running   0          1m
virt-api-7d79764579-x6tjd          1/1       Running   1          1m
virt-controller-7d57d96b65-dx6hk   1/1       Running   0          1m
virt-controller-7d57d96b65-x56j2   1/1       Running   0          1m
virt-handler-blk8f                 1/1       Running   0          1m
virt-handler-zmfm7                 1/1       Running   0          1m
+ for i in '${namespaces[@]}'
+ current_time=0
++ kubectl get pods -n default --no-headers
++ cluster/kubectl.sh get pods -n default --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ '[' -n '' ']'
+ kubectl get pods -n default
+ cluster/kubectl.sh get pods -n default
NAME                       READY     STATUS    RESTARTS   AGE
docker-registry-1-rl562    1/1       Running   1          25d
registry-console-1-rw9zf   1/1       Running   1          25d
router-1-6cch9             1/1       Running   1          25d
+ kubectl version
+ cluster/kubectl.sh version
oc v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
features: Basic-Auth GSSAPI Kerberos SPNEGO

Server https://127.0.0.1:33479
openshift v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
+ ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ [[ openshift-3.10-release =~ windows.* ]]
+ FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ make functest
hack/dockerized "hack/build-func-tests.sh"
sha256:dcf2b21fa2ed11dcf9dbba21b1cca0ee3fad521a0e9aee61c06d0b0b66a4b200
go version go1.10 linux/amd64
go version go1.10 linux/amd64
Compiling tests...
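Before the test build above kicked off, readiness was gated twice: first on pod phase (grep -v Running) and then on per-container readiness, by rendering each pod's status.containerStatuses[*].ready flags through kubectl custom columns and grepping for false. The probe, extracted from the trace:

    # Prints one line per pod with the ready flag of each of its containers;
    # any "false" means at least one container is not yet ready.
    kubectl get pods -n kube-system \
      -o custom-columns=status:status.containerStatuses[*].ready --no-headers \
      | grep false

An empty result is the success condition, which is why the loop tests '[ -n ... ]' on the captured output.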
compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1533206809 Will run 151 of 151 specs Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running 2018/08/02 10:46:41 http: TLS handshake error from 10.129.0.1:33262: EOF level=info timestamp=2018-08-02T10:46:41.564923Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T10:46:41.849661Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:46:45.186602Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:46:51 http: TLS handshake error from 10.129.0.1:33274: EOF level=info timestamp=2018-08-02T10:46:54.637379Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:46:55.904830Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:47:01 http: TLS handshake error from 10.129.0.1:33304: EOF level=info timestamp=2018-08-02T10:47:06.166352Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T10:47:06.166916Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:47:06.293901Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T10:47:06.765749Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:47:11.329753Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:47:11 http: TLS handshake error from 10.129.0.1:33342: EOF level=info timestamp=2018-08-02T10:47:12.177373Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 10:45:36 http: TLS handshake error from 10.129.0.1:42846: EOF level=info timestamp=2018-08-02T10:45:37.384926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:45:46 http: TLS handshake error from 10.129.0.1:42858: EOF 2018/08/02 10:45:56 http: TLS handshake error from 
10.129.0.1:42870: EOF 2018/08/02 10:46:06 http: TLS handshake error from 10.129.0.1:42882: EOF 2018/08/02 10:46:16 http: TLS handshake error from 10.129.0.1:42936: EOF 2018/08/02 10:46:26 http: TLS handshake error from 10.129.0.1:42950: EOF 2018/08/02 10:46:36 http: TLS handshake error from 10.129.0.1:42982: EOF 2018/08/02 10:46:46 http: TLS handshake error from 10.129.0.1:42994: EOF 2018/08/02 10:46:56 http: TLS handshake error from 10.129.0.1:43006: EOF 2018/08/02 10:47:06 http: TLS handshake error from 10.129.0.1:43062: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T10:45:21.964599Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-08-02T10:45:21.964756Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-08-02T10:45:21.964836Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-08-02T10:45:21.964901Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer" level=info timestamp=2018-08-02T10:45:21.964962Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-08-02T10:45:21.965023Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-08-02T10:45:21.965436Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-08-02T10:45:21.965529Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-08-02T10:45:21.965705Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-08-02T10:45:21.968840Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-08-02T10:45:21.968914Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-08-02T10:45:21.968954Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-08-02T10:45:21.969011Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T10:47:14.841525Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:47:14.844961Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T10:47:14.845373Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:47:14.925676Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n" level=info timestamp=2018-08-02T10:47:14.925759Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:47:14.925796Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:47:14.925951Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:47:14.926876Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
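The error repeated here and below is the common thread in the test failures that follow: virt-handler cannot determine the type of its own mount namespace from /proc/<pid>/task/<tid>/ns/mnt, so each sync of the VMI fails and the key is re-enqueued. On Linux, an ns entry is a magic symlink whose target encodes the namespace type and inode, so detection of the kind that is failing generally reduces to a readlink. A rough illustration of the concept, not KubeVirt's actual code (which sits around pos=vm.go:426 per the log):

    # /proc/$$/ns/mnt is a symlink with a target like "mnt:[4026531840]".
    ns_path=/proc/$$/ns/mnt
    target=$(readlink "$ns_path") || { echo "cannot open $ns_path" >&2; exit 1; }
    ns_type=${target%%:*}   # the part before ":" is the namespace type, here "mnt"
    echo "namespace type: $ns_type"

If the readlink itself fails (the task has exited, or /proc does not look the way the process expects inside its container), type detection fails in exactly the way the log shows, and the handler retries indefinitely.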
level=info timestamp=2018-08-02T10:47:14.927017Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:47:15.087250Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n" level=info timestamp=2018-08-02T10:47:15.087366Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:47:15.087397Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:47:15.087555Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:47:15.088660Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T10:47:15.088823Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T10:45:33.392212Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-02T10:45:33.411429Z pos=vm.go:212 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-02T10:45:33.412469Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains" level=info timestamp=2018-08-02T10:45:33.523397Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller" level=info timestamp=2018-08-02T10:45:33.588365Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started" level=info timestamp=2018-08-02T10:45:33.602142Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started" Pod name: virt-launcher-testvmi8s6c6-g5qj4 Pod phase: Running level=info timestamp=2018-08-02T10:47:03.683800Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T10:47:03.684011Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T10:47:03.685698Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T10:47:13.705401Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T10:47:13.784834Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi8s6c6" level=info timestamp=2018-08-02T10:47:13.786633Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T10:47:13.786903Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T10:49:10.008500Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:49:11 http: TLS handshake error from 10.129.0.1:33498: EOF level=info timestamp=2018-08-02T10:49:13.446273Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:49:20.451116Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:49:21 http: TLS handshake error from 10.129.0.1:33512: EOF level=info timestamp=2018-08-02T10:49:27.344842Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:49:30.813319Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:49:31 http: TLS handshake error from 10.129.0.1:33524: EOF level=info timestamp=2018-08-02T10:49:38.762593Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info 
timestamp=2018-08-02T10:49:41.051812Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T10:49:41.068346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:49:41 http: TLS handshake error from 10.129.0.1:33536: EOF level=info timestamp=2018-08-02T10:49:43.684236Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:49:51.423375Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:49:51 http: TLS handshake error from 10.129.0.1:33548: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 10:47:36 http: TLS handshake error from 10.129.0.1:43098: EOF 2018/08/02 10:47:46 http: TLS handshake error from 10.129.0.1:43110: EOF 2018/08/02 10:47:56 http: TLS handshake error from 10.129.0.1:43122: EOF 2018/08/02 10:48:06 http: TLS handshake error from 10.129.0.1:43146: EOF 2018/08/02 10:48:16 http: TLS handshake error from 10.129.0.1:43158: EOF 2018/08/02 10:48:26 http: TLS handshake error from 10.129.0.1:43170: EOF 2018/08/02 10:48:36 http: TLS handshake error from 10.129.0.1:43182: EOF 2018/08/02 10:48:46 http: TLS handshake error from 10.129.0.1:43194: EOF 2018/08/02 10:48:56 http: TLS handshake error from 10.129.0.1:43206: EOF 2018/08/02 10:49:06 http: TLS handshake error from 10.129.0.1:43218: EOF level=info timestamp=2018-08-02T10:49:11.086893Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:49:16 http: TLS handshake error from 10.129.0.1:43230: EOF 2018/08/02 10:49:26 http: TLS handshake error from 10.129.0.1:43244: EOF 2018/08/02 10:49:36 http: TLS handshake error from 10.129.0.1:43256: EOF 2018/08/02 10:49:46 http: TLS handshake error from 10.129.0.1:43268: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T10:45:21.964599Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmirsInformer" level=info timestamp=2018-08-02T10:45:21.964756Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer configMapInformer" level=info timestamp=2018-08-02T10:45:21.964836Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmInformer" level=info timestamp=2018-08-02T10:45:21.964901Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer limitrangeInformer" level=info timestamp=2018-08-02T10:45:21.964962Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-08-02T10:45:21.965023Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-08-02T10:45:21.965436Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-08-02T10:45:21.965529Z pos=virtinformers.go:107 component=virt-controller service=http 
msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-08-02T10:45:21.965705Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-08-02T10:45:21.968840Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-08-02T10:45:21.968914Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-08-02T10:45:21.968954Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-08-02T10:45:21.969011Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T10:47:35.283524Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:47:35.289543Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T10:47:35.290203Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:47:55.771652Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n" level=info timestamp=2018-08-02T10:47:55.772690Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:47:55.773061Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:47:55.773728Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:47:55.779637Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T10:47:55.780833Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:48:36.743229Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n" level=info timestamp=2018-08-02T10:48:36.744235Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:48:36.744350Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:48:36.744979Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:48:36.755573Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T10:45:33.392212Z pos=virt-handler.go:87 component=virt-handler hostname=node01 level=info timestamp=2018-08-02T10:45:33.411429Z pos=vm.go:212 component=virt-handler msg="Starting virt-handler controller." 
level=info timestamp=2018-08-02T10:45:33.412469Z pos=cache.go:151 component=virt-handler msg="Synchronizing domains"
level=info timestamp=2018-08-02T10:45:33.523397Z pos=device_controller.go:133 component=virt-handler msg="Starting device plugin controller"
level=info timestamp=2018-08-02T10:45:33.588365Z pos=device_controller.go:127 component=virt-handler msg="tun device plugin started"
level=info timestamp=2018-08-02T10:45:33.602142Z pos=device_controller.go:127 component=virt-handler msg="kvm device plugin started"

Pod name: virt-launcher-testvmi8s6c6-g5qj4
Pod phase: Running
level=info timestamp=2018-08-02T10:47:03.683800Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T10:47:03.684011Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T10:47:03.685698Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T10:47:13.705401Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T10:47:13.784834Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi8s6c6"
level=info timestamp=2018-08-02T10:47:13.786633Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T10:47:13.786903Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [181.564 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with 3 CPU cores
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56
      should report 3 cpu cores under guest OS [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62

      Unexpected Warning event received: testvmi8s6c6,5dbb121f-9641-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt
      Expected
          : Warning
      not to equal
          : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
STEP: Starting a VirtualMachineInstance
level=info timestamp=2018-08-02T10:46:53.751385Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi8s6c6-g5qj4"
level=info timestamp=2018-08-02T10:47:14.268182Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi8s6c6-g5qj4"
level=error timestamp=2018-08-02T10:47:14.390322Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt"
•
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod
phase: Running 2018/08/02 10:49:31 http: TLS handshake error from 10.129.0.1:33524: EOF level=info timestamp=2018-08-02T10:49:38.762593Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:49:41.051812Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T10:49:41.068346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:49:41 http: TLS handshake error from 10.129.0.1:33536: EOF level=info timestamp=2018-08-02T10:49:43.684236Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:49:51.423375Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:49:51 http: TLS handshake error from 10.129.0.1:33548: EOF level=info timestamp=2018-08-02T10:49:57.640910Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:50:01 http: TLS handshake error from 10.129.0.1:33560: EOF level=info timestamp=2018-08-02T10:50:01.767914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:50:09.456542Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:50:11 http: TLS handshake error from 10.129.0.1:33572: EOF level=info timestamp=2018-08-02T10:50:12.027759Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:50:13.926626Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 10:48:16 http: TLS handshake error from 10.129.0.1:43158: EOF 2018/08/02 10:48:26 http: TLS handshake error from 10.129.0.1:43170: EOF 2018/08/02 10:48:36 http: TLS handshake error from 10.129.0.1:43182: EOF 2018/08/02 10:48:46 http: TLS handshake error from 10.129.0.1:43194: EOF 2018/08/02 10:48:56 http: TLS handshake error from 10.129.0.1:43206: EOF 2018/08/02 10:49:06 http: TLS handshake error from 10.129.0.1:43218: EOF level=info timestamp=2018-08-02T10:49:11.086893Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:49:16 http: TLS handshake error from 10.129.0.1:43230: EOF 2018/08/02 10:49:26 http: TLS handshake error from 10.129.0.1:43244: EOF 2018/08/02 10:49:36 
http: TLS handshake error from 10.129.0.1:43256: EOF 2018/08/02 10:49:46 http: TLS handshake error from 10.129.0.1:43268: EOF 2018/08/02 10:49:56 http: TLS handshake error from 10.129.0.1:43280: EOF 2018/08/02 10:50:06 http: TLS handshake error from 10.129.0.1:43292: EOF level=info timestamp=2018-08-02T10:50:09.596675Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T10:50:11.508993Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T10:45:21.964962Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-08-02T10:45:21.965023Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-08-02T10:45:21.965436Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-08-02T10:45:21.965529Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-08-02T10:45:21.965705Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-08-02T10:45:21.968840Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-08-02T10:45:21.968914Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." level=info timestamp=2018-08-02T10:45:21.968954Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-08-02T10:45:21.969011Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." 
level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T10:49:54.821955Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n" level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed." 
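This StorageError is a different failure mode from the namespace one: the status update carried the VMI's UID as a precondition, but by the time it reached the API server the object had been deleted (the test had given up on testvmi8s6c6 and torn it down), so the stored UID no longer matched; note the empty "UID in object meta: ". The entries below show the handler recovering: it re-enqueues, observes "existing: false", and the cleanup sync succeeds. The same optimistic check can be sketched against the object named in the log (name and UID taken from the entries above):

    expected_uid=5dbb121f-9641-11e8-bba8-525500d15501
    actual_uid=$(kubectl get virtualmachineinstances.kubevirt.io testvmi8s6c6 \
      -n kubevirt-test-default -o jsonpath='{.metadata.uid}' 2>/dev/null)
    # A missing object or a different UID means an update would hit a stale target.
    if [ "$actual_uid" != "$expected_uid" ]; then
      echo "VMI deleted or recreated; the stale status update must be dropped"
    fi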
level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n" level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n" level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T10:50:14.295633Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:50:14.297541Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T10:50:14.298541Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6gkrx" level=info timestamp=2018-08-02T10:50:14.388845Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6gkrx, existing: true\n" level=info timestamp=2018-08-02T10:50:14.389539Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:50:14.390139Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:50:14.390662Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:50:14.395953Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T10:50:14.400940Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6gkrx" level=info timestamp=2018-08-02T10:50:14.566218Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6gkrx, existing: true\n" level=info timestamp=2018-08-02T10:50:14.566445Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:50:14.566520Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:50:14.566737Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T10:50:14.568842Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T10:50:14.569298Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6gkrx" Pod name: virt-launcher-testvmi6gkrx-fgl4s Pod phase: Running level=info timestamp=2018-08-02T10:50:02.058289Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T10:50:02.058520Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T10:50:02.060216Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T10:50:12.855142Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T10:50:12.933578Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6gkrx" level=info timestamp=2018-08-02T10:50:12.935066Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T10:50:12.935500Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T10:52:11.409746Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:52:11 http: TLS handshake error from 10.129.0.1:33716: EOF level=info timestamp=2018-08-02T10:52:14.861801Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:52:14.941494Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:52:21 http: TLS handshake error from 10.129.0.1:33730: EOF level=info timestamp=2018-08-02T10:52:25.027013Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:52:29.323836Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:52:31 http: TLS handshake error from 10.129.0.1:33742: EOF level=info timestamp=2018-08-02T10:52:35.559204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:52:41 http: TLS handshake error from 10.129.0.1:33754: EOF level=info timestamp=2018-08-02T10:52:41.627979Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" 
proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:52:45.138347Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T10:52:45.912459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 10:52:51 http: TLS handshake error from 10.129.0.1:33766: EOF level=info timestamp=2018-08-02T10:52:56.082349Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 10:51:06 http: TLS handshake error from 10.129.0.1:43364: EOF level=info timestamp=2018-08-02T10:51:10.999655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:51:16 http: TLS handshake error from 10.129.0.1:43376: EOF 2018/08/02 10:51:26 http: TLS handshake error from 10.129.0.1:43388: EOF 2018/08/02 10:51:36 http: TLS handshake error from 10.129.0.1:43400: EOF level=info timestamp=2018-08-02T10:51:41.247300Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:51:46 http: TLS handshake error from 10.129.0.1:43412: EOF 2018/08/02 10:51:56 http: TLS handshake error from 10.129.0.1:43424: EOF 2018/08/02 10:52:06 http: TLS handshake error from 10.129.0.1:43436: EOF level=info timestamp=2018-08-02T10:52:10.925650Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:52:16 http: TLS handshake error from 10.129.0.1:43448: EOF 2018/08/02 10:52:26 http: TLS handshake error from 10.129.0.1:43462: EOF 2018/08/02 10:52:36 http: TLS handshake error from 10.129.0.1:43474: EOF level=info timestamp=2018-08-02T10:52:40.971425Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 10:52:46 http: TLS handshake error from 10.129.0.1:43486: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T10:45:21.964962Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiInformer" level=info timestamp=2018-08-02T10:45:21.965023Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtPodInformer" level=info timestamp=2018-08-02T10:45:21.965436Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer kubeVirtNodeInformer" level=info timestamp=2018-08-02T10:45:21.965529Z pos=virtinformers.go:107 component=virt-controller service=http msg="STARTING informer vmiPresetInformer" level=info timestamp=2018-08-02T10:45:21.965705Z pos=vm.go:85 component=virt-controller service=http msg="Starting VirtualMachine controller." level=info timestamp=2018-08-02T10:45:21.968840Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller." level=info timestamp=2018-08-02T10:45:21.968914Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller." 
level=info timestamp=2018-08-02T10:45:21.968954Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller." level=info timestamp=2018-08-02T10:45:21.969011Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer." level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T10:49:54.821955Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n" level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." 
level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T10:50:34.849373Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:50:34.852748Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:50:34.852945Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6gkrx"
level=info timestamp=2018-08-02T10:50:55.334453Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6gkrx, existing: true\n"
level=info timestamp=2018-08-02T10:50:55.335486Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:50:55.335586Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:50:55.337617Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:50:55.356897Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:50:55.359377Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6gkrx"
level=info timestamp=2018-08-02T10:51:36.325128Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6gkrx, existing: true\n"
level=info timestamp=2018-08-02T10:51:36.325912Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:51:36.326062Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:51:36.326476Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:51:36.335800Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:51:36.343010Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6gkrx"

Pod name: virt-launcher-testvmi6gkrx-fgl4s
Pod phase: Running
level=info timestamp=2018-08-02T10:50:02.058289Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T10:50:02.058520Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T10:50:02.060216Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T10:50:12.855142Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T10:50:12.933578Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6gkrx"
level=info timestamp=2018-08-02T10:50:12.935066Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T10:50:12.935500Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

------------------------------
• Failure [180.783 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-2Mi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        Unexpected Warning event received: testvmi6gkrx,caa5d74d-9641-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt
        Expected
            : Warning
        not to equal
            : Warning

        /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
STEP: Starting a VM
level=info timestamp=2018-08-02T10:49:56.029980Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6gkrx kind=VirtualMachineInstance uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6gkrx-fgl4s"
level=info timestamp=2018-08-02T10:50:14.666443Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6gkrx kind=VirtualMachineInstance uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Pod ownership transferred to the node virt-launcher-testvmi6gkrx-fgl4s"
level=error timestamp=2018-08-02T10:50:14.833624Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6gkrx kind=VirtualMachineInstance uid=caa5d74d-9641-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt"
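The failure above has the shape every failing spec in this run shares: the harness (tests/utils.go:247) watches the VMI's events while it starts and treats any event of type Warning as fatal, which is why the assertion reads Warning "not to equal" Warning. A minimal sketch of that pattern, assuming a plain client-go clientset with the contextless Watch signature of the client-go vintage vendored here; the helper name failOnWarningEvent is hypothetical, not KubeVirt's actual code:

```go
package sketch

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/fields"
	"k8s.io/client-go/kubernetes"
)

// failOnWarningEvent watches events referencing the named object and returns
// an error as soon as one with type "Warning" arrives. If the timeout passes
// without a warning, the object is considered healthy.
func failOnWarningEvent(client kubernetes.Interface, namespace, name string, timeout time.Duration) error {
	selector := fields.Set{"involvedObject.name": name}.AsSelector().String()
	w, err := client.CoreV1().Events(namespace).Watch(metav1.ListOptions{FieldSelector: selector})
	if err != nil {
		return err
	}
	defer w.Stop()

	deadline := time.After(timeout)
	for {
		select {
		case ev, ok := <-w.ResultChan():
			if !ok {
				return fmt.Errorf("event watch closed unexpectedly")
			}
			if e, isEvent := ev.Object.(*corev1.Event); isEvent && e.Type == corev1.EventTypeWarning {
				// Mirrors the "unexpected warning event received" failure mode.
				return fmt.Errorf("unexpected warning event received: %s", e.Message)
			}
		case <-deadline:
			return nil // no warning within the window
		}
	}
}
```

With a helper like this, the VMI itself never has to reach Running for the test to fail: the first Warning event ends the spec, which matches the ~180 second timeouts seen on the failures here.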
S [SKIPPING] [0.508 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  VirtualMachineInstance definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55
    with hugepages
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:164
      should consume hugepages
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
        hugepages-1Gi [It]
        /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

        No node with hugepages hugepages-1Gi capacity
        /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:216
------------------------------
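The skip above is a capacity precondition rather than an error: the 1Gi variant only runs when some node actually advertises hugepages-1Gi. A sketch of how such a precondition check can look, again assuming a client-go clientset with era-appropriate contextless calls; anyNodeHasCapacity is an illustrative name, not the suite's helper:

```go
package sketch

import (
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// anyNodeHasCapacity reports whether at least one node lists a non-zero
// capacity for the given resource, e.g. corev1.ResourceName("hugepages-1Gi").
func anyNodeHasCapacity(client kubernetes.Interface, resource corev1.ResourceName) (bool, error) {
	nodes, err := client.CoreV1().Nodes().List(metav1.ListOptions{})
	if err != nil {
		return false, err
	}
	for _, node := range nodes.Items {
		if quantity, ok := node.Status.Capacity[resource]; ok && !quantity.IsZero() {
			return true, nil
		}
	}
	return false, nil
}
```

When the check returns false, the spec calls ginkgo's Skip with a message like the one recorded here, so a cluster without 1Gi hugepages produces an S rather than a failure.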
•

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T10:52:35.559204Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:52:41 http: TLS handshake error from 10.129.0.1:33754: EOF
level=info timestamp=2018-08-02T10:52:41.627979Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:52:45.138347Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:52:45.912459Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:52:51 http: TLS handshake error from 10.129.0.1:33766: EOF
level=info timestamp=2018-08-02T10:52:56.082349Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:52:59.610435Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:53:01 http: TLS handshake error from 10.129.0.1:33778: EOF
level=info timestamp=2018-08-02T10:53:06.569035Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:53:11.039926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:53:11 http: TLS handshake error from 10.129.0.1:33790: EOF
level=info timestamp=2018-08-02T10:53:11.786604Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:53:15.394993Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:53:16.837913Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 10:51:26 http: TLS handshake error from 10.129.0.1:43388: EOF
2018/08/02 10:51:36 http: TLS handshake error from 10.129.0.1:43400: EOF
level=info timestamp=2018-08-02T10:51:41.247300Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:51:46 http: TLS handshake error from 10.129.0.1:43412: EOF
2018/08/02 10:51:56 http: TLS handshake error from 10.129.0.1:43424: EOF
2018/08/02 10:52:06 http: TLS handshake error from 10.129.0.1:43436: EOF
level=info timestamp=2018-08-02T10:52:10.925650Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:52:16 http: TLS handshake error from 10.129.0.1:43448: EOF
2018/08/02 10:52:26 http: TLS handshake error from 10.129.0.1:43462: EOF
2018/08/02 10:52:36 http: TLS handshake error from 10.129.0.1:43474: EOF
level=info timestamp=2018-08-02T10:52:40.971425Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:52:46 http: TLS handshake error from 10.129.0.1:43486: EOF
2018/08/02 10:52:56 http: TLS handshake error from 10.129.0.1:43498: EOF
2018/08/02 10:53:06 http: TLS handshake error from 10.129.0.1:43510: EOF
2018/08/02 10:53:16 http: TLS handshake error from 10.129.0.1:43522: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:45:21.968840Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-08-02T10:45:21.968914Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-08-02T10:45:21.968954Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-08-02T10:45:21.969011Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:54.821955Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
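The StorageError entries in this controller log ("Precondition failed: UID in precondition: ..., UID in object meta: ") are what the API server returns when a request pins an object's UID and the object with that UID is already gone, typically because the VMI was deleted between the controller reading it and writing it back; re-enqueuing, as the log shows, is the usual reaction. For illustration only, a delete guarded by a UID precondition looks roughly like this (hypothetical helper, using the contextless client-go signatures of this era):

```go
package sketch

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
)

// deleteExactPod deletes the pod only if its UID still matches uid, so a
// recreated pod with the same name is left alone. If the UID no longer
// matches, the API server answers with a "Precondition failed" error like
// the ones logged above.
func deleteExactPod(client kubernetes.Interface, namespace, name string, uid types.UID) error {
	return client.CoreV1().Pods(namespace).Delete(name, &metav1.DeleteOptions{
		Preconditions: &metav1.Preconditions{UID: &uid},
	})
}
```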
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n"
level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T10:53:15.736470Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:53:15.738534Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:53:15.740564Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4wzkn"
level=info timestamp=2018-08-02T10:53:15.823277Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4wzkn, existing: true\n"
level=info timestamp=2018-08-02T10:53:15.823370Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:53:15.823399Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:53:15.823515Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:53:15.825463Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:53:15.825706Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4wzkn"
level=info timestamp=2018-08-02T10:53:15.986655Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4wzkn, existing: true\n"
level=info timestamp=2018-08-02T10:53:15.986751Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:53:15.986780Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:53:15.986891Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:53:15.987746Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:53:15.987919Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4wzkn"

Pod name: virt-launcher-testvmi4wzkn-94p7m
Pod phase: Running
level=info timestamp=2018-08-02T10:53:03.732920Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T10:53:03.736575Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T10:53:03.744059Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T10:53:13.758291Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T10:53:13.843486Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4wzkn"
level=info timestamp=2018-08-02T10:53:13.848816Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T10:53:13.850326Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 10:55:21 http: TLS handshake error from 10.129.0.1:33946: EOF
level=info timestamp=2018-08-02T10:55:22.411726Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:22.460803Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:22.485673Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:30.195409Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:55:31 http: TLS handshake error from 10.129.0.1:33960: EOF
level=info timestamp=2018-08-02T10:55:31.596578Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:34.901660Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T10:55:34.905468Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T10:55:40.779458Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:55:41 http: TLS handshake error from 10.129.0.1:33972: EOF
level=info timestamp=2018-08-02T10:55:42.975919Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:46.638835Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:50.960895Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:55:51 http: TLS handshake error from 10.129.0.1:33984: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 10:53:56 http: TLS handshake error from 10.129.0.1:43570: EOF
2018/08/02 10:54:06 http: TLS handshake error from 10.129.0.1:43582: EOF
2018/08/02 10:54:16 http: TLS handshake error from 10.129.0.1:43594: EOF
2018/08/02 10:54:26 http: TLS handshake error from 10.129.0.1:43606: EOF
2018/08/02 10:54:36 http: TLS handshake error from 10.129.0.1:43618: EOF
level=info timestamp=2018-08-02T10:54:40.902319Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:54:46 http: TLS handshake error from 10.129.0.1:43630: EOF
2018/08/02 10:54:56 http: TLS handshake error from 10.129.0.1:43642: EOF
2018/08/02 10:55:06 http: TLS handshake error from 10.129.0.1:43654: EOF
2018/08/02 10:55:16 http: TLS handshake error from 10.129.0.1:43666: EOF
2018/08/02 10:55:26 http: TLS handshake error from 10.129.0.1:43680: EOF
2018/08/02 10:55:36 http: TLS handshake error from 10.129.0.1:43692: EOF
level=info timestamp=2018-08-02T10:55:40.938692Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:55:46 http: TLS handshake error from 10.129.0.1:43704: EOF
2018/08/02 10:55:56 http: TLS handshake error from 10.129.0.1:43716: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:45:21.968840Z pos=node.go:104 component=virt-controller service=http msg="Starting node controller."
level=info timestamp=2018-08-02T10:45:21.968914Z pos=vmi.go:129 component=virt-controller service=http msg="Starting vmi controller."
level=info timestamp=2018-08-02T10:45:21.968954Z pos=replicaset.go:111 component=virt-controller service=http msg="Starting VirtualMachineInstanceReplicaSet controller."
level=info timestamp=2018-08-02T10:45:21.969011Z pos=preset.go:74 component=virt-controller service=http msg="Starting Virtual Machine Initializer."
level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:54.821955Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n"
level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T10:53:36.205431Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:53:36.224852Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:53:36.235370Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4wzkn"
level=info timestamp=2018-08-02T10:53:56.722363Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4wzkn, existing: true\n"
level=info timestamp=2018-08-02T10:53:56.722758Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:53:56.722851Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:53:56.723203Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:53:56.726573Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:53:56.727281Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4wzkn"
level=info timestamp=2018-08-02T10:54:37.688773Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4wzkn, existing: true\n"
level=info timestamp=2018-08-02T10:54:37.691282Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:54:37.692004Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:54:37.693599Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:54:37.703541Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:54:37.721706Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4wzkn"

Pod name: virt-launcher-testvmi4wzkn-94p7m
Pod phase: Running
level=info timestamp=2018-08-02T10:53:03.732920Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T10:53:03.736575Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T10:53:03.744059Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T10:53:13.758291Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T10:53:13.843486Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4wzkn"
level=info timestamp=2018-08-02T10:53:13.848816Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T10:53:13.850326Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

------------------------------
• Failure in Spec Setup (BeforeEach) [181.090 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model defined [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:340
      should report defined CPU model
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:341

      Unexpected Warning event received: testvmi4wzkn,378dba91-9642-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt
      Expected
          : Warning
      not to equal
          : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T10:52:58.958444Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi4wzkn kind=VirtualMachineInstance uid=378dba91-9642-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi4wzkn-94p7m"
level=info timestamp=2018-08-02T10:53:16.213322Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi4wzkn kind=VirtualMachineInstance uid=378dba91-9642-11e8-bba8-525500d15501 msg="Pod ownership transferred to the node virt-launcher-testvmi4wzkn-94p7m"
level=error timestamp=2018-08-02T10:53:16.388365Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi4wzkn kind=VirtualMachineInstance uid=378dba91-9642-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt"
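The recurring reason "Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/.../ns/mnt" points at code that inspects a task's namespace symlinks under /proc. On Linux, reading such a link yields a target like "mnt:[4026531840]", and a parser fails in exactly this way when the link cannot be read or the target has an unexpected shape. A rough sketch of the mechanism, illustrative only and not virt-handler's actual code:

```go
package sketch

import (
	"fmt"
	"os"
	"strings"
)

// namespaceType extracts the namespace kind ("mnt", "net", "pid", ...) from a
// /proc/<pid>/task/<tid>/ns/<kind> symlink. On Linux the link target looks
// like "mnt:[4026531840]": the kind before the colon, the inode in brackets.
func namespaceType(path string) (string, error) {
	target, err := os.Readlink(path)
	if err != nil {
		// Readlink fails if the task has already exited, which is one way a
		// transient /proc/<pid>/task/<tid> path can stop resolving mid-sync.
		return "", fmt.Errorf("failed to open current namespace: %v", err)
	}
	idx := strings.Index(target, ":[")
	if idx < 0 {
		return "", fmt.Errorf("error detecting namespace type from path: %s", path)
	}
	return target[:idx], nil
}
```

Note that the failing paths name specific task IDs (1456, 1476, 1483, 1485) under the same process, consistent with a per-thread lookup racing against threads that come and go.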
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T10:55:34.905468Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T10:55:40.779458Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:55:41 http: TLS handshake error from 10.129.0.1:33972: EOF
level=info timestamp=2018-08-02T10:55:42.975919Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:46.638835Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:55:50.960895Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:55:51 http: TLS handshake error from 10.129.0.1:33984: EOF
level=info timestamp=2018-08-02T10:56:01.088523Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:56:01 http: TLS handshake error from 10.129.0.1:33996: EOF
level=info timestamp=2018-08-02T10:56:01.793993Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:56:10.961231Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T10:56:11.168726Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:56:11 http: TLS handshake error from 10.129.0.1:34008: EOF
level=info timestamp=2018-08-02T10:56:13.231821Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:56:17.012613Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 10:54:16 http: TLS handshake error from 10.129.0.1:43594: EOF
2018/08/02 10:54:26 http: TLS handshake error from 10.129.0.1:43606: EOF
2018/08/02 10:54:36 http: TLS handshake error from 10.129.0.1:43618: EOF
level=info timestamp=2018-08-02T10:54:40.902319Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:54:46 http: TLS handshake error from 10.129.0.1:43630: EOF
2018/08/02 10:54:56 http: TLS handshake error from 10.129.0.1:43642: EOF
2018/08/02 10:55:06 http: TLS handshake error from 10.129.0.1:43654: EOF
2018/08/02 10:55:16 http: TLS handshake error from 10.129.0.1:43666: EOF
2018/08/02 10:55:26 http: TLS handshake error from 10.129.0.1:43680: EOF
2018/08/02 10:55:36 http: TLS handshake error from 10.129.0.1:43692: EOF
level=info timestamp=2018-08-02T10:55:40.938692Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:55:46 http: TLS handshake error from 10.129.0.1:43704: EOF
2018/08/02 10:55:56 http: TLS handshake error from 10.129.0.1:43716: EOF
2018/08/02 10:56:06 http: TLS handshake error from 10.129.0.1:43728: EOF
2018/08/02 10:56:16 http: TLS handshake error from 10.129.0.1:43740: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:54.821955Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
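The "the object has been modified; please apply your changes to the latest version and try again" reenqueues just above are plain optimistic-concurrency conflicts: the update carried a stale resourceVersion because another writer got in first. Controllers typically just re-enqueue, as seen here; the equivalent client-side idiom is to re-read and retry. A sketch with an illustrative helper, again using the contextless client-go calls of this era:

```go
package sketch

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/util/retry"
)

// labelPod re-reads the pod on every attempt so the update always carries the
// latest resourceVersion, retrying only when the API server reports a
// conflict (the "object has been modified" error).
func labelPod(client kubernetes.Interface, namespace, name, key, value string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		pod, err := client.CoreV1().Pods(namespace).Get(name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		if pod.Labels == nil {
			pod.Labels = map[string]string{}
		}
		pod.Labels[key] = value
		_, err = client.CoreV1().Pods(namespace).Update(pod)
		return err
	})
}
```

Unlike the namespace-detection errors, these conflicts are benign and self-heal after a retry or two, which is why they appear at level=info rather than level=error.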
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n"
level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T10:56:15.748310Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:56:15.752304Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:56:15.753856Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:15.834255Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizbz7c, existing: true\n"
level=info timestamp=2018-08-02T10:56:15.834444Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:56:15.836508Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:56:15.838181Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:56:15.842514Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:56:15.842907Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:16.008221Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizbz7c, existing: true\n"
level=info timestamp=2018-08-02T10:56:16.036932Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:56:16.037353Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:56:16.042529Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:56:16.046544Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:56:16.047217Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"

Pod name: virt-launcher-testvmizbz7c-rzn77
Pod phase: Running
level=info timestamp=2018-08-02T10:56:03.909365Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T10:56:03.909718Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T10:56:03.911702Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T10:56:13.924199Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T10:56:14.025521Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmizbz7c"
level=info timestamp=2018-08-02T10:56:14.033493Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T10:56:14.034583Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T10:58:23.614228Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:23.660461Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:23.699773Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:24.346115Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:58:31 http: TLS handshake error from 10.129.0.1:34178: EOF
level=info timestamp=2018-08-02T10:58:33.238564Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:34.581361Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:34.922700Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T10:58:34.928088Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/08/02 10:58:41 http: TLS handshake error from 10.129.0.1:34190: EOF
level=info timestamp=2018-08-02T10:58:44.683322Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:44.768768Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:48.487571Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:58:51 http: TLS handshake error from 10.129.0.1:34202: EOF
level=info timestamp=2018-08-02T10:58:54.973833Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 10:57:06 http: TLS handshake error from 10.129.0.1:43800: EOF
2018/08/02 10:57:16 http: TLS handshake error from 10.129.0.1:43812: EOF
2018/08/02 10:57:26 http: TLS handshake error from 10.129.0.1:43824: EOF
2018/08/02 10:57:36 http: TLS handshake error from 10.129.0.1:43836: EOF
level=info timestamp=2018-08-02T10:57:40.974947Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:57:46 http: TLS handshake error from 10.129.0.1:43848: EOF
2018/08/02 10:57:56 http: TLS handshake error from 10.129.0.1:43860: EOF
2018/08/02 10:58:06 http: TLS handshake error from 10.129.0.1:43872: EOF
level=info timestamp=2018-08-02T10:58:10.921732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:58:16 http: TLS handshake error from 10.129.0.1:43884: EOF
2018/08/02 10:58:26 http: TLS handshake error from 10.129.0.1:43898: EOF
2018/08/02 10:58:36 http: TLS handshake error from 10.129.0.1:43910: EOF
level=info timestamp=2018-08-02T10:58:40.978606Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:58:46 http: TLS handshake error from 10.129.0.1:43922: EOF
2018/08/02 10:58:56 http: TLS handshake error from 10.129.0.1:43934: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:46:53.766673Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:46:53.772028Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:54.821955Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http
action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n" level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed." level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6" level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n" level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n" level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T10:56:36.268081Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:56:36.271278Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:56:36.272048Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:56.753693Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizbz7c, existing: true\n"
level=info timestamp=2018-08-02T10:56:56.756351Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:56:56.757820Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:56:56.758388Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:56:56.771893Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:56:56.774457Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:57:37.736476Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizbz7c, existing: true\n"
level=info timestamp=2018-08-02T10:57:37.737155Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:57:37.737278Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:57:37.737737Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:57:37.740570Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:57:37.743246Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
Pod name: virt-launcher-testvmizbz7c-rzn77
Pod phase: Running
level=info timestamp=2018-08-02T10:56:03.909365Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T10:56:03.909718Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T10:56:03.911702Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T10:56:13.924199Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T10:56:14.025521Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmizbz7c"
level=info timestamp=2018-08-02T10:56:14.033493Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T10:56:14.034583Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.056 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  with CPU spec
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294
    when CPU model equals to passthrough [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:368
      should report exactly the same model as node CPU
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:369

      Unexpected Warning event received: testvmizbz7c,a3644963-9642-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt
      Expected
        : Warning
      not to equal
        : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T10:55:59.762457Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid=a3644963-9642-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmizbz7c-rzn77"
level=info timestamp=2018-08-02T10:56:16.283937Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid=a3644963-9642-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmizbz7c-rzn77"
level=error timestamp=2018-08-02T10:56:16.387402Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid=a3644963-9642-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T10:58:34.928088Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/08/02 10:58:41 http: TLS handshake error from 10.129.0.1:34190: EOF
level=info timestamp=2018-08-02T10:58:44.683322Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:44.768768Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:58:48.487571Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:58:51 http: TLS handshake error from 10.129.0.1:34202: EOF
level=info timestamp=2018-08-02T10:58:54.973833Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 10:59:01 http: TLS handshake error from 10.129.0.1:34214: EOF
level=info timestamp=2018-08-02T10:59:03.356547Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:59:05.222447Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:59:11.008715Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:59:11 http: TLS handshake error from 10.129.0.1:34226: EOF
level=info timestamp=2018-08-02T10:59:14.896112Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:59:15.480272Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T10:59:18.965689Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 10:57:26 http: TLS handshake error from 10.129.0.1:43824: EOF
2018/08/02 10:57:36 http: TLS handshake error from 10.129.0.1:43836: EOF
level=info timestamp=2018-08-02T10:57:40.974947Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:57:46 http: TLS handshake error from 10.129.0.1:43848: EOF
2018/08/02 10:57:56 http: TLS handshake error from 10.129.0.1:43860: EOF
2018/08/02 10:58:06 http: TLS handshake error from 10.129.0.1:43872: EOF
level=info timestamp=2018-08-02T10:58:10.921732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:58:16 http: TLS handshake error from 10.129.0.1:43884: EOF
2018/08/02 10:58:26 http: TLS handshake error from 10.129.0.1:43898: EOF
2018/08/02 10:58:36 http: TLS handshake error from 10.129.0.1:43910: EOF
level=info timestamp=2018-08-02T10:58:40.978606Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 10:58:46 http: TLS handshake error from 10.129.0.1:43922: EOF
2018/08/02 10:58:56 http: TLS handshake error from 10.129.0.1:43934: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:59:01.155837Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n"
level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T10:59:17.836331Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:59:17.839123Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:59:17.840038Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T10:59:17.921244Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T10:59:17.921329Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:59:17.921359Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:59:17.921490Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:59:17.922237Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:59:17.922386Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T10:59:18.082844Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T10:59:18.083491Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:59:18.083749Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:59:18.084186Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:59:18.086128Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:59:18.086769Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
Pod name: virt-launcher-testvmih78jm-sg99j
Pod phase: Running
level=info timestamp=2018-08-02T10:59:05.351079Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T10:59:05.351447Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T10:59:05.353046Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T10:59:15.364407Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T10:59:15.443597Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmih78jm"
level=info timestamp=2018-08-02T10:59:15.445242Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T10:59:15.445502Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:01:24.526582Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:24.541952Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:24.555680Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:28.271293Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:01:31 http: TLS handshake error from 10.129.0.1:34396: EOF
level=info timestamp=2018-08-02T11:01:34.623404Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:01:34.636097Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:01:34.812908Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:38.569568Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:01:41 http: TLS handshake error from 10.129.0.1:34408: EOF
level=info timestamp=2018-08-02T11:01:46.233952Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:48.900280Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:50.154233Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:01:51 http: TLS handshake error from 10.129.0.1:34420: EOF
level=info timestamp=2018-08-02T11:01:59.011598Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
level=info timestamp=2018-08-02T11:00:11.077303Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:00:11.117097Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:00:16 http: TLS handshake error from 10.129.0.1:44030: EOF
2018/08/02 11:00:26 http: TLS handshake error from 10.129.0.1:44042: EOF
2018/08/02 11:00:36 http: TLS handshake error from 10.129.0.1:44054: EOF
2018/08/02 11:00:46 http: TLS handshake error from 10.129.0.1:44066: EOF
2018/08/02 11:00:56 http: TLS handshake error from 10.129.0.1:44078: EOF
2018/08/02 11:01:06 http: TLS handshake error from 10.129.0.1:44090: EOF
level=info timestamp=2018-08-02T11:01:11.314020Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:01:16 http: TLS handshake error from 10.129.0.1:44102: EOF
2018/08/02 11:01:26 http: TLS handshake error from 10.129.0.1:44116: EOF
2018/08/02 11:01:36 http: TLS handshake error from 10.129.0.1:44128: EOF
level=info timestamp=2018-08-02T11:01:41.126285Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:01:46 http: TLS handshake error from 10.129.0.1:44140: EOF
2018/08/02 11:01:56 http: TLS handshake error from 10.129.0.1:44152: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:49:54.822757Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8rxl6 kind= uid=c9cb7aee-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:49:56.244104Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:59:01.155837Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T10:48:36.756537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.449061Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: true\n"
level=info timestamp=2018-08-02T10:49:54.450937Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:49:54.451075Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.451835Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T10:49:54.687110Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind= uid=5dbb121f-9641-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T10:49:54.687566Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi8s6c6\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi8s6c6, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 5dbb121f-9641-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8s6c6"
level=info timestamp=2018-08-02T10:49:54.688079Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688192Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:54.688350Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:54.688572Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T10:49:58.677616Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8s6c6, existing: false\n"
level=info timestamp=2018-08-02T10:49:58.677891Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:49:58.678041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T10:49:58.678470Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi8s6c6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T10:59:58.804450Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T10:59:58.804487Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T10:59:58.804701Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T10:59:58.807127Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T10:59:58.807398Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:00:39.768393Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T11:00:39.768890Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:00:39.768932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:00:39.769182Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:00:39.773120Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:00:39.773460Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:01:43.533586Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizbz7c, existing: false\n"
level=info timestamp=2018-08-02T11:01:43.534356Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:01:43.534718Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:01:43.535347Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmih78jm-sg99j Pod phase: Running level=info timestamp=2018-08-02T10:59:05.351079Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T10:59:05.351447Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T10:59:05.353046Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T10:59:15.364407Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T10:59:15.443597Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmih78jm" level=info timestamp=2018-08-02T10:59:15.445242Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T10:59:15.445502Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure in Spec Setup (BeforeEach) [181.239 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:294 when CPU model not defined [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:392 should report CPU model from libvirt capabilities /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:393 Unexpected Warning event received: testvmih78jm,0f6f8a26-9643-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt Expected : Warning not to equal : Warning /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247 ------------------------------ level=info timestamp=2018-08-02T10:59:01.133973Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmih78jm-sg99j" level=info timestamp=2018-08-02T10:59:18.189346Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmih78jm-sg99j" level=error timestamp=2018-08-02T10:59:18.494763Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:01:34.636097Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:01:34.812908Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 
username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:38.569568Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:01:41 http: TLS handshake error from 10.129.0.1:34408: EOF
level=info timestamp=2018-08-02T11:01:46.233952Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:48.900280Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:01:50.154233Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:01:51 http: TLS handshake error from 10.129.0.1:34420: EOF
level=info timestamp=2018-08-02T11:01:59.011598Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:02:01 http: TLS handshake error from 10.129.0.1:34432: EOF
level=info timestamp=2018-08-02T11:02:05.108435Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:02:09.248242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:02:11 http: TLS handshake error from 10.129.0.1:34444: EOF
level=info timestamp=2018-08-02T11:02:16.501977Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:02:19.494243Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:00:26 http: TLS handshake error from 10.129.0.1:44042: EOF
2018/08/02 11:00:36 http: TLS handshake error from 10.129.0.1:44054: EOF
2018/08/02 11:00:46 http: TLS handshake error from 10.129.0.1:44066: EOF
2018/08/02 11:00:56 http: TLS handshake error from 10.129.0.1:44078: EOF
2018/08/02 11:01:06 http: TLS handshake error from 10.129.0.1:44090: EOF
level=info timestamp=2018-08-02T11:01:11.314020Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:01:16 http: TLS handshake error from 10.129.0.1:44102: EOF
2018/08/02 11:01:26 http: TLS handshake error from 10.129.0.1:44116: EOF
2018/08/02 11:01:36 http: TLS handshake error from 10.129.0.1:44128: EOF
level=info timestamp=2018-08-02T11:01:41.126285Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:01:46 http: TLS handshake error from 10.129.0.1:44140: EOF
2018/08/02 11:01:56 http: TLS handshake error from 10.129.0.1:44152: EOF
2018/08/02 11:02:06 http: TLS handshake error from 10.129.0.1:44164: EOF
level=info timestamp=2018-08-02T11:02:11.009884Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:02:16 http: TLS handshake error from 10.129.0.1:44176: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:59:01.155837Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:02.053618Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:02:02.054824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:02:19.754008Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:02:19.757196Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:02:19.757534Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4s2bk"
level=info timestamp=2018-08-02T11:02:19.798221Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4s2bk, existing: true\n"
level=info timestamp=2018-08-02T11:02:19.798364Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:02:19.798475Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:19.798711Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:02:19.800525Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:02:19.800895Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4s2bk"
level=info timestamp=2018-08-02T11:02:19.881347Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4s2bk, existing: true\n"
level=info timestamp=2018-08-02T11:02:19.881547Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:02:19.881620Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:19.881863Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:02:19.883517Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:02:19.883883Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4s2bk"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:01:43.535347Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:00.773204Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T11:02:00.775851Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:02:00.778234Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.778763Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T11:02:00.875838Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T11:02:00.876026Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:00.876157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876194Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876263Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:00.876345Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:01.704701Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:01.705888Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:01.707524Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:01.708624Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi4s2bk-p4dqc
Pod phase: Running
level=info timestamp=2018-08-02T11:02:08.249802Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:02:08.250117Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:02:08.252006Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:02:18.259490Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:02:18.326567Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4s2bk"
level=info timestamp=2018-08-02T11:02:18.329453Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:02:18.332945Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:04:21.936830Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:04:25.303735Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:04:25.336460Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:04:25.387674Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:04:31 http: TLS handshake error from 10.129.0.1:34614: EOF
level=info timestamp=2018-08-02T11:04:32.144088Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:04:36.602643Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:04:41.108067Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:04:41 http: TLS handshake error from 10.129.0.1:34626: EOF
level=info timestamp=2018-08-02T11:04:42.358442Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:04:48.025339Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:04:51.067535Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:04:51 http: TLS handshake error from 10.129.0.1:34638: EOF
level=info timestamp=2018-08-02T11:04:52.613803Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:05:01 http: TLS handshake error from 10.129.0.1:34650: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:02:56 http: TLS handshake error from 10.129.0.1:44224: EOF
2018/08/02 11:03:06 http: TLS handshake error from 10.129.0.1:44236: EOF
level=info timestamp=2018-08-02T11:03:11.000812Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:03:16 http: TLS handshake error from 10.129.0.1:44248: EOF
2018/08/02 11:03:26 http: TLS handshake error from 10.129.0.1:44260: EOF
2018/08/02 11:03:36 http: TLS handshake error from 10.129.0.1:44272: EOF
level=info timestamp=2018-08-02T11:03:41.079324Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:03:46 http: TLS handshake error from 10.129.0.1:44284: EOF
2018/08/02 11:03:56 http: TLS handshake error from 10.129.0.1:44296: EOF
2018/08/02 11:04:06 http: TLS handshake error from 10.129.0.1:44308: EOF
2018/08/02 11:04:16 http: TLS handshake error from 10.129.0.1:44320: EOF
2018/08/02 11:04:26 http: TLS handshake error from 10.129.0.1:44334: EOF
2018/08/02 11:04:36 http: TLS handshake error from 10.129.0.1:44346: EOF
2018/08/02 11:04:46 http: TLS handshake error from 10.129.0.1:44358: EOF
2018/08/02 11:04:56 http: TLS handshake error from 10.129.0.1:44370: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:49:56.244482Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6gkrx kind= uid=caa5d74d-9641-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:57.321874Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:57.322977Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi9vs9b kind= uid=3694bff7-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:59:01.155837Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:02.053618Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:02:02.054824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:02:40.235956Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:02:40.241073Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:02:40.242099Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4s2bk"
level=info timestamp=2018-08-02T11:03:00.723422Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4s2bk, existing: true\n"
level=info timestamp=2018-08-02T11:03:00.723711Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:03:00.723870Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:03:00.724437Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:03:00.727015Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:03:00.727400Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4s2bk"
level=info timestamp=2018-08-02T11:03:41.689020Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4s2bk, existing: true\n"
level=info timestamp=2018-08-02T11:03:41.690198Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:03:41.690316Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:03:41.690925Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:03:41.698695Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:03:41.702993Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4s2bk"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:01:43.535347Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:00.773204Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T11:02:00.775851Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:02:00.778234Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.778763Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T11:02:00.875838Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T11:02:00.876026Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:00.876157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876194Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876263Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:00.876345Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:01.704701Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:01.705888Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:01.707524Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:01.708624Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi4s2bk-p4dqc
Pod phase: Running
level=info timestamp=2018-08-02T11:02:08.249802Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:02:08.250117Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:02:08.252006Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:02:18.259490Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:02:18.326567Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4s2bk"
level=info timestamp=2018-08-02T11:02:18.329453Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:02:18.332945Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.861 seconds]
Configurations
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44
  New VirtualMachineInstance with all supported drives
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:413
    should have all the device nodes [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:436

    Unexpected Warning event received: testvmi4s2bk,7b439a11-9643-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:02:01.968381Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi4s2bk kind=VirtualMachineInstance uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi4s2bk-p4dqc"
level=info timestamp=2018-08-02T11:02:19.226279Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi4s2bk kind=VirtualMachineInstance uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi4s2bk-p4dqc"
level=error timestamp=2018-08-02T11:02:19.329258Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi4s2bk kind=VirtualMachineInstance uid=7b439a11-9643-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:05:12.856476Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=info timestamp=2018-08-02T11:05:12.969953Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-02T11:05:16.728297Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:16.728538Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:05:17.861235Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:17.861649Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=info timestamp=2018-08-02T11:05:18.150943Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:05:21.385420Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:05:21 http: TLS handshake error from 10.129.0.1:34674: EOF
level=info timestamp=2018-08-02T11:05:23.783981Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-02T11:05:25.904472Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:25.904705Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:05:30.607090Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:30.608228Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
2018/08/02 11:05:31 http: TLS handshake error from 10.129.0.1:34686: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
level=error timestamp=2018-08-02T11:05:23.680529Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:23.680872Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:05:24.777855Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:24.778078Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
2018/08/02 11:05:26 http: TLS handshake error from 10.129.0.1:44406: EOF
level=error timestamp=2018-08-02T11:05:27.096707Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:27.098249Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:05:28.322082Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:28.322340Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:05:29.448056Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:29.449608Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:05:31.758345Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:31.758689Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:05:32.905031Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:32.906277Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmip6jfq/console proto=HTTP/1.1 statusCode=400 contentLength=89

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:52:58.762403Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi9vs9b\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi9vs9b, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 3694bff7-9642-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi9vs9b"
level=info timestamp=2018-08-02T10:52:58.953779Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:59:01.155837Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:02.053618Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:02:02.054824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:05:03.016444Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:05:03.017310Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:05:03.226613Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip6jfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip6jfq"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:05:21.118594Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:05:21.121703Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:05:21.122223Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmip6jfq"
level=info timestamp=2018-08-02T11:05:23.682652Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmip6jfq, existing: true\n"
level=info timestamp=2018-08-02T11:05:23.682981Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:05:23.683068Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:05:23.683448Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:05:23.687070Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:05:23.687512Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmip6jfq"
level=info timestamp=2018-08-02T11:05:28.808013Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmip6jfq, existing: true\n"
level=info timestamp=2018-08-02T11:05:28.808512Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:05:28.808596Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:05:28.808946Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:05:28.811957Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:05:28.812499Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmip6jfq"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:01:43.535347Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:00.773204Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T11:02:00.775851Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:02:00.778234Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.778763Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T11:02:00.875838Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T11:02:00.876026Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:00.876157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876194Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876263Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:00.876345Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:01.704701Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:01.705888Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:01.707524Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:01.708624Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmip6jfq-j2gzj
Pod phase: Running
level=info timestamp=2018-08-02T11:05:07.256866Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:05:07.257171Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:05:07.259109Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:05:17.273117Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:05:17.395578Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmip6jfq"
level=info timestamp=2018-08-02T11:05:17.397941Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:05:17.398469Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [30.908 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      with a cirros image
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67
        should return that we are running cirros [It]
        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68

        Expected error:
            <*errors.errorString | 0xc42039aa00>: {
                s: "Timeout trying to connect to the virtual machine instance",
            }
            Timeout trying to connect to the virtual machine instance
        not to have occurred

        /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:55
------------------------------
STEP: Creating a new VirtualMachineInstance
STEP: Expecting the VirtualMachineInstance console

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:05:53.106554Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=info timestamp=2018-08-02T11:05:54.427652Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-02T11:05:55.534973Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:55.537744Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:05:56.690509Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:56.691307Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:05:57.874370Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:57.876100Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:05:58.986596Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:58.988267Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:06:01.358579Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:01.358921Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=89
2018/08/02 11:06:01 http: TLS handshake error from 10.129.0.1:34722: EOF
level=error timestamp=2018-08-02T11:06:02.497885Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:02.498693Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=89

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
level=info timestamp=2018-08-02T11:05:41.362007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/02 11:05:46 http: TLS handshake error from 10.129.0.1:44430: EOF
level=error timestamp=2018-08-02T11:05:46.890949Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:46.891438Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:05:48.105285Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:48.105540Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:05:50.830882Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:50.831019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:05:54.365772Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:05:54.365963Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/02 11:05:56 http: TLS handshake error from 10.129.0.1:44442: EOF
level=error timestamp=2018-08-02T11:06:00.203746Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:00.204234Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:06:03.666983Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:03.669248Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmids92n/console proto=HTTP/1.1 statusCode=400 contentLength=89 Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T10:52:58.954288Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4wzkn kind= uid=378dba91-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T10:55:59.901624Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:55:59.902608Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizbz7c kind= uid=a3644963-9642-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T10:56:00.153663Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c" level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c" level=info timestamp=2018-08-02T10:59:01.155837Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm" level=info timestamp=2018-08-02T11:02:02.053618Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:02:02.054824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:05:03.016444Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:05:03.017310Z pos=preset.go:171 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:05:03.226613Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip6jfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip6jfq" level=info timestamp=2018-08-02T11:05:33.856954Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:05:33.858342Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:06:01.376342Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:06:01.382573Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:06:01.383128Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmids92n" level=info timestamp=2018-08-02T11:06:02.023800Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmids92n, existing: true\n" level=info timestamp=2018-08-02T11:06:02.023965Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:06:02.024060Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:06:02.024357Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:06:02.027064Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:06:02.027506Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmids92n" level=info timestamp=2018-08-02T11:06:03.308104Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmids92n, existing: true\n" level=info timestamp=2018-08-02T11:06:03.308681Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:06:03.308767Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:06:03.309232Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:06:03.311734Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:06:03.312306Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmids92n" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:01:43.535347Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:02:00.773204Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n" level=info timestamp=2018-08-02T11:02:00.775851Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:02:00.778234Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:02:00.778763Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=error timestamp=2018-08-02T11:02:00.875838Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed." 
level=info timestamp=2018-08-02T11:02:00.876026Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm" level=info timestamp=2018-08-02T11:02:00.876157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n" level=info timestamp=2018-08-02T11:02:00.876194Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:02:00.876263Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:02:00.876345Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:02:01.704701Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n" level=info timestamp=2018-08-02T11:02:01.705888Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:02:01.707524Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:02:01.708624Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-launcher-testvmids92n-fzpf2
Pod phase: Running
level=info timestamp=2018-08-02T11:05:44.053477Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:05:44.054056Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:05:44.169026Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:05:59.928840Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:06:00.072398Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmids92n"
level=info timestamp=2018-08-02T11:06:00.081771Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:06:00.082677Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure [30.909 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
with a fedora image
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77
should return that we are running fedora [It]
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78
Expected error:
    <*errors.errorString | 0xc4200ada00>: {
        s: "Timeout trying to connect to the virtual machine instance",
    }
    Timeout trying to connect to the virtual machine instance
not to have occurred
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:55
------------------------------
STEP: Creating a new VirtualMachineInstance
STEP: Expecting the VirtualMachineInstance console
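The statusCode=400 responses and the test timeout above fit together: the console subresource refuses connections until the VMI phase is Running, and this VMI never got past Scheduled because virt-handler's sync loop kept failing. Below is a minimal sketch of such a phase gate; the names (lookupPhase, consoleHandler) are hypothetical illustrations, not KubeVirt's actual subresource implementation.

// Minimal sketch of a phase gate of the kind that would produce the
// HTTP 400 responses in the log above. Hypothetical names throughout.
package main

import (
	"fmt"
	"net/http"
)

// lookupPhase stands in for fetching the VMI object from the cluster and
// reading status.phase (Pending, Scheduling, Scheduled, Running, ...).
func lookupPhase(namespace, name string) string {
	return "Scheduled" // hard-coded for illustration
}

func consoleHandler(w http.ResponseWriter, r *http.Request) {
	phase := lookupPhase("kubevirt-test-default", "testvmids92n")
	if phase != "Running" {
		// Matches the pattern in the log: the request is rejected with
		// HTTP 400 until the VMI reaches Running, so a VMI stuck in
		// Scheduling/Scheduled fails every console attempt.
		msg := fmt.Sprintf("Unable to connect to VirtualMachineInstance because phase is %s instead of Running", phase)
		http.Error(w, msg, http.StatusBadRequest)
		return
	}
	// ...only here would the handler upgrade the connection and proxy
	// the serial console stream...
}

func main() {
	http.HandleFunc("/console", consoleHandler)
	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}

Under this reading, the repeated 400s are a symptom rather than the fault: the client polls the console endpoint for the full 30-second window while the phase never advances.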
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:06:21.995788Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-02T11:06:22.900901Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:22.901390Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=info timestamp=2018-08-02T11:06:25.053292Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=error timestamp=2018-08-02T11:06:25.149892Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:25.151029Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:26.316122Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:26.316552Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:28.639666Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:28.641021Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:31.054781Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:31.054996Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
2018/08/02 11:06:31 http: TLS handshake error from 10.129.0.1:34758: EOF
level=error timestamp=2018-08-02T11:06:32.209460Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:32.209797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
level=error timestamp=2018-08-02T11:06:19.437228Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:19.437536Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:06:20.623991Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:20.624096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:06:23.992459Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:23.992801Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
2018/08/02 11:06:26 http: TLS handshake error from 10.129.0.1:44478: EOF
level=error timestamp=2018-08-02T11:06:27.448936Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:27.449408Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:29.854976Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:29.855242Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:33.396755Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:33.397052Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:34.525495Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:34.525706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmis8rw5/console proto=HTTP/1.1 statusCode=400 contentLength=89
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:56:00.222982Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizbz7c\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizbz7c"
level=info timestamp=2018-08-02T10:59:01.155837Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:02.053618Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:02:02.054824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:05:03.016444Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:05:03.017310Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:05:03.226613Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip6jfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip6jfq"
level=info timestamp=2018-08-02T11:05:33.856954Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:05:33.858342Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:06:04.758641Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:06:04.760404Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:06:04.873803Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis8rw5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis8rw5"
level=info timestamp=2018-08-02T11:06:04.922460Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis8rw5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis8rw5"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:06:24.648674Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:06:24.651832Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:06:24.652332Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmis8rw5"
level=info timestamp=2018-08-02T11:06:27.212904Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmis8rw5, existing: true\n"
level=info timestamp=2018-08-02T11:06:27.213101Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:06:27.213233Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:06:27.213439Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:06:27.216255Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:06:27.216622Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmis8rw5"
level=info timestamp=2018-08-02T11:06:32.337896Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmis8rw5, existing: true\n"
level=info timestamp=2018-08-02T11:06:32.338436Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:06:32.339399Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:06:32.340068Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:06:32.345609Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:06:32.346702Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmis8rw5"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:01:43.535347Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:00.773204Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T11:02:00.775851Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:02:00.778234Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.778763Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T11:02:00.875838Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T11:02:00.876026Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:00.876157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876194Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876263Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:00.876345Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:01.704701Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:01.705888Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:01.707524Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:01.708624Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmis8rw5-q5khq
Pod phase: Running
level=info timestamp=2018-08-02T11:06:10.069900Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:06:10.074822Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:06:10.080701Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:06:20.602269Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:06:20.630675Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmis8rw5"
level=info timestamp=2018-08-02T11:06:20.631874Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:06:20.632368Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure [30.905 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should be able to reconnect to console multiple times [It]
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87
Expected error:
    <*errors.errorString | 0xc42066ac80>: {
        s: "Timeout trying to connect to the virtual machine instance",
    }
    Timeout trying to connect to the virtual machine instance
not to have occurred
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:100
------------------------------
STEP: Creating a new VirtualMachineInstance
STEP: Checking that the console output equals to expected one
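Both failing runs share the same virt-handler root cause: "Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/<tid>/ns/mnt", with the task id varying between retries. That is consistent with the handler reading a /proc namespace entry for a thread that has already exited, so every sync retry fails the same way. Below is a minimal sketch of that style of namespace-type detection, assuming a readlink-and-parse approach; detectNamespaceType is a hypothetical stand-in, not KubeVirt's code.

// A minimal, Linux-only sketch of namespace-type detection via /proc.
// All names are hypothetical illustrations of the failure mode above.
package main

import (
	"fmt"
	"os"
	"strings"
	"syscall"
)

func detectNamespaceType(path string) (string, error) {
	// For a live thread, /proc/<pid>/task/<tid>/ns/mnt resolves to a
	// target like "mnt:[4026531840]". If the thread has already exited,
	// the readlink fails, surfacing much like the "Error detecting
	// namespace type from path" errors in the handler log above.
	target, err := os.Readlink(path)
	if err != nil {
		return "", fmt.Errorf("error detecting namespace type from path: %s: %w", path, err)
	}
	nsType, _, ok := strings.Cut(target, ":")
	if !ok {
		return "", fmt.Errorf("unexpected namespace link target %q", target)
	}
	return nsType, nil
}

func main() {
	path := fmt.Sprintf("/proc/%d/task/%d/ns/mnt", os.Getpid(), syscall.Gettid())
	nsType, err := detectNamespaceType(path)
	fmt.Println(nsType, err) // prints "mnt <nil>" on Linux
}

If the detection really is tied to a stale thread id, resolving the path against the current goroutine's locked OS thread (or against /proc/self) would avoid the race; that is an inference from the changing task ids, not something the log proves.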
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:06:56.031348Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:57.181575Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:57.181926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:59.502011Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:59.505961Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:07:00.620520Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:00.620718Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
2018/08/02 11:07:01 http: TLS handshake error from 10.129.0.1:34794: EOF
level=error timestamp=2018-08-02T11:07:01.769187Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:01.769401Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:07:02.926991Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:02.928958Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:07:05.242634Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:05.243192Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=info timestamp=2018-08-02T11:07:05.929315Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
level=error timestamp=2018-08-02T11:06:49.087449Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:49.088535Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:06:50.366863Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:50.367217Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:06:52.591843Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:52.592259Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:53.725647Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:53.726031Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:06:54.892484Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:54.892763Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
2018/08/02 11:06:56 http: TLS handshake error from 10.129.0.1:44514: EOF
level=error timestamp=2018-08-02T11:06:58.362379Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:06:58.362681Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
level=error timestamp=2018-08-02T11:07:04.090648Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduled instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:04.092823Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi4tp66/console proto=HTTP/1.1 statusCode=400 contentLength=89
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T10:59:01.158899Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T10:59:01.672579Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:02.053618Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:02:02.054824Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4s2bk kind= uid=7b439a11-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:05:03.016444Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:05:03.017310Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip6jfq kind= uid=e71f9242-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:05:03.226613Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmip6jfq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmip6jfq"
level=info timestamp=2018-08-02T11:05:33.856954Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:05:33.858342Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmids92n kind= uid=f981a6aa-9643-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:06:04.758641Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:06:04.760404Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmis8rw5 kind= uid=0becc0b7-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:06:04.873803Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis8rw5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis8rw5"
level=info timestamp=2018-08-02T11:06:04.922460Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmis8rw5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmis8rw5"
level=info timestamp=2018-08-02T11:06:35.824876Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:06:35.826561Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:06:54.761076Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:06:54.764105Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:06:54.764534Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4tp66"
level=info timestamp=2018-08-02T11:06:57.326792Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4tp66, existing: true\n"
level=info timestamp=2018-08-02T11:06:57.326987Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:06:57.327061Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:06:57.327406Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:06:57.330080Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:06:57.330536Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4tp66"
level=info timestamp=2018-08-02T11:07:02.451422Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4tp66, existing: true\n"
level=info timestamp=2018-08-02T11:07:02.451720Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:07:02.451798Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:07:02.452265Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:07:02.455709Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4tp66 kind= uid=1e70c8d9-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:07:02.456280Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4tp66"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:01:43.535347Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizbz7c kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:00.773204Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: true\n"
level=info timestamp=2018-08-02T11:02:00.775851Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:02:00.778234Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.778763Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=error timestamp=2018-08-02T11:02:00.875838Z pos=vm.go:433 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind= uid=0f6f8a26-9643-11e8-bba8-525500d15501 reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="Updating the VirtualMachineInstance status failed."
level=info timestamp=2018-08-02T11:02:00.876026Z pos=vm.go:253 component=virt-handler reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmih78jm\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmih78jm, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0f6f8a26-9643-11e8-bba8-525500d15501, UID in object meta: " msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmih78jm"
level=info timestamp=2018-08-02T11:02:00.876157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876194Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:00.876263Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:00.876345Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:02:01.704701Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmih78jm, existing: false\n"
level=info timestamp=2018-08-02T11:02:01.705888Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:02:01.707524Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:02:01.708624Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmih78jm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi4tp66-b5hlp
Pod phase: Running
level=info timestamp=2018-08-02T11:06:41.046044Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:06:41.046328Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:06:41.048126Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:06:51.129527Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:06:51.184343Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4tp66"
level=info timestamp=2018-08-02T11:06:51.185755Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:06:51.185979Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure [30.957 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
A new VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
with a serial console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
should wait until the virtual machine is in running state and return a stream interface [It]
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103
Expected error:
    <*errors.errorString | 0xc420685730>: {
        s: "Timeout trying to connect to the virtual machine instance",
    }
    Timeout trying to connect to the virtual machine instance
not to have occurred
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:109
------------------------------
STEP: Creating a new VirtualMachineInstance
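One other recurring pattern in these dumps is benign: the controller's "reenqueuing VirtualMachineInstance" lines are ordinary optimistic-concurrency handling, where a status update loses the race against another writer, the API server answers with a conflict, and the key goes back on the work queue to be retried against the latest version. A self-contained sketch of that loop is below; updateStatus and the fixed sleep are stand-ins, since controllers of this kind typically use a client-go-style rate-limited workqueue and re-read the object before retrying.

// Sketch of a re-enqueue-on-conflict loop; all names are stand-ins.
package main

import (
	"errors"
	"fmt"
	"time"
)

var errConflict = errors.New("the object has been modified; please apply your changes to the latest version and try again")

// updateStatus simulates an optimistic-concurrency write that loses the
// race twice before succeeding against the latest resource version.
func updateStatus(attempt int) error {
	if attempt < 2 {
		return errConflict
	}
	return nil
}

func main() {
	key := "kubevirt-test-default/testvmis8rw5" // example key from the log
	for attempt := 0; ; attempt++ {
		if err := updateStatus(attempt); err != nil {
			// The conflict is expected and transient, so it is logged at
			// info level and the key is put back on the queue.
			fmt.Printf("reenqueuing VirtualMachineInstance %s: %v\n", key, err)
			time.Sleep(100 * time.Millisecond) // crude stand-in for backoff
			continue
		}
		fmt.Println("synchronization loop succeeded for", key)
		return
	}
}

The StorageError/UID-precondition lines in the zmfm7 handler are a different case: there the object was already deleted, so the retry eventually observes "existing: false" and the cleanup path completes instead of looping.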
STEP: Creating a new VirtualMachineInstance
• [SLOW TEST:30.522 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should fail waiting for the virtual machine instance to be running
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111
------------------------------
• [SLOW TEST:30.323 seconds]
Console
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65
    with a serial console
    /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66
      should fail waiting for the expecter
      /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134
------------------------------
•
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.015 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to start a vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.007 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  should succeed to stop a running vmi [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.008 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have correct UUID
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.007 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with winrm connection [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150
    should have pod IP
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.008 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to start a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
S [SKIPPING] in Spec Setup (BeforeEach) [0.006 seconds]
Windows VirtualMachineInstance
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57
  with kubectl command [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226
    should succeed to stop a vmi
    /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250

  Skip Windows tests that requires PVC disk-windows
  /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1384
------------------------------
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:08:01 http: TLS handshake error from 10.129.0.1:34868: EOF
level=error timestamp=2018-08-02T11:08:02.610123Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:08:02.610373Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:08:04.897871Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:08:04.898887Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:08:07.324100Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:08:07.325365Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=info timestamp=2018-08-02T11:08:07.514118Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:08:07.701082Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/version proto=HTTP/2.0 statusCode=200 contentLength=247
level=info timestamp=2018-08-02T11:08:08.606736Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:08:11 http: TLS handshake error from 10.129.0.1:34880: EOF
level=info timestamp=2018-08-02T11:08:17.793598Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:08:19.926783Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:08:21 http: TLS handshake error from 10.129.0.1:34892: EOF
level=info timestamp=2018-08-02T11:08:23.309691Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:07:56 http: TLS handshake error from 10.129.0.1:44588: EOF
level=error timestamp=2018-08-02T11:07:56.862756Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:56.862941Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:07:58.001814Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:58.002560Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:07:59.098644Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:07:59.099875Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:08:03.741124Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:08:03.741427Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
level=error timestamp=2018-08-02T11:08:06.063117Z pos=subresource.go:54 component=virt-api reason="Unable to connect to VirtualMachineInstance because phase is Scheduling instead of Running" msg="Failed to gather remote exec info for subresource request."
level=info timestamp=2018-08-02T11:08:06.063408Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/apis/subresources.kubevirt.io/v1alpha2/namespaces/kubevirt-test-default/virtualmachineinstances/testvmi5llpq/console proto=HTTP/1.1 statusCode=400 contentLength=90
2018/08/02 11:08:06 http: TLS handshake error from 10.129.0.1:44600: EOF
level=info timestamp=2018-08-02T11:08:11.236651Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:08:17 http: TLS handshake error from 10.129.0.1:44612: EOF
2018/08/02 11:08:27 http: TLS handshake error from 10.129.0.1:44624: EOF
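The repeated statusCode=400 responses above are virt-api refusing console subresource requests while the VMI is still Scheduling. An illustrative guard in that spirit (assumed handler shape, not virt-api's actual code; the phase lookup is left as a hypothetical callback):

```go
package main

import (
	"fmt"
	"net/http"
)

// consoleHandler rejects console connections until the VMI reports
// Running, producing 400s like the access-log lines above.
func consoleHandler(phaseOf func(r *http.Request) string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if phase := phaseOf(r); phase != "Running" {
			http.Error(w, fmt.Sprintf(
				"Unable to connect to VirtualMachineInstance because phase is %s instead of Running", phase),
				http.StatusBadRequest) // shows up as statusCode=400 in the log
			return
		}
		// ...upgrade the connection and proxy the serial console stream...
	}
}

func main() {
	http.HandleFunc("/console", consoleHandler(func(*http.Request) string { return "Scheduling" }))
	_ = http.ListenAndServe(":8080", nil)
}
```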
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:07:07.012697Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitg6xk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitg6xk"
level=info timestamp=2018-08-02T11:07:37.141793Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5llpq kind= uid=42fee2a2-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:07:37.142983Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5llpq kind= uid=42fee2a2-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:07.460887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5llpq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi5llpq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 42fee2a2-9644-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5llpq"
level=info timestamp=2018-08-02T11:08:07.506091Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5llpq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi5llpq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 42fee2a2-9644-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5llpq"
level=info timestamp=2018-08-02T11:08:07.905603Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:07.906305Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:07.931506Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim8hq8 kind= uid=55587338-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:07.931840Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim8hq8 kind= uid=55587338-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:07.978007Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8jqmn kind= uid=555c2e2a-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:07.996809Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8jqmn kind= uid=555c2e2a-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:08.027392Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:08.027635Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:08.312700Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:08:08.713854Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:08:27.992063Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:08:27.994692Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:08:27.996535Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik4k7c"
level=info timestamp=2018-08-02T11:08:28.081258Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmik4k7c, existing: true\n"
level=info timestamp=2018-08-02T11:08:28.081743Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:08:28.081952Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:08:28.082426Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:08:28.084868Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:08:28.085437Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik4k7c"
level=info timestamp=2018-08-02T11:08:28.246034Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmik4k7c, existing: true\n"
level=info timestamp=2018-08-02T11:08:28.246256Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:08:28.246360Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:08:28.246541Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:08:28.247457Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:08:28.247701Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik4k7c"
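The recurring fatal error in this run is virt-handler failing to classify its own mount namespace from /proc/<pid>/task/<tid>/ns/mnt. Those ns entries are magic symlinks whose targets look like "mnt:[4026531840]", so detection amounts to a readlink plus a prefix parse; a sketch of that step under those assumptions (placeholder code, not KubeVirt's implementation):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// nsType reads a /proc/<pid>/ns/* magic symlink and returns the namespace
// kind encoded before the colon, e.g. "mnt" from "mnt:[4026531840]".
func nsType(path string) (string, error) {
	target, err := os.Readlink(path)
	if err != nil {
		// Corresponds to the "Failed to open current namespace" reason above.
		return "", fmt.Errorf("Failed to open current namespace: %v", err)
	}
	i := strings.IndexByte(target, ':')
	if i < 0 {
		// Corresponds to "Error detecting namespace type from path: ...".
		return "", fmt.Errorf("Error detecting namespace type from path: %s", path)
	}
	return target[:i], nil
}

func main() {
	t, err := nsType("/proc/self/ns/mnt")
	fmt.Println(t, err)
}
```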
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:08:25.905043Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:08:25.907856Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:08:25.913038Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:08:26.234350Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigbgtv, existing: true\n"
level=info timestamp=2018-08-02T11:08:26.234539Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:08:26.234611Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:08:26.234866Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:08:26.237418Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:08:26.237796Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:08:26.880895Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigbgtv, existing: true\n"
level=info timestamp=2018-08-02T11:08:26.881179Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:08:26.881256Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:08:26.881502Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:08:26.884501Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:08:26.884869Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
Pod name: virt-launcher-testvmi8jqmn-tvb72
Pod phase: Running
level=info timestamp=2018-08-02T11:08:13.164532Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:13.164825Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:13.166880Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:27.625056Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:27.698036Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi8jqmn"
level=info timestamp=2018-08-02T11:08:27.700699Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:27.700913Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmigbgtv-fdns6
Pod phase: Running
level=info timestamp=2018-08-02T11:08:12.302213Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:12.302711Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:12.304610Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:22.315521Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:22.432567Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmigbgtv"
level=info timestamp=2018-08-02T11:08:22.439196Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:22.439666Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmik4k7c-gm2r8
Pod phase: Running
level=info timestamp=2018-08-02T11:08:13.629364Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:13.629501Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:13.645314Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:25.952034Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:26.274808Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmik4k7c"
level=info timestamp=2018-08-02T11:08:26.278004Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:26.278473Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmim8hq8-p7clc
Pod phase: Running
level=info timestamp=2018-08-02T11:08:14.210557Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:14.210965Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:14.217809Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:28.225596Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:28.261050Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmim8hq8"
level=info timestamp=2018-08-02T11:08:28.262769Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:28.262972Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:10:28.594111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:10:28.622552Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:10:31.162287Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:10:31 http: TLS handshake error from 10.129.0.1:35050: EOF
level=info timestamp=2018-08-02T11:10:35.208556Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:10:35.212834Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:10:40.055497Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:10:41.334484Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:10:41 http: TLS handshake error from 10.129.0.1:35062: EOF
2018/08/02 11:10:51 http: TLS handshake error from 10.129.0.1:35074: EOF
level=info timestamp=2018-08-02T11:10:51.546924Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:10:51.761645Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:10:54.213845Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:11:01 http: TLS handshake error from 10.129.0.1:35086: EOF
level=info timestamp=2018-08-02T11:11:01.820913Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
level=info timestamp=2018-08-02T11:09:11.271765Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:09:16 http: TLS handshake error from 10.129.0.1:44684: EOF
2018/08/02 11:09:26 http: TLS handshake error from 10.129.0.1:44696: EOF
2018/08/02 11:09:36 http: TLS handshake error from 10.129.0.1:44708: EOF
2018/08/02 11:09:46 http: TLS handshake error from 10.129.0.1:44720: EOF
2018/08/02 11:09:56 http: TLS handshake error from 10.129.0.1:44732: EOF
2018/08/02 11:10:06 http: TLS handshake error from 10.129.0.1:44744: EOF
level=info timestamp=2018-08-02T11:10:11.280958Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:10:16 http: TLS handshake error from 10.129.0.1:44756: EOF
2018/08/02 11:10:26 http: TLS handshake error from 10.129.0.1:44768: EOF
2018/08/02 11:10:36 http: TLS handshake error from 10.129.0.1:44782: EOF
level=info timestamp=2018-08-02T11:10:41.083970Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:10:46 http: TLS handshake error from 10.129.0.1:44794: EOF
2018/08/02 11:10:56 http: TLS handshake error from 10.129.0.1:44806: EOF
2018/08/02 11:11:06 http: TLS handshake error from 10.129.0.1:44818: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:07:07.012697Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmitg6xk\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmitg6xk"
level=info timestamp=2018-08-02T11:07:37.141793Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5llpq kind= uid=42fee2a2-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:07:37.142983Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5llpq kind= uid=42fee2a2-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:07.460887Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5llpq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi5llpq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 42fee2a2-9644-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5llpq"
level=info timestamp=2018-08-02T11:08:07.506091Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5llpq\": StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmi5llpq, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 42fee2a2-9644-11e8-bba8-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5llpq"
level=info timestamp=2018-08-02T11:08:07.905603Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:07.906305Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:07.931506Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim8hq8 kind= uid=55587338-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:07.931840Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmim8hq8 kind= uid=55587338-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:07.978007Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8jqmn kind= uid=555c2e2a-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:07.996809Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi8jqmn kind= uid=555c2e2a-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:08.027392Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:08:08.027635Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:08.312700Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:08:08.713854Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:09:49.897411Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:09:49.904687Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmik4k7c kind= uid=5555090f-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:09:49.905643Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik4k7c"
level=info timestamp=2018-08-02T11:09:51.776285Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi8jqmn, existing: true\n"
level=info timestamp=2018-08-02T11:09:51.776528Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:09:51.776618Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:09:51.776938Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi8jqmn kind= uid=555c2e2a-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T11:09:51.779396Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmim8hq8, existing: true\n"
level=info timestamp=2018-08-02T11:09:51.779511Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:09:51.779541Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=error timestamp=2018-08-02T11:09:51.779493Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi8jqmn kind= uid=555c2e2a-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:09:51.779674Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmim8hq8 kind= uid=55587338-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T11:09:51.779979Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi8jqmn"
level=error timestamp=2018-08-02T11:09:51.781552Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmim8hq8 kind= uid=55587338-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:09:51.781725Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmim8hq8"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:08:46.142228Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:08:46.154787Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:08:46.157813Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:09:06.638951Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigbgtv, existing: true\n"
level=info timestamp=2018-08-02T11:09:06.639746Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:09:06.639898Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:09:06.640448Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:09:06.646484Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:09:06.649035Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:09:47.712780Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigbgtv, existing: true\n"
level=info timestamp=2018-08-02T11:09:47.725584Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:09:47.725682Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:09:47.725895Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:09:47.742766Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:09:47.743537Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
Pod name: virt-launcher-testvmi8jqmn-tvb72
Pod phase: Running
level=info timestamp=2018-08-02T11:08:13.164532Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:13.164825Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:13.166880Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:27.625056Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:27.698036Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi8jqmn"
level=info timestamp=2018-08-02T11:08:27.700699Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:27.700913Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmigbgtv-fdns6
Pod phase: Running
level=info timestamp=2018-08-02T11:08:12.302213Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:12.302711Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:12.304610Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:22.315521Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:22.432567Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmigbgtv"
level=info timestamp=2018-08-02T11:08:22.439196Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:22.439666Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmik4k7c-gm2r8
Pod phase: Running
level=info timestamp=2018-08-02T11:08:13.629364Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:13.629501Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:13.645314Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:25.952034Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:26.274808Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmik4k7c"
level=info timestamp=2018-08-02T11:08:26.278004Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:26.278473Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmim8hq8-p7clc
Pod phase: Running
level=info timestamp=2018-08-02T11:08:14.210557Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:08:14.210965Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:08:14.217809Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:08:28.225596Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:08:28.261050Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmim8hq8"
level=info timestamp=2018-08-02T11:08:28.262769Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:08:28.262972Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.215 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received: testvmik4k7c,5555090f-9644-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:08:07.824660Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmik4k7c kind=VirtualMachineInstance uid=5555090f-9644-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmik4k7c-gm2r8"
level=info timestamp=2018-08-02T11:08:27.381947Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmik4k7c kind=VirtualMachineInstance uid=5555090f-9644-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmik4k7c-gm2r8"
level=error timestamp=2018-08-02T11:08:27.453326Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmik4k7c kind=VirtualMachineInstance uid=5555090f-9644-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt"
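The Networking spec never reaches its own assertions: the setup in tests/utils.go watches Kubernetes events for the freshly created VMI and fails the spec as soon as a Warning-type event arrives, and the namespace-detection error above surfaces as exactly such an event. A rough sketch of that guard under those assumptions (the event feed is stubbed here; corev1.EventTypeWarning is the real constant):

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

// checkEvents fails on the first Warning event involving the VMI,
// mirroring the "Unexpected Warning event received" assertion above.
func checkEvents(events []corev1.Event, vmiName string) error {
	for _, ev := range events {
		if ev.InvolvedObject.Name == vmiName && ev.Type == corev1.EventTypeWarning {
			return fmt.Errorf("unexpected warning event received: %s", ev.Message)
		}
	}
	return nil
}

func main() {
	ev := corev1.Event{Type: corev1.EventTypeWarning, Message: "Failed to open current namespace"}
	ev.InvolvedObject.Name = "testvmik4k7c"
	fmt.Println(checkEvents([]corev1.Event{ev}, "testvmik4k7c"))
}
```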
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:10:41.334484Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:10:41 http: TLS handshake error from 10.129.0.1:35062: EOF
2018/08/02 11:10:51 http: TLS handshake error from 10.129.0.1:35074: EOF
level=info timestamp=2018-08-02T11:10:51.546924Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:10:51.761645Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:10:54.213845Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:11:01 http: TLS handshake error from 10.129.0.1:35086: EOF
level=info timestamp=2018-08-02T11:11:01.820913Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:11:10.625396Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:11:11 http: TLS handshake error from 10.129.0.1:35098: EOF
level=info timestamp=2018-08-02T11:11:12.164019Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:11:21 http: TLS handshake error from 10.129.0.1:35110: EOF
level=info timestamp=2018-08-02T11:11:23.039725Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:11:23.318715Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:11:25.123569Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:09:26 http: TLS handshake error from 10.129.0.1:44696: EOF
2018/08/02 11:09:36 http: TLS handshake error from 10.129.0.1:44708: EOF
2018/08/02 11:09:46 http: TLS handshake error from 10.129.0.1:44720: EOF
2018/08/02 11:09:56 http: TLS handshake error from 10.129.0.1:44732: EOF
2018/08/02 11:10:06 http: TLS handshake error from 10.129.0.1:44744: EOF
level=info timestamp=2018-08-02T11:10:11.280958Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:10:16 http: TLS handshake error from 10.129.0.1:44756: EOF
2018/08/02 11:10:26 http: TLS handshake error from 10.129.0.1:44768: EOF
2018/08/02 11:10:36 http: TLS handshake error from 10.129.0.1:44782: EOF
level=info timestamp=2018-08-02T11:10:41.083970Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:10:46 http: TLS handshake error from 10.129.0.1:44794: EOF
2018/08/02 11:10:56 http: TLS handshake error from 10.129.0.1:44806: EOF
2018/08/02 11:11:06 http: TLS handshake error from 10.129.0.1:44818: EOF
level=info timestamp=2018-08-02T11:11:11.532771Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:11:16 http: TLS handshake error from 10.129.0.1:44830: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:08:08.027635Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:08.312700Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:08:08.713854Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:11:09.847688Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.848902Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:09.872254Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibsx95 kind= uid=c1c9dab3-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.872596Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibsx95 kind= uid=c1c9dab3-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:09.906895Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.907245Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:09.957598Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.957967Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:10.329963Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
level=info timestamp=2018-08-02T11:11:10.332344Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95"
level=info timestamp=2018-08-02T11:11:10.589106Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95"
level=info timestamp=2018-08-02T11:11:10.659378Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:11:25.971721Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:11:25.974450Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:11:25.974845Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7mct6"
level=info timestamp=2018-08-02T11:11:26.015279Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi7mct6, existing: true\n"
level=info timestamp=2018-08-02T11:11:26.015435Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:11:26.015506Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:11:26.015751Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:11:26.018228Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:11:26.018947Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7mct6" level=info timestamp=2018-08-02T11:11:26.099493Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi7mct6, existing: true\n" level=info timestamp=2018-08-02T11:11:26.099714Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:11:26.099787Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:11:26.100026Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:11:26.102088Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:11:26.102539Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7mct6" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:11:08.206587Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:11:08.299173Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:11:08.329290Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigbgtv, existing: true\n" level=info timestamp=2018-08-02T11:11:08.329410Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T11:11:08.329442Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:11:08.329557Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:11:08.329743Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:11:08.525808Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigbgtv, existing: false\n" level=info timestamp=2018-08-02T11:11:08.525895Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:11:08.526005Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-08-02T11:11:08.526099Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:11:09.664034Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigbgtv, existing: false\n"
level=info timestamp=2018-08-02T11:11:09.664109Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:11:09.664179Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:11:09.664273Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmigbgtv kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi65q8r-jxqx8
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.662944Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.663143Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.664856Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
Pod name: virt-launcher-testvmi7mct6-v2ddw
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.132666Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.140710Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.145322Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:11:25.168333Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:11:25.243645Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi7mct6"
level=info timestamp=2018-08-02T11:11:25.245475Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:11:25.245989Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmibsx95-l6266
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.157383Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.157655Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.159806Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:11:25.169369Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:11:25.334301Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmibsx95"
level=info timestamp=2018-08-02T11:11:25.336452Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:11:25.336934Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmidw9qr-j5xrt
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.115792Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.116178Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.117930Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:11:25.122217Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:11:25.160829Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidw9qr"
level=info timestamp=2018-08-02T11:11:25.162738Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:11:25.163590Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:13:29.745068Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:13:31 http: TLS handshake error from 10.129.0.1:35268: EOF
level=info timestamp=2018-08-02T11:13:35.238586Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:13:35.245028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:13:35.383808Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:13:41.117404Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:13:41 http: TLS handshake error from 10.129.0.1:35280: EOF
level=info timestamp=2018-08-02T11:13:41.707894Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:13:45.583489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:13:51 http: TLS handshake error from 10.129.0.1:35292: EOF
level=info timestamp=2018-08-02T11:13:54.645050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:13:55.829840Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:13:56.166596Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:14:01 http: TLS handshake error from 10.129.0.1:35304: EOF
level=info timestamp=2018-08-02T11:14:06.073781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:11:56 http: TLS handshake error from 10.129.0.1:44878: EOF
2018/08/02 11:12:06 http: TLS handshake error from 10.129.0.1:44890: EOF
2018/08/02 11:12:16 http: TLS handshake error from 10.129.0.1:44902: EOF
2018/08/02 11:12:26 http: TLS handshake error from 10.129.0.1:44914: EOF
2018/08/02 11:12:36 http: TLS handshake error from 10.129.0.1:44926: EOF
2018/08/02 11:12:46 http: TLS handshake error from 10.129.0.1:44938: EOF
2018/08/02 11:12:56 http: TLS handshake error from 10.129.0.1:44950: EOF
2018/08/02 11:13:06 http: TLS handshake error from 10.129.0.1:44962: EOF
level=info timestamp=2018-08-02T11:13:11.049122Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:13:16 http: TLS handshake error from 10.129.0.1:44974: EOF
2018/08/02 11:13:26 http: TLS handshake error from 10.129.0.1:44986: EOF
2018/08/02 11:13:36 http: TLS handshake error from 10.129.0.1:45000: EOF
2018/08/02 11:13:46 http: TLS handshake error from 10.129.0.1:45012: EOF
2018/08/02 11:13:56 http: TLS handshake error from 10.129.0.1:45024: EOF
2018/08/02 11:14:06 http: TLS handshake error from 10.129.0.1:45036: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:08:08.027635Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigbgtv kind= uid=5560ea29-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:08:08.312700Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:08:08.713854Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmigbgtv\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmigbgtv"
level=info timestamp=2018-08-02T11:11:09.847688Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.848902Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:09.872254Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibsx95 kind= uid=c1c9dab3-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.872596Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibsx95 kind= uid=c1c9dab3-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:09.906895Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.907245Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:09.957598Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:11:09.957967Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:10.329963Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
level=info timestamp=2018-08-02T11:11:10.332344Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95"
level=info timestamp=2018-08-02T11:11:10.589106Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95"
level=info timestamp=2018-08-02T11:11:10.659378Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:12:47.930034Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:12:47.942798Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:12:47.943669Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7mct6"
level=info timestamp=2018-08-02T11:12:49.362113Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmibsx95, existing: true\n"
level=info timestamp=2018-08-02T11:12:49.362336Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:12:49.362411Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:12:49.362676Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmibsx95 kind= uid=c1c9dab3-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:12:49.365968Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmibsx95 kind= uid=c1c9dab3-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:12:49.371474Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95"
level=info timestamp=2018-08-02T11:14:09.866087Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi7mct6, existing: true\n"
level=info timestamp=2018-08-02T11:14:09.867067Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:14:09.867232Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:14:09.867893Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:14:09.873581Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind= uid=c1c6587c-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:14:09.874419Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi7mct6"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:12:07.385481Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:12:07.392816Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:12:07.393113Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
level=info timestamp=2018-08-02T11:12:48.282778Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidw9qr, existing: true\n"
level=info timestamp=2018-08-02T11:12:48.283670Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:12:48.283786Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:12:48.284487Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:12:48.290835Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:12:48.300327Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidw9qr"
level=info timestamp=2018-08-02T11:12:48.353537Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi65q8r, existing: true\n"
level=info timestamp=2018-08-02T11:12:48.353909Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:12:48.354164Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:12:48.354450Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:12:48.359724Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi65q8r kind= uid=c1ce5890-9644-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:12:48.360102Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
Pod name: virt-launcher-testvmi65q8r-jxqx8
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.662944Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.663143Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.664856Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:11:25.673434Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:11:25.871213Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi65q8r"
level=info timestamp=2018-08-02T11:11:25.879462Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:11:25.879877Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmi7mct6-v2ddw
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.132666Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.140710Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.145322Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:11:25.168333Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:11:25.243645Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi7mct6"
level=info timestamp=2018-08-02T11:11:25.245475Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:11:25.245989Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmibsx95-l6266
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.157383Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.157655Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.159806Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:11:25.169369Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:11:25.334301Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmibsx95"
level=info timestamp=2018-08-02T11:11:25.336452Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:11:25.336934Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmidw9qr-j5xrt
Pod phase: Running
level=info timestamp=2018-08-02T11:11:15.115792Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:11:15.116178Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:11:15.117930Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:11:25.122217Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:11:25.160829Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidw9qr"
level=info timestamp=2018-08-02T11:11:25.162738Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:11:25.163590Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure in Spec Setup (BeforeEach) [182.247 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance with pod network connectivity explicitly set
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received: testvmi7mct6,c1c6587c-9644-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:11:09.746545Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi7mct6 kind=VirtualMachineInstance uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi7mct6-v2ddw"
level=info timestamp=2018-08-02T11:11:25.466827Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi7mct6 kind=VirtualMachineInstance uid=c1c6587c-9644-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi7mct6-v2ddw"
level=error timestamp=2018-08-02T11:11:25.588240Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi7mct6 kind=VirtualMachineInstance uid=c1c6587c-9644-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:13:45.583489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:13:51 http: TLS handshake error from 10.129.0.1:35292: EOF
level=info timestamp=2018-08-02T11:13:54.645050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:13:55.829840Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:13:56.166596Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:14:01 http: TLS handshake error from 10.129.0.1:35304: EOF
level=info timestamp=2018-08-02T11:14:06.073781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:14:11 http: TLS handshake error from 10.129.0.1:35316: EOF
level=info timestamp=2018-08-02T11:14:11.913760Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:14:16.590991Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:14:21 http: TLS handshake error from 10.129.0.1:35328: EOF
level=info timestamp=2018-08-02T11:14:27.874014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:14:27.900643Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:14:28.030808Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:14:31 http: TLS handshake error from 10.129.0.1:35340: EOF
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:12:26 http: TLS handshake error from 10.129.0.1:44914: EOF
2018/08/02 11:12:36 http: TLS handshake error from 10.129.0.1:44926: EOF
2018/08/02 11:12:46 http: TLS handshake error from 10.129.0.1:44938: EOF
2018/08/02 11:12:56 http: TLS handshake error from 10.129.0.1:44950: EOF
2018/08/02 11:13:06 http: TLS handshake error from 10.129.0.1:44962: EOF
level=info timestamp=2018-08-02T11:13:11.049122Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:13:16 http: TLS handshake error from 10.129.0.1:44974: EOF
2018/08/02 11:13:26 http: TLS handshake error from 10.129.0.1:44986: EOF
2018/08/02 11:13:36 http: TLS handshake error from 10.129.0.1:45000: EOF
2018/08/02 11:13:46 http: TLS handshake error from 10.129.0.1:45012: EOF
2018/08/02 11:13:56 http: TLS handshake error from 10.129.0.1:45024: EOF
2018/08/02 11:14:06 http: TLS handshake error from 10.129.0.1:45036: EOF
level=info timestamp=2018-08-02T11:14:11.197833Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:14:16 http: TLS handshake error from 10.129.0.1:45048: EOF
2018/08/02 11:14:26 http: TLS handshake error from 10.129.0.1:45060: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:11:09.957967Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:11:10.329963Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
level=info timestamp=2018-08-02T11:11:10.332344Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95"
level=info timestamp=2018-08-02T11:11:10.589106Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95"
level=info timestamp=2018-08-02T11:11:10.659378Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r"
level=info timestamp=2018-08-02T11:14:12.141287Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:14:12.142660Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:14:12.192826Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:14:12.193256Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:14:12.246952Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixhgkj kind= uid=2e7a3aff-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:14:12.247266Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixhgkj kind= uid=2e7a3aff-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:14:12.315814Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:14:12.315973Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:14:12.889100Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiksktb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiksktb"
level=info timestamp=2018-08-02T11:14:12.971925Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixhgkj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixhgkj"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:14:31.232869Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:14:31.235700Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:14:31.236082Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmilmh7f"
level=info timestamp=2018-08-02T11:14:31.396745Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmilmh7f, existing: true\n"
level=info timestamp=2018-08-02T11:14:31.396940Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:14:31.397028Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:14:31.397324Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:14:31.401874Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:14:31.402557Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmilmh7f"
level=info timestamp=2018-08-02T11:14:31.647617Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmixhgkj, existing: true\n"
level=info timestamp=2018-08-02T11:14:31.647782Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:14:31.647852Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:14:31.648247Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmixhgkj kind= uid=2e7a3aff-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:14:31.650337Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmixhgkj kind= uid=2e7a3aff-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:14:31.650656Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmixhgkj"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:14:30.366089Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:14:30.369408Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:14:30.369780Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4dhs8"
level=info timestamp=2018-08-02T11:14:30.463417Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4dhs8, existing: true\n"
level=info timestamp=2018-08-02T11:14:30.463564Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:14:30.463637Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:14:30.463878Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:14:30.468163Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:14:30.469173Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4dhs8" level=info timestamp=2018-08-02T11:14:30.630097Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4dhs8, existing: true\n" level=info timestamp=2018-08-02T11:14:30.630563Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:14:30.630915Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:14:30.631404Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:14:30.634797Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:14:30.635756Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4dhs8" Pod name: virt-launcher-testvmi4dhs8-ln755 Pod phase: Running level=info timestamp=2018-08-02T11:14:17.610405Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:14:17.610626Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:14:17.647648Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:14:27.662885Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:14:27.753143Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4dhs8" level=info timestamp=2018-08-02T11:14:27.754706Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:14:27.755157Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiksktb-5c4tg Pod phase: Running level=info timestamp=2018-08-02T11:14:17.162849Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:14:17.163185Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:14:17.170437Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:14:27.196197Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:14:27.261733Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiksktb" level=info timestamp=2018-08-02T11:14:27.268858Z pos=client.go:152 
component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:14:27.269532Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmilmh7f-99hfd Pod phase: Running level=info timestamp=2018-08-02T11:14:18.569918Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:14:18.570321Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:14:18.573407Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:14:28.582797Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:14:28.721702Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmilmh7f" level=info timestamp=2018-08-02T11:14:28.723841Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:14:28.724402Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmixhgkj-94vmf Pod phase: Running level=info timestamp=2018-08-02T11:14:19.316665Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:14:19.316806Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:14:19.318877Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:14:29.327649Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:14:29.417534Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmixhgkj" level=info timestamp=2018-08-02T11:14:29.423324Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:14:29.423841Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:16:30.904228Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:16:31 http: TLS handshake error from 10.129.0.1:35486: EOF level=info timestamp=2018-08-02T11:16:33.742432Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:16:33.756787Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:16:40.844573Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:16:41 http: TLS handshake error from 10.129.0.1:35498: EOF level=info timestamp=2018-08-02T11:16:43.355060Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:16:50.915563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:16:51 http: TLS handshake error from 10.129.0.1:35510: EOF level=info timestamp=2018-08-02T11:16:59.294529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:16:59.434980Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:17:00.987791Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:17:01 http: TLS handshake error from 10.129.0.1:35522: EOF level=info timestamp=2018-08-02T11:17:11.362111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:17:11 http: TLS handshake error from 10.129.0.1:35534: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:15:06 http: TLS handshake error from 10.129.0.1:45108: EOF 2018/08/02 11:15:16 http: TLS handshake error from 10.129.0.1:45120: EOF 2018/08/02 11:15:26 http: TLS handshake error from 10.129.0.1:45132: EOF 2018/08/02 11:15:36 http: TLS handshake error from 10.129.0.1:45144: EOF 2018/08/02 11:15:46 http: TLS handshake error from 10.129.0.1:45156: EOF 2018/08/02 11:15:56 http: TLS handshake error from 10.129.0.1:45168: EOF 2018/08/02 11:16:06 http: TLS handshake error from 10.129.0.1:45180: EOF 2018/08/02 11:16:16 http: TLS handshake error from 10.129.0.1:45192: EOF 2018/08/02 11:16:26 http: TLS handshake error from 10.129.0.1:45204: EOF 2018/08/02 11:16:36 http: TLS handshake error from 10.129.0.1:45218: EOF level=info timestamp=2018-08-02T11:16:41.058358Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:16:46 http: TLS handshake error from 10.129.0.1:45230: EOF 2018/08/02 11:16:56 http: TLS handshake error from 10.129.0.1:45242: EOF 2018/08/02 11:17:06 http: TLS handshake error from 10.129.0.1:45254: EOF level=info timestamp=2018-08-02T11:17:11.140081Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:11:09.957967Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidw9qr kind= uid=c1d2a6b8-9644-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-08-02T11:11:10.329963Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r" level=info timestamp=2018-08-02T11:11:10.332344Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95" level=info timestamp=2018-08-02T11:11:10.589106Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibsx95\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibsx95" level=info timestamp=2018-08-02T11:11:10.659378Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi65q8r\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi65q8r" level=info timestamp=2018-08-02T11:14:12.141287Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:14:12.142660Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:14:12.192826Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:14:12.193256Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:14:12.246952Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixhgkj kind= uid=2e7a3aff-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:14:12.247266Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixhgkj kind= uid=2e7a3aff-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:14:12.315814Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:14:12.315973Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:14:12.889100Z pos=vmi.go:157 component=virt-controller 
service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiksktb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiksktb" level=info timestamp=2018-08-02T11:14:12.971925Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixhgkj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixhgkj" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:15:52.326072Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmixhgkj" level=info timestamp=2018-08-02T11:15:53.040753Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmilmh7f, existing: true\n" level=info timestamp=2018-08-02T11:15:53.041652Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:15:53.041760Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:15:53.042865Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:15:53.049913Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmilmh7f kind= uid=2e6bf38e-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:15:53.051131Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmilmh7f" level=info timestamp=2018-08-02T11:16:53.717276Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi7mct6, existing: false\n" level=info timestamp=2018-08-02T11:16:53.717981Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:16:53.718496Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:16:53.719100Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi7mct6 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T11:16:55.135359Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmibsx95, existing: false\n"
level=info timestamp=2018-08-02T11:16:55.135495Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:16:55.135672Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmibsx95 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:16:55.135861Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmibsx95 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:15:50.548719Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiksktb"
level=info timestamp=2018-08-02T11:15:52.305861Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4dhs8, existing: true\n"
level=info timestamp=2018-08-02T11:15:52.306198Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:15:52.306278Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:15:52.306593Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:15:52.312617Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4dhs8 kind= uid=2e72591e-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:15:52.319088Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4dhs8"
level=info timestamp=2018-08-02T11:16:54.143287Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi65q8r, existing: false\n"
level=info timestamp=2018-08-02T11:16:54.144018Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:16:54.144342Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi65q8r kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:16:54.144841Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi65q8r kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:16:54.155428Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidw9qr, existing: false\n"
level=info timestamp=2018-08-02T11:16:54.155606Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:16:54.156036Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmidw9qr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:16:54.156247Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmidw9qr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi4dhs8-ln755
Pod phase: Running
level=info timestamp=2018-08-02T11:14:17.610405Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:14:17.610626Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:14:17.647648Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:14:27.662885Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:14:27.753143Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4dhs8"
level=info timestamp=2018-08-02T11:14:27.754706Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:14:27.755157Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmiksktb-5c4tg
Pod phase: Running
level=info timestamp=2018-08-02T11:14:17.162849Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:14:17.163185Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:14:17.170437Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:14:27.196197Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:14:27.261733Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiksktb"
level=info timestamp=2018-08-02T11:14:27.268858Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:14:27.269532Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmilmh7f-99hfd
Pod phase: Running
level=info timestamp=2018-08-02T11:14:18.569918Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:14:18.570321Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:14:18.573407Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:14:28.582797Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:14:28.721702Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmilmh7f"
level=info timestamp=2018-08-02T11:14:28.723841Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:14:28.724402Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmixhgkj-94vmf
Pod phase: Running
level=info timestamp=2018-08-02T11:14:19.316665Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:14:19.316806Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:14:19.318877Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:14:29.327649Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:14:29.417534Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmixhgkj"
level=info timestamp=2018-08-02T11:14:29.423324Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:14:29.423841Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.985 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be able to reach [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    the Inbound VirtualMachineInstance with custom MAC address
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received: testvmilmh7f,2e6bf38e-9645-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:14:12.338253Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilmh7f kind=VirtualMachineInstance uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmilmh7f-99hfd"
level=info timestamp=2018-08-02T11:14:30.627898Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmilmh7f kind=VirtualMachineInstance uid=2e6bf38e-9645-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmilmh7f-99hfd"
level=error timestamp=2018-08-02T11:14:30.849462Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmilmh7f kind=VirtualMachineInstance uid=2e6bf38e-9645-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt"
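Note on the failure above: the check at tests/utils.go:247 watches the Kubernetes events emitted for the VMI while it starts and fails the spec as soon as an event of type Warning arrives, which is why the Gomega output reads "Expected : Warning not to equal : Warning". A minimal sketch of that pattern with a recent client-go (the helper name watchForWarnings and its exact shape are illustrative assumptions, not KubeVirt's actual tests code):

package example

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// watchForWarnings returns an error as soon as a Warning event is recorded
// for the named object, mirroring the "Unexpected Warning event received"
// check that failed in the log above.
func watchForWarnings(ctx context.Context, client kubernetes.Interface, namespace, name string) error {
	w, err := client.CoreV1().Events(namespace).Watch(ctx, metav1.ListOptions{
		FieldSelector: fmt.Sprintf("involvedObject.name=%s", name),
	})
	if err != nil {
		return err
	}
	defer w.Stop()
	for ev := range w.ResultChan() {
		event, ok := ev.Object.(*corev1.Event)
		if !ok {
			continue
		}
		if event.Type == corev1.EventTypeWarning {
			// The condition behind the spec failure: a Warning event arrived
			// while the test was still waiting for the VMI to come up.
			return fmt.Errorf("unexpected warning event received: %s", event.Message)
		}
	}
	return ctx.Err()
}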
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:16:43.355060Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:16:50.915563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:16:51 http: TLS handshake error from 10.129.0.1:35510: EOF
level=info timestamp=2018-08-02T11:16:59.294529Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:16:59.434980Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:17:00.987791Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:17:01 http: TLS handshake error from 10.129.0.1:35522: EOF
level=info timestamp=2018-08-02T11:17:11.362111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:17:11 http: TLS handshake error from 10.129.0.1:35534: EOF
level=info timestamp=2018-08-02T11:17:14.011226Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:17:21 http: TLS handshake error from 10.129.0.1:35546: EOF
level=info timestamp=2018-08-02T11:17:21.609210Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:17:31 http: TLS handshake error from 10.129.0.1:35558: EOF
level=info timestamp=2018-08-02T11:17:32.513454Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:17:32.551266Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:15:26 http: TLS handshake error from 10.129.0.1:45132: EOF
2018/08/02 11:15:36 http: TLS handshake error from 10.129.0.1:45144: EOF
2018/08/02 11:15:46 http: TLS handshake error from 10.129.0.1:45156: EOF
2018/08/02 11:15:56 http: TLS handshake error from 10.129.0.1:45168: EOF
2018/08/02 11:16:06 http: TLS handshake error from 10.129.0.1:45180: EOF
2018/08/02 11:16:16 http: TLS handshake error from 10.129.0.1:45192: EOF
2018/08/02 11:16:26 http: TLS handshake error from 10.129.0.1:45204: EOF
2018/08/02 11:16:36 http: TLS handshake error from 10.129.0.1:45218: EOF
level=info timestamp=2018-08-02T11:16:41.058358Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:16:46 http: TLS handshake error from 10.129.0.1:45230: EOF
2018/08/02 11:16:56 http: TLS handshake error from 10.129.0.1:45242: EOF
2018/08/02 11:17:06 http: TLS handshake error from 10.129.0.1:45254: EOF
level=info timestamp=2018-08-02T11:17:11.140081Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:17:16 http: TLS handshake error from 10.129.0.1:45266: EOF
2018/08/02 11:17:26 http: TLS handshake error from 10.129.0.1:45278: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:14:12.315814Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:14:12.315973Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:14:12.889100Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiksktb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiksktb"
level=info timestamp=2018-08-02T11:14:12.971925Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixhgkj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixhgkj"
level=info timestamp=2018-08-02T11:17:14.109271Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.110817Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.130117Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.130554Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.166938Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.167469Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.175586Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.175771Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.487316Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifhw5n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
level=info timestamp=2018-08-02T11:17:14.566635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik4vbr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik4vbr"
level=info timestamp=2018-08-02T11:17:15.119016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihd8k2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihd8k2"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:17:33.119251Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:17:33.121730Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:17:33.122198Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4st4q"
level=info timestamp=2018-08-02T11:17:33.210992Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmik4vbr, existing: true\n"
level=info timestamp=2018-08-02T11:17:33.211292Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:17:33.211380Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:17:33.211705Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:17:33.213860Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:17:33.214320Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik4vbr"
level=info timestamp=2018-08-02T11:17:33.282762Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4st4q, existing: true\n"
level=info timestamp=2018-08-02T11:17:33.282966Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:17:33.283040Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:17:33.283334Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:17:33.285087Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:17:33.285565Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4st4q"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:17:31.945292Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:17:31.946662Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:17:31.946808Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
level=info timestamp=2018-08-02T11:17:32.027943Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmifhw5n, existing: true\n"
level=info timestamp=2018-08-02T11:17:32.028177Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:17:32.028351Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:17:32.028583Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:17:32.031332Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:17:32.032428Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
level=info timestamp=2018-08-02T11:17:32.210196Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmifhw5n, existing: true\n"
level=info timestamp=2018-08-02T11:17:32.210433Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:17:32.210595Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:17:32.210846Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:17:32.211838Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:17:32.213740Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
Pod name: virt-launcher-testvmi4st4q-k64xg
Pod phase: Running
level=info timestamp=2018-08-02T11:17:18.539104Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:17:18.539399Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:17:18.541034Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:17:28.552348Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:17:28.614359Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4st4q"
level=info timestamp=2018-08-02T11:17:28.616305Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:17:28.616714Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmifhw5n-xjhlv
Pod phase: Running
level=info timestamp=2018-08-02T11:17:20.450275Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:17:20.450461Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:17:20.454053Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:17:31.058225Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:17:31.172574Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmifhw5n"
component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:17:31.175356Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmihd8k2-gdz6q Pod phase: Running level=info timestamp=2018-08-02T11:17:19.799082Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:17:19.799335Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:17:19.801630Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:17:31.283915Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:17:31.345382Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmihd8k2" level=info timestamp=2018-08-02T11:17:31.347040Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:17:31.347368Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmik4vbr-k9w7w Pod phase: Running level=info timestamp=2018-08-02T11:17:19.753434Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:17:19.753629Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:17:19.765466Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:17:29.775007Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:17:29.933828Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmik4vbr" level=info timestamp=2018-08-02T11:17:29.936469Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:17:29.938263Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:19:32.146966Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:19:32.173935Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:19:33.882362Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:19:33.914573Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:19:35.572836Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:19:41 http: TLS handshake error from 10.129.0.1:35716: EOF level=info timestamp=2018-08-02T11:19:44.932053Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:19:45.829015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:19:51 http: TLS handshake error from 10.129.0.1:35728: EOF level=info timestamp=2018-08-02T11:19:56.077825Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:20:01 http: TLS handshake error from 10.129.0.1:35740: EOF level=info timestamp=2018-08-02T11:20:04.379370Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:20:04.438790Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:20:06.328645Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:20:11 http: TLS handshake error from 10.129.0.1:35752: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:18:26 http: TLS handshake error from 10.129.0.1:45350: EOF 2018/08/02 11:18:36 http: TLS handshake error from 10.129.0.1:45362: EOF level=info timestamp=2018-08-02T11:18:41.097536Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:18:46 http: TLS handshake error from 10.129.0.1:45374: EOF 2018/08/02 11:18:56 http: TLS handshake error from 10.129.0.1:45386: EOF 2018/08/02 11:19:06 http: TLS handshake error from 10.129.0.1:45398: EOF 2018/08/02 11:19:16 http: TLS handshake error from 10.129.0.1:45410: EOF 2018/08/02 11:19:26 http: TLS handshake error from 10.129.0.1:45422: EOF 2018/08/02 11:19:36 http: TLS handshake error from 10.129.0.1:45436: EOF level=info timestamp=2018-08-02T11:19:41.106412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:19:46 http: TLS handshake error from 10.129.0.1:45448: EOF 2018/08/02 11:19:56 http: TLS handshake error from 10.129.0.1:45460: EOF 2018/08/02 11:20:06 http: TLS handshake error from 10.129.0.1:45472: EOF level=info timestamp=2018-08-02T11:20:09.555730Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:20:11.185004Z pos=filter.go:46 
level=info timestamp=2018-08-02T11:20:11.185004Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:14:12.315814Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:14:12.315973Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiksktb kind= uid=2e867f86-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:14:12.889100Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiksktb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiksktb"
level=info timestamp=2018-08-02T11:14:12.971925Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmixhgkj\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmixhgkj"
level=info timestamp=2018-08-02T11:17:14.109271Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.110817Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.130117Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.130554Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.166938Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.167469Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.175586Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:17:14.175771Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.487316Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifhw5n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
level=info timestamp=2018-08-02T11:17:14.566635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik4vbr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik4vbr"
level=info timestamp=2018-08-02T11:17:15.119016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihd8k2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihd8k2"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:18:13.965988Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:18:13.969191Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:18:13.969626Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4st4q"
level=info timestamp=2018-08-02T11:18:54.871317Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmik4vbr, existing: true\n"
level=info timestamp=2018-08-02T11:18:54.872240Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:18:54.872333Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:18:54.872787Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:18:54.879049Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmik4vbr kind= uid=9aea834d-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:18:54.880502Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik4vbr"
level=info timestamp=2018-08-02T11:18:54.932389Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4st4q, existing: true\n"
level=info timestamp=2018-08-02T11:18:54.932850Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:18:54.933026Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:18:54.933510Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:18:54.938696Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4st4q kind= uid=9ae4abe4-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:18:54.939100Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4st4q"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:18:14.243283Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:18:14.245715Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:18:14.246214Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmihd8k2"
level=info timestamp=2018-08-02T11:18:53.942337Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmifhw5n, existing: true\n"
level=info timestamp=2018-08-02T11:18:53.943303Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:18:53.943451Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:18:53.943817Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:18:53.947862Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind= uid=9ae7c692-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:18:53.948378Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
level=info timestamp=2018-08-02T11:18:55.206811Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmihd8k2, existing: true\n"
level=info timestamp=2018-08-02T11:18:55.207266Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:18:55.207331Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:18:55.207547Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:18:55.210146Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:18:55.210395Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmihd8k2"
Pod name: virt-launcher-testvmi4st4q-k64xg
Pod phase: Running
level=info timestamp=2018-08-02T11:17:18.539104Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:17:18.539399Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:17:18.541034Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:17:28.552348Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:17:28.614359Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4st4q"
level=info timestamp=2018-08-02T11:17:28.616305Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:17:28.616714Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmifhw5n-xjhlv
Pod phase: Running
level=info timestamp=2018-08-02T11:17:20.450275Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:17:20.450461Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:17:20.454053Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:17:31.058225Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:17:31.172574Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmifhw5n"
component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:17:31.175356Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmihd8k2-gdz6q Pod phase: Running level=info timestamp=2018-08-02T11:17:19.799082Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:17:19.799335Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:17:19.801630Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:17:31.283915Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:17:31.345382Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmihd8k2" level=info timestamp=2018-08-02T11:17:31.347040Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:17:31.347368Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmik4vbr-k9w7w Pod phase: Running level=info timestamp=2018-08-02T11:17:19.753434Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:17:19.753629Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:17:19.765466Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:17:29.775007Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:17:29.933828Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmik4vbr" level=info timestamp=2018-08-02T11:17:29.936469Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:17:29.938263Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure in Spec Setup (BeforeEach) [181.876 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be able to reach [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 the internet /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Unexpected Warning event received: testvmi4st4q,9ae4abe4-9645-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt Expected : Warning not to equal : Warning /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247 ------------------------------ level=info timestamp=2018-08-02T11:17:14.017908Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi4st4q kind=VirtualMachineInstance uid=9ae4abe4-9645-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi4st4q-k64xg" level=info timestamp=2018-08-02T11:17:32.525528Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi4st4q kind=VirtualMachineInstance 
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:19:41 http: TLS handshake error from 10.129.0.1:35716: EOF
level=info timestamp=2018-08-02T11:19:44.932053Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:19:45.829015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:19:51 http: TLS handshake error from 10.129.0.1:35728: EOF
level=info timestamp=2018-08-02T11:19:56.077825Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:20:01 http: TLS handshake error from 10.129.0.1:35740: EOF
level=info timestamp=2018-08-02T11:20:04.379370Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:20:04.438790Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:20:06.328645Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:20:11 http: TLS handshake error from 10.129.0.1:35752: EOF
level=info timestamp=2018-08-02T11:20:15.126028Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:20:17.419676Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:20:21 http: TLS handshake error from 10.129.0.1:35764: EOF
2018/08/02 11:20:31 http: TLS handshake error from 10.129.0.1:35776: EOF
level=info timestamp=2018-08-02T11:20:32.496650Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
level=info timestamp=2018-08-02T11:18:41.097536Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:18:46 http: TLS handshake error from 10.129.0.1:45374: EOF
2018/08/02 11:18:56 http: TLS handshake error from 10.129.0.1:45386: EOF
2018/08/02 11:19:06 http: TLS handshake error from 10.129.0.1:45398: EOF
2018/08/02 11:19:16 http: TLS handshake error from 10.129.0.1:45410: EOF
2018/08/02 11:19:26 http: TLS handshake error from 10.129.0.1:45422: EOF
2018/08/02 11:19:36 http: TLS handshake error from 10.129.0.1:45436: EOF
level=info timestamp=2018-08-02T11:19:41.106412Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:19:46 http: TLS handshake error from 10.129.0.1:45448: EOF
2018/08/02 11:19:56 http: TLS handshake error from 10.129.0.1:45460: EOF
2018/08/02 11:20:06 http: TLS handshake error from 10.129.0.1:45472: EOF
level=info timestamp=2018-08-02T11:20:09.555730Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:20:11.185004Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:20:16 http: TLS handshake error from 10.129.0.1:45484: EOF
2018/08/02 11:20:26 http: TLS handshake error from 10.129.0.1:45496: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:17:14.175771Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.487316Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifhw5n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
level=info timestamp=2018-08-02T11:17:14.566635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik4vbr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik4vbr"
level=info timestamp=2018-08-02T11:17:15.119016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihd8k2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihd8k2"
level=info timestamp=2018-08-02T11:20:15.991693Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:15.992789Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:16.027038Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigpf6b kind= uid=075132ea-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:16.027328Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigpf6b kind= uid=075132ea-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:16.059821Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:16.059930Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:16.098988Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:16.099220Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:17.051582Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisw7jh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"
level=info timestamp=2018-08-02T11:20:17.085418Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin69jw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin69jw"
level=info timestamp=2018-08-02T11:20:17.208597Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijkh9q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijkh9q"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:20:32.813903Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:20:32.816262Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:20:32.816652Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"
level=info timestamp=2018-08-02T11:20:32.858445Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmijkh9q, existing: true\n"
level=info timestamp=2018-08-02T11:20:32.858581Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:20:32.858656Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:20:32.858929Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:20:32.860722Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:20:32.861086Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmijkh9q"
level=info timestamp=2018-08-02T11:20:32.897058Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmisw7jh, existing: true\n"
level=info timestamp=2018-08-02T11:20:32.897280Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:20:32.897354Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:20:32.897574Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:20:32.900009Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:20:32.900421Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:20:14.501461Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:20:14.501601Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:20:14.501714Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:20:14.553716Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmihd8k2, existing: false\n"
level=info timestamp=2018-08-02T11:20:14.553853Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:20:14.553947Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:20:14.554081Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:20:15.881668Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmifhw5n, existing: false\n"
level=info timestamp=2018-08-02T11:20:15.883032Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:20:15.883207Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:20:15.883320Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmifhw5n kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:20:17.131232Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmihd8k2, existing: false\n"
level=info timestamp=2018-08-02T11:20:17.131454Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:20:17.131560Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:20:17.131653Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmihd8k2 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmigpf6b-mzjjp
Pod phase: Running
level=info timestamp=2018-08-02T11:20:22.174514Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:22.174816Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:22.176468Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"

Pod name: virt-launcher-testvmijkh9q-bghh9
Pod phase: Running
level=info timestamp=2018-08-02T11:20:21.596653Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:21.596791Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:21.598251Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:20:31.607703Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:20:31.723787Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmijkh9q"
level=info timestamp=2018-08-02T11:20:31.726639Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:20:31.728095Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmin69jw-swqkm
Pod phase: Running
level=info timestamp=2018-08-02T11:20:22.263645Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:22.263819Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:22.268853Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:20:32.279109Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:20:32.342074Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmin69jw"
level=info timestamp=2018-08-02T11:20:32.342800Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:20:32.343391Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmisw7jh-8wdsq
Pod phase: Running
level=info timestamp=2018-08-02T11:20:20.658973Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:20.659679Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:20.661503Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:20:30.683633Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:20:30.749371Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmisw7jh"
level=info timestamp=2018-08-02T11:20:30.752005Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:20:30.752507Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:22:34.756751Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:22:36.049627Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:22:36.104746Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:22:41.249922Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:22:41 http: TLS handshake error from 10.129.0.1:35934: EOF
level=info timestamp=2018-08-02T11:22:44.993516Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:22:46.600500Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:22:51 http: TLS handshake error from 10.129.0.1:35946: EOF
level=info timestamp=2018-08-02T11:22:55.255921Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:23:01 http: TLS handshake error from 10.129.0.1:35958: EOF
level=info timestamp=2018-08-02T11:23:05.515739Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:23:06.438839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:23:06.556330Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:23:11 http: TLS handshake error from 10.129.0.1:35970: EOF
level=info timestamp=2018-08-02T11:23:15.665538Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:21:06 http: TLS handshake error from 10.129.0.1:45544: EOF
level=info timestamp=2018-08-02T11:21:11.232419Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:21:16 http: TLS handshake error from 10.129.0.1:45556: EOF
2018/08/02 11:21:26 http: TLS handshake error from 10.129.0.1:45568: EOF
2018/08/02 11:21:36 http: TLS handshake error from 10.129.0.1:45580: EOF
2018/08/02 11:21:46 http: TLS handshake error from 10.129.0.1:45592: EOF
2018/08/02 11:21:56 http: TLS handshake error from 10.129.0.1:45604: EOF
2018/08/02 11:22:06 http: TLS handshake error from 10.129.0.1:45616: EOF
2018/08/02 11:22:16 http: TLS handshake error from 10.129.0.1:45628: EOF
2018/08/02 11:22:26 http: TLS handshake error from 10.129.0.1:45640: EOF
2018/08/02 11:22:36 http: TLS handshake error from 10.129.0.1:45654: EOF
2018/08/02 11:22:46 http: TLS handshake error from 10.129.0.1:45666: EOF
2018/08/02 11:22:56 http: TLS handshake error from 10.129.0.1:45678: EOF
2018/08/02 11:23:06 http: TLS handshake error from 10.129.0.1:45690: EOF
level=info timestamp=2018-08-02T11:23:11.375917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:17:14.175771Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihd8k2 kind= uid=9aeee631-9645-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:17:14.487316Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifhw5n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifhw5n"
level=info timestamp=2018-08-02T11:17:14.566635Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmik4vbr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmik4vbr"
level=info timestamp=2018-08-02T11:17:15.119016Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihd8k2\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihd8k2"
level=info timestamp=2018-08-02T11:20:15.991693Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:15.992789Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:16.027038Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigpf6b kind= uid=075132ea-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:16.027328Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmigpf6b kind= uid=075132ea-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:16.059821Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:16.059930Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:16.098988Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:20:16.099220Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:17.051582Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisw7jh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"
level=info timestamp=2018-08-02T11:20:17.085418Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin69jw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin69jw"
level=info timestamp=2018-08-02T11:20:17.208597Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijkh9q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijkh9q"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:21:54.712953Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmijkh9q"
level=error timestamp=2018-08-02T11:21:54.716621Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:21:54.717077Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"
level=info timestamp=2018-08-02T11:23:16.635252Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmijkh9q, existing: true\n"
level=info timestamp=2018-08-02T11:23:16.636033Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:23:16.636197Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:16.636595Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:23:16.643287Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmijkh9q kind= uid=0756cf0e-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:23:16.644195Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmijkh9q"
level=info timestamp=2018-08-02T11:23:16.644671Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmisw7jh, existing: true\n"
level=info timestamp=2018-08-02T11:23:16.644791Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:23:16.644861Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:16.645065Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:23:16.649388Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind= uid=0748b6a2-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:23:16.649813Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:21:14.688773Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:21:14.690683Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:21:14.692291Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmin69jw"
level=info timestamp=2018-08-02T11:21:55.527429Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigpf6b, existing: true\n"
level=info timestamp=2018-08-02T11:21:55.528423Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:21:55.528526Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:21:55.530739Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmigpf6b kind= uid=075132ea-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:21:55.541871Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmigpf6b kind= uid=075132ea-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:21:55.558617Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmigpf6b"
level=info timestamp=2018-08-02T11:21:55.653282Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmin69jw, existing: true\n"
level=info timestamp=2018-08-02T11:21:55.653463Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:21:55.653538Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:21:55.653761Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:21:55.657134Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:21:55.658014Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmin69jw"

Pod name: virt-launcher-testvmigpf6b-mzjjp
Pod phase: Running
level=info timestamp=2018-08-02T11:20:22.174514Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:22.174816Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:22.176468Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:20:32.183096Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:20:32.244193Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmigpf6b"
level=info timestamp=2018-08-02T11:20:32.251621Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:20:32.251864Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmijkh9q-bghh9
Pod phase: Running
level=info timestamp=2018-08-02T11:20:21.596653Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:21.596791Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:21.598251Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:20:31.607703Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:20:31.723787Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmijkh9q"
level=info timestamp=2018-08-02T11:20:31.726639Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:20:31.728095Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmin69jw-swqkm
Pod phase: Running
level=info timestamp=2018-08-02T11:20:22.263645Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:22.263819Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:22.268853Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:20:32.279109Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:20:32.342074Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmin69jw"
level=info timestamp=2018-08-02T11:20:32.342800Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:20:32.343391Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmisw7jh-8wdsq
Pod phase: Running
level=info timestamp=2018-08-02T11:20:20.658973Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:20:20.659679Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:20:20.661503Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:20:30.683633Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:20:30.749371Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmisw7jh"
level=info timestamp=2018-08-02T11:20:30.752005Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:20:30.752507Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.956 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on the same node from Pod
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received: testvmisw7jh,0748b6a2-9646-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:20:16.212526Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmisw7jh kind=VirtualMachineInstance uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmisw7jh-8wdsq"
level=info timestamp=2018-08-02T11:20:32.300805Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmisw7jh kind=VirtualMachineInstance uid=0748b6a2-9646-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmisw7jh-8wdsq"
level=error timestamp=2018-08-02T11:20:32.352367Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmisw7jh kind=VirtualMachineInstance uid=0748b6a2-9646-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:22:41 http: TLS handshake error from 10.129.0.1:35934: EOF
level=info timestamp=2018-08-02T11:22:44.993516Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:22:46.600500Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:22:51 http: TLS handshake error from 10.129.0.1:35946: EOF
level=info timestamp=2018-08-02T11:22:55.255921Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:23:01 http: TLS handshake error from 10.129.0.1:35958: EOF
level=info timestamp=2018-08-02T11:23:05.515739Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:23:06.438839Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:23:06.556330Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:23:11 http: TLS handshake error from 10.129.0.1:35970: EOF
level=info timestamp=2018-08-02T11:23:15.665538Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:23:16.834043Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:23:21 http: TLS handshake error from 10.129.0.1:35982: EOF
level=info timestamp=2018-08-02T11:23:28.222941Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:23:31 http: TLS handshake error from 10.129.0.1:35994: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:21:16 http: TLS handshake error from 10.129.0.1:45556: EOF
2018/08/02 11:21:26 http: TLS handshake error from 10.129.0.1:45568: EOF
2018/08/02 11:21:36 http: TLS handshake error from 10.129.0.1:45580: EOF
2018/08/02 11:21:46 http: TLS handshake error from 10.129.0.1:45592: EOF
2018/08/02 11:21:56 http: TLS handshake error from 10.129.0.1:45604: EOF
2018/08/02 11:22:06 http: TLS handshake error from 10.129.0.1:45616: EOF
2018/08/02 11:22:16 http: TLS handshake error from 10.129.0.1:45628: EOF
2018/08/02 11:22:26 http: TLS handshake error from 10.129.0.1:45640: EOF
2018/08/02 11:22:36 http: TLS handshake error from 10.129.0.1:45654: EOF
2018/08/02 11:22:46 http: TLS handshake error from 10.129.0.1:45666: EOF
2018/08/02 11:22:56 http: TLS handshake error from 10.129.0.1:45678: EOF
2018/08/02 11:23:06 http: TLS handshake error from 10.129.0.1:45690: EOF
level=info timestamp=2018-08-02T11:23:11.375917Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:23:16 http: TLS handshake error from 10.129.0.1:45702: EOF
2018/08/02 11:23:26 http: TLS handshake error from 10.129.0.1:45714: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:20:16.099220Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:17.051582Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisw7jh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"
level=info timestamp=2018-08-02T11:20:17.085418Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin69jw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin69jw"
level=info timestamp=2018-08-02T11:20:17.208597Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijkh9q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijkh9q"
level=info timestamp=2018-08-02T11:23:17.924808Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:17.926292Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:17.970948Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:17.971407Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:18.021761Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:18.022871Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:18.064042Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:18.064313Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:18.371185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqbxqm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqbxqm"
level=info timestamp=2018-08-02T11:23:18.420850Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm"
level=info timestamp=2018-08-02T11:23:19.042989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:23:33.980333Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:23:33.980385Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:33.980623Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:23:33.982013Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:23:33.982340Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiqbxqm"
level=info timestamp=2018-08-02T11:23:33.985006Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiqspnm, existing: true\n"
level=info timestamp=2018-08-02T11:23:33.985179Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:23:33.985230Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:33.985510Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:23:33.988741Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:23:33.989369Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm"
level=info timestamp=2018-08-02T11:23:34.143419Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiqbxqm, existing: true\n"
level=info timestamp=2018-08-02T11:23:34.143630Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:23:34.143704Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:34.143908Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Processing vmi update"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:23:16.461278Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:16.461359Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmigpf6b kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:23:16.461447Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmigpf6b kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:23:16.508587Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmin69jw, existing: false\n"
level=info timestamp=2018-08-02T11:23:16.508677Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:16.508755Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:23:16.508849Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:23:17.479625Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmigpf6b, existing: false\n"
level=info timestamp=2018-08-02T11:23:17.479704Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:17.479783Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmigpf6b kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:23:17.479868Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmigpf6b kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:23:17.578657Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmin69jw, existing: false\n"
level=info timestamp=2018-08-02T11:23:17.578731Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:23:17.578803Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:23:17.578885Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmin69jw kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmib6lzr-7kvdn
Pod phase: Running
level=info timestamp=2018-08-02T11:23:23.418615Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:23:23.418786Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:23:23.420628Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"

Pod name: virt-launcher-testvmimpj6k-bqh7z
Pod phase: Running
level=info timestamp=2018-08-02T11:23:21.741017Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:23:21.741259Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:23:21.758024Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:23:31.762627Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:23:31.798939Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimpj6k"
level=info timestamp=2018-08-02T11:23:31.799823Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:23:31.800045Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmiqbxqm-zdxcb
Pod phase: Running
level=info timestamp=2018-08-02T11:23:22.938860Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:23:22.939173Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:23:22.940919Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:23:32.951713Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:23:33.054728Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqbxqm"
level=info timestamp=2018-08-02T11:23:33.062742Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:23:33.063654Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmiqspnm-wj2cx
Pod phase: Running
level=info timestamp=2018-08-02T11:23:23.306738Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:23:23.306905Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:23:23.308588Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:23:33.317898Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:23:33.525031Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqspnm"
level=info timestamp=2018-08-02T11:23:33.527795Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:23:33.528572Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:25:34.717488Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:25:38.180331Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:25:38.210564Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:25:41 http: TLS handshake error from 10.129.0.1:36152: EOF
level=info timestamp=2018-08-02T11:25:41.704729Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:25:48.218333Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:25:51 http: TLS handshake error from 10.129.0.1:36164: EOF
level=info timestamp=2018-08-02T11:25:51.891680Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:26:01 http: TLS handshake error from 10.129.0.1:36176: EOF
level=info timestamp=2018-08-02T11:26:02.045045Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:26:08.663871Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:26:08.699109Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:26:11.104770Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:26:11 http: TLS handshake error from 10.129.0.1:36188: EOF
level=info timestamp=2018-08-02T11:26:12.171506Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:24:06 http: TLS handshake error from 10.129.0.1:45762: EOF
2018/08/02 11:24:16 http: TLS handshake error from 10.129.0.1:45774: EOF
2018/08/02 11:24:26 http: TLS handshake error from 10.129.0.1:45786: EOF
2018/08/02 11:24:36 http: TLS handshake error from 10.129.0.1:45798: EOF
2018/08/02 11:24:46 http: TLS handshake error from 10.129.0.1:45810: EOF
2018/08/02 11:24:56 http: TLS handshake error from 10.129.0.1:45822: EOF
2018/08/02 11:25:06 http: TLS handshake error from 10.129.0.1:45834: EOF
2018/08/02 11:25:16 http: TLS handshake error from 10.129.0.1:45846: EOF
2018/08/02 11:25:26 http: TLS handshake error from 10.129.0.1:45858: EOF
2018/08/02 11:25:36 http: TLS handshake error from 10.129.0.1:45872: EOF
level=info timestamp=2018-08-02T11:25:41.276312Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:25:46 http: TLS handshake error from 10.129.0.1:45884: EOF
2018/08/02 11:25:56 http: TLS handshake error from 10.129.0.1:45896: EOF
2018/08/02 11:26:06 http: TLS handshake error from 10.129.0.1:45908: EOF
2018/08/02 11:26:16 http: TLS handshake error from 10.129.0.1:45920: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:20:16.099220Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmin69jw kind= uid=075df36e-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:20:17.051582Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmisw7jh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmisw7jh"
level=info timestamp=2018-08-02T11:20:17.085418Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmin69jw\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmin69jw"
level=info timestamp=2018-08-02T11:20:17.208597Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijkh9q\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijkh9q"
level=info timestamp=2018-08-02T11:23:17.924808Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:17.926292Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:17.970948Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:17.971407Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:18.021761Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:18.022871Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:18.064042Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:23:18.064313Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:23:18.371185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqbxqm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqbxqm"
level=info timestamp=2018-08-02T11:23:18.420850Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm"
level=info timestamp=2018-08-02T11:23:19.042989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:26:00.490547Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:26:00.490712Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:26:00.491018Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmisw7jh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:26:17.739607Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiqspnm, existing: true\n" level=info timestamp=2018-08-02T11:26:17.741412Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:26:17.741581Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:17.742494Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=info timestamp=2018-08-02T11:26:17.748518Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiqbxqm, existing: true\n" level=info timestamp=2018-08-02T11:26:17.748774Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:26:17.748871Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:17.749112Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:26:17.753922Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiqbxqm kind= uid=73bcfe02-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:26:17.754995Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiqbxqm" level=error timestamp=2018-08-02T11:26:17.764688Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiqspnm kind= uid=73c890e0-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:26:17.765338Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:24:55.357689Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:24:55.367387Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:24:55.373454Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimpj6k" level=info timestamp=2018-08-02T11:24:56.843829Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmib6lzr, existing: true\n" level=info timestamp=2018-08-02T11:24:56.844056Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:24:56.844143Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:24:56.844349Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:24:56.846374Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:24:56.846664Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmib6lzr" level=info timestamp=2018-08-02T11:26:17.294854Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpj6k, existing: true\n" level=info timestamp=2018-08-02T11:26:17.295483Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:26:17.295606Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:17.301751Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:26:17.305369Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:26:17.305692Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimpj6k" Pod name: virt-launcher-testvmib6lzr-7kvdn Pod phase: Running level=info timestamp=2018-08-02T11:23:23.418615Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:23:23.418786Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:23:23.420628Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:23:33.433870Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:23:33.482302Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmib6lzr" level=info timestamp=2018-08-02T11:23:33.484278Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:23:33.484503Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmimpj6k-bqh7z Pod phase: Running level=info timestamp=2018-08-02T11:23:21.741017Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:23:21.741259Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:23:21.758024Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:23:31.762627Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:23:31.798939Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimpj6k" level=info timestamp=2018-08-02T11:23:31.799823Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:23:31.800045Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiqbxqm-zdxcb Pod phase: Running level=info timestamp=2018-08-02T11:23:22.938860Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:23:22.939173Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:23:22.940919Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:23:32.951713Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:23:33.054728Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqbxqm" level=info timestamp=2018-08-02T11:23:33.062742Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:23:33.063654Z 
pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiqspnm-wj2cx Pod phase: Running level=info timestamp=2018-08-02T11:23:23.306738Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:23:23.306905Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:23:23.308588Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:23:33.317898Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:23:33.525031Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiqspnm" level=info timestamp=2018-08-02T11:23:33.527795Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:23:33.528572Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" • Failure in Spec Setup (BeforeEach) [181.927 seconds] Networking /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48 should be reachable via the propagated IP from a Pod [BeforeEach] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 on a different node from Pod /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 Unexpected Warning event received: testvmiqbxqm,73bcfe02-9646-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt Expected : Warning not to equal : Warning /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247 ------------------------------ level=info timestamp=2018-08-02T11:23:18.105788Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqbxqm kind=VirtualMachineInstance uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiqbxqm-zdxcb" level=info timestamp=2018-08-02T11:23:33.376010Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiqbxqm kind=VirtualMachineInstance uid=73bcfe02-9646-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiqbxqm-zdxcb" level=error timestamp=2018-08-02T11:23:33.422681Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmiqbxqm kind=VirtualMachineInstance uid=73bcfe02-9646-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:25:51.891680Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:26:01 http: TLS handshake error from 10.129.0.1:36176: EOF level=info timestamp=2018-08-02T11:26:02.045045Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET 
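This failure (and the similar one later in the run) is produced by the functional-test event watcher: during spec setup the suite watches the VMI's Kubernetes events and aborts the spec as soon as any event of type Warning arrives, which is what the "Expected : Warning not to equal : Warning" assertion at tests/utils.go:247 expresses. A minimal Go sketch of that assertion pattern, assuming Gomega and the core/v1 event types; failOnWarningEvents and the events channel are hypothetical names, not the actual KubeVirt helpers:

package tests_test

import (
	. "github.com/onsi/gomega"
	k8sv1 "k8s.io/api/core/v1"
)

// failOnWarningEvents drains the event stream observed for a single VMI and
// fails the spec on the first Warning-type event; in this run that is the
// virt-handler "Failed to open current namespace" warning shown above.
func failOnWarningEvents(events <-chan *k8sv1.Event) {
	for event := range events {
		Expect(event.Type).NotTo(Equal(k8sv1.EventTypeWarning),
			"unexpected warning event received: %s", event.Message)
	}
}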
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:26:08.663871Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:26:08.699109Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:26:11.104770Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:26:11 http: TLS handshake error from 10.129.0.1:36188: EOF level=info timestamp=2018-08-02T11:26:12.171506Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:26:18.490420Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:26:21 http: TLS handshake error from 10.129.0.1:36200: EOF level=info timestamp=2018-08-02T11:26:22.356374Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:26:31 http: TLS handshake error from 10.129.0.1:36212: EOF level=info timestamp=2018-08-02T11:26:35.491755Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:26:35.714599Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:26:35.739095Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:24:16 http: TLS handshake error from 10.129.0.1:45774: EOF 2018/08/02 11:24:26 http: TLS handshake error from 10.129.0.1:45786: EOF 2018/08/02 11:24:36 http: TLS handshake error from 10.129.0.1:45798: EOF 2018/08/02 11:24:46 http: TLS handshake error from 10.129.0.1:45810: EOF 2018/08/02 11:24:56 http: TLS handshake error from 10.129.0.1:45822: EOF 2018/08/02 11:25:06 http: TLS handshake error from 10.129.0.1:45834: EOF 2018/08/02 11:25:16 http: TLS handshake error from 10.129.0.1:45846: EOF 2018/08/02 11:25:26 http: TLS handshake error from 10.129.0.1:45858: EOF 2018/08/02 11:25:36 http: TLS handshake error from 10.129.0.1:45872: EOF level=info timestamp=2018-08-02T11:25:41.276312Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:25:46 http: TLS handshake error from 10.129.0.1:45884: EOF 2018/08/02 11:25:56 http: TLS handshake error from 10.129.0.1:45896: EOF 2018/08/02 11:26:06 http: TLS handshake error from 10.129.0.1:45908: EOF 2018/08/02 11:26:16 http: TLS handshake error from 10.129.0.1:45920: EOF 2018/08/02 11:26:26 http: TLS 
handshake error from 10.129.0.1:45932: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:23:18.064042Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:23:18.064313Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:23:18.371185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqbxqm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqbxqm" level=info timestamp=2018-08-02T11:23:18.420850Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm" level=info timestamp=2018-08-02T11:23:19.042989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm" level=info timestamp=2018-08-02T11:26:19.840708Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:19.841567Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:19.867395Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zj9w kind= uid=e02ff32b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:19.867683Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zj9w kind= uid=e02ff32b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:19.897481Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:19.897719Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:20.160463Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:20.160683Z pos=preset.go:171 component=virt-controller 
service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:20.531521Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif8pw8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif8pw8" level=info timestamp=2018-08-02T11:26:20.831895Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidl5jt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidl5jt" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:26:36.271770Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:26:36.273733Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:26:36.273993Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidl5jt" level=info timestamp=2018-08-02T11:26:36.326307Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif8pw8, existing: true\n" level=info timestamp=2018-08-02T11:26:36.326425Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:26:36.326477Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:36.326659Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:26:36.327804Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:26:36.328045Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmif8pw8" level=info timestamp=2018-08-02T11:26:36.354423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidl5jt, existing: true\n" level=info timestamp=2018-08-02T11:26:36.354537Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:26:36.354590Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:36.354744Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:26:36.357361Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:26:36.357753Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidl5jt" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:26:18.266905Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:18.267171Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:26:18.267441Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind= uid=73c1d4f3-9646-11e8-bba8-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:26:18.422698Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmib6lzr, existing: false\n" level=info timestamp=2018-08-02T11:26:18.422779Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:18.422866Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmib6lzr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:26:18.423063Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmib6lzr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:26:18.443676Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpj6k, existing: false\n" level=info timestamp=2018-08-02T11:26:18.443764Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:18.443849Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:26:18.443941Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T11:26:18.767083Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmib6lzr, existing: false\n" level=info timestamp=2018-08-02T11:26:18.767522Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:26:18.768041Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmib6lzr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:26:18.768465Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmib6lzr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi2zj9w-kvchb Pod phase: Running level=info timestamp=2018-08-02T11:26:24.747528Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:26:24.747923Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:26:24.752235Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:26:34.762595Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:26:34.831076Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2zj9w" level=info timestamp=2018-08-02T11:26:34.832169Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:26:34.832608Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmidl5jt-mgfjt Pod phase: Running level=info timestamp=2018-08-02T11:26:24.739553Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:26:24.739680Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:26:24.743227Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:26:34.751332Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:26:34.790464Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidl5jt" level=info timestamp=2018-08-02T11:26:34.791927Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:26:34.792309Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmif8pw8-sl8x4 Pod phase: Running level=info timestamp=2018-08-02T11:26:24.708268Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:26:24.708893Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:26:24.711943Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:26:34.722271Z pos=libvirt.go:276 
component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:26:34.767289Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmif8pw8" level=info timestamp=2018-08-02T11:26:34.768268Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:26:34.768570Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmig8sdp-jktb7 Pod phase: Running level=info timestamp=2018-08-02T11:26:24.818728Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:26:24.819410Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:26:24.822326Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:26:34.827972Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:26:34.855204Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmig8sdp" level=info timestamp=2018-08-02T11:26:34.856816Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:26:34.857316Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:28:40.337082Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:28:40.406281Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:28:41 http: TLS handshake error from 10.129.0.1:36370: EOF level=info timestamp=2018-08-02T11:28:48.471234Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:28:49.981008Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:28:51 http: TLS handshake error from 10.129.0.1:36382: EOF level=info timestamp=2018-08-02T11:28:58.657418Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:29:01 http: TLS handshake error from 10.129.0.1:36394: EOF level=info timestamp=2018-08-02T11:29:08.834403Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info 
timestamp=2018-08-02T11:29:10.670359Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:29:10.707732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:29:11.131739Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:29:11 http: TLS handshake error from 10.129.0.1:36406: EOF level=info timestamp=2018-08-02T11:29:18.941828Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:29:20.117550Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:27:06 http: TLS handshake error from 10.129.0.1:45980: EOF 2018/08/02 11:27:16 http: TLS handshake error from 10.129.0.1:45992: EOF 2018/08/02 11:27:26 http: TLS handshake error from 10.129.0.1:46004: EOF 2018/08/02 11:27:36 http: TLS handshake error from 10.129.0.1:46016: EOF 2018/08/02 11:27:46 http: TLS handshake error from 10.129.0.1:46028: EOF 2018/08/02 11:27:56 http: TLS handshake error from 10.129.0.1:46040: EOF 2018/08/02 11:28:06 http: TLS handshake error from 10.129.0.1:46052: EOF 2018/08/02 11:28:16 http: TLS handshake error from 10.129.0.1:46064: EOF 2018/08/02 11:28:26 http: TLS handshake error from 10.129.0.1:46076: EOF 2018/08/02 11:28:36 http: TLS handshake error from 10.129.0.1:46090: EOF level=info timestamp=2018-08-02T11:28:41.406829Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:28:46 http: TLS handshake error from 10.129.0.1:46102: EOF 2018/08/02 11:28:56 http: TLS handshake error from 10.129.0.1:46114: EOF 2018/08/02 11:29:06 http: TLS handshake error from 10.129.0.1:46126: EOF 2018/08/02 11:29:16 http: TLS handshake error from 10.129.0.1:46138: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:23:18.064042Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:23:18.064313Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmib6lzr kind= uid=73cf7b4b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:23:18.371185Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqbxqm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqbxqm" level=info timestamp=2018-08-02T11:23:18.420850Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm" level=info timestamp=2018-08-02T11:23:19.042989Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiqspnm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiqspnm" level=info timestamp=2018-08-02T11:26:19.840708Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:19.841567Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:19.867395Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zj9w kind= uid=e02ff32b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:19.867683Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2zj9w kind= uid=e02ff32b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:19.897481Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:19.897719Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:20.160463Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:26:20.160683Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:26:20.531521Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif8pw8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif8pw8" level=info timestamp=2018-08-02T11:26:20.831895Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidl5jt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidl5jt" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: 
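The repeated "Operation cannot be fulfilled ... the object has been modified" entries in the virt-controller logs are ordinary optimistic-concurrency conflicts: a write raced with another update of the same VMI, the API server rejected the stale resourceVersion, and the controller requeued the key; these are benign when transient. A short sketch of the standard client-go idiom for absorbing the same conflict (retry.RetryOnConflict is real client-go API; fetchLatest and applyUpdate are hypothetical stand-ins for a get-and-update against the API server):

package main

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

// updateWithRetry re-reads the object and re-applies the mutation until the
// write is accepted, which is how a conflict like the ones logged above is
// normally absorbed instead of surfacing to the caller.
func updateWithRetry(fetchLatest, applyUpdate func() error) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		if err := fetchLatest(); err != nil {
			return err
		}
		return applyUpdate() // returns a Conflict error while the resourceVersion is stale
	})
}

func main() {
	fmt.Println(updateWithRetry(func() error { return nil }, func() error { return nil }))
}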
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:29:01.606849Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:01.607020Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmiqspnm kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:29:01.607435Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiqspnm kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:29:20.071483Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif8pw8, existing: true\n"
level=info timestamp=2018-08-02T11:29:20.072573Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:29:20.072679Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:20.073365Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:20.079743Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmif8pw8 kind= uid=e02d704f-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:20.080523Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmif8pw8"
level=info timestamp=2018-08-02T11:29:20.108646Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidl5jt, existing: true\n"
level=info timestamp=2018-08-02T11:29:20.108807Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:29:20.108887Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:20.109111Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:20.112358Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:20.112824Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidl5jt"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:27:58.049180Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:27:58.049278Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:27:58.052834Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:27:58.073296Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:27:58.082094Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig8sdp"
level=info timestamp=2018-08-02T11:27:58.834284Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2zj9w, existing: true\n"
level=info timestamp=2018-08-02T11:27:58.834443Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:27:58.834544Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:27:58.834755Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2zj9w kind= uid=e02ff32b-9646-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:27:58.838302Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2zj9w kind= uid=e02ff32b-9646-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:27:58.838624Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2zj9w"
level=info timestamp=2018-08-02T11:29:01.147552Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpj6k, existing: false\n"
level=info timestamp=2018-08-02T11:29:01.148118Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:01.148563Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:29:01.150737Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpj6k kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi2zj9w-kvchb
Pod phase: Running
level=info timestamp=2018-08-02T11:26:24.747528Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:26:24.747923Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:26:24.752235Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:26:34.762595Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:26:34.831076Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2zj9w"
level=info timestamp=2018-08-02T11:26:34.832169Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:26:34.832608Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmidl5jt-mgfjt
Pod phase: Running
level=info timestamp=2018-08-02T11:26:24.739553Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:26:24.739680Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:26:24.743227Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:26:34.751332Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:26:34.790464Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidl5jt"
level=info timestamp=2018-08-02T11:26:34.791927Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:26:34.792309Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmif8pw8-sl8x4
Pod phase: Running
level=info timestamp=2018-08-02T11:26:24.708268Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:26:24.708893Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:26:24.711943Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:26:34.722271Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:26:34.767289Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmif8pw8"
level=info timestamp=2018-08-02T11:26:34.768268Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:26:34.768570Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmig8sdp-jktb7
Pod phase: Running
level=info timestamp=2018-08-02T11:26:24.818728Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:26:24.819410Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:26:24.822326Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:26:34.827972Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:26:34.855204Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmig8sdp"
level=info timestamp=2018-08-02T11:26:34.856816Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:26:34.857316Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [182.173 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on the same node from Node
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received: testvmif8pw8,e02d704f-9646-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:26:20.234668Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmif8pw8 kind=VirtualMachineInstance uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmif8pw8-sl8x4"
level=info timestamp=2018-08-02T11:26:35.671349Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmif8pw8 kind=VirtualMachineInstance uid=e02d704f-9646-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmif8pw8-sl8x4"
level=error timestamp=2018-08-02T11:26:35.775257Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmif8pw8 kind=VirtualMachineInstance uid=e02d704f-9646-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:28:48.471234Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:28:49.981008Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:28:51 http: TLS handshake error from 10.129.0.1:36382: EOF
level=info timestamp=2018-08-02T11:28:58.657418Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:29:01 http: TLS handshake error from 10.129.0.1:36394: EOF
level=info timestamp=2018-08-02T11:29:08.834403Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:29:10.670359Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:29:10.707732Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:29:11.131739Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:29:11 http: TLS handshake error from 10.129.0.1:36406: EOF
level=info timestamp=2018-08-02T11:29:18.941828Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:29:20.117550Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:29:21 http: TLS handshake error from 10.129.0.1:36418: EOF
level=info timestamp=2018-08-02T11:29:30.042643Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:29:31 http: TLS handshake error from 10.129.0.1:36430: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:27:26 http: TLS handshake error from 10.129.0.1:46004: EOF
2018/08/02 11:27:36 http: TLS handshake error from 10.129.0.1:46016: EOF
2018/08/02 11:27:46 http: TLS handshake error from 10.129.0.1:46028: EOF
2018/08/02 11:27:56 http: TLS handshake error from 10.129.0.1:46040: EOF
2018/08/02 11:28:06 http: TLS handshake error from 10.129.0.1:46052: EOF
2018/08/02 11:28:16 http: TLS handshake error from 10.129.0.1:46064: EOF
2018/08/02 11:28:26 http: TLS handshake error from 10.129.0.1:46076: EOF
2018/08/02 11:28:36 http: TLS handshake error from 10.129.0.1:46090: EOF
level=info timestamp=2018-08-02T11:28:41.406829Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:28:46 http: TLS handshake error from 10.129.0.1:46102: EOF
2018/08/02 11:28:56 http: TLS handshake error from 10.129.0.1:46114: EOF
2018/08/02 11:29:06 http: TLS handshake error from 10.129.0.1:46126: EOF
2018/08/02 11:29:16 http: TLS handshake error from 10.129.0.1:46138: EOF
2018/08/02 11:29:26 http: TLS handshake error from 10.129.0.1:46150: EOF
2018/08/02 11:29:36 http: TLS handshake error from 10.129.0.1:46162: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:26:19.897719Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:26:20.160463Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:26:20.160683Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:26:20.531521Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif8pw8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif8pw8"
level=info timestamp=2018-08-02T11:26:20.831895Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidl5jt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidl5jt"
level=info timestamp=2018-08-02T11:29:22.025649Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.026069Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.046982Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.047196Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.083635Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik8j4p kind= uid=4cca8983-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.083742Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik8j4p kind= uid=4cca8983-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.124688Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.124968Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.617414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirrxhn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirrxhn"
level=info timestamp=2018-08-02T11:29:22.800335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidphwg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidphwg"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:29:39.470942Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmik8j4p kind= uid=4cca8983-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:39.474474Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmik8j4p kind= uid=4cca8983-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:39.475224Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik8j4p"
level=info timestamp=2018-08-02T11:29:39.494036Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmirrxhn, existing: true\n"
level=info timestamp=2018-08-02T11:29:39.494242Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:29:39.494335Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:39.494545Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:39.496038Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:39.496448Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirrxhn"
level=info timestamp=2018-08-02T11:29:39.656913Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmirrxhn, existing: true\n"
level=info timestamp=2018-08-02T11:29:39.657071Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:29:39.657211Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:39.657430Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:39.659022Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:39.659485Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirrxhn"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:29:38.660811Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:38.664214Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:38.665355Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidh4mx"
level=info timestamp=2018-08-02T11:29:38.687357Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidphwg, existing: true\n"
level=info timestamp=2018-08-02T11:29:38.688267Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:29:38.689112Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:38.693226Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:38.703370Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:38.704267Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidphwg"
level=info timestamp=2018-08-02T11:29:38.706631Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidh4mx, existing: true\n"
level=info timestamp=2018-08-02T11:29:38.707464Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:29:38.707758Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:29:38.708231Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:29:38.711115Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:29:38.712808Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidh4mx"

Pod name: virt-launcher-testvmidh4mx-bjwwc
Pod phase: Running
level=info timestamp=2018-08-02T11:29:26.562322Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:26.562465Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:26.564030Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:36.574004Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:36.681661Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidh4mx"
level=info timestamp=2018-08-02T11:29:36.689126Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:36.690224Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmidphwg-l8sf8
Pod phase: Running
level=info timestamp=2018-08-02T11:29:26.055475Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:26.055745Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:26.057336Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:36.099099Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:36.124999Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidphwg"
level=info timestamp=2018-08-02T11:29:36.126682Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:36.127464Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmik8j4p-7n6qk
Pod phase: Running
level=info timestamp=2018-08-02T11:29:27.776077Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:27.776286Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:27.778359Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:37.786213Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:37.896455Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmik8j4p"
level=info timestamp=2018-08-02T11:29:37.898503Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:37.898946Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmirrxhn-4l5d9
Pod phase: Running
level=info timestamp=2018-08-02T11:29:27.684605Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:27.684908Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:27.686928Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:37.698656Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:37.777242Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmirrxhn"
level=info timestamp=2018-08-02T11:29:37.779101Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:37.779723Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:31:37.337078Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:31:41 http: TLS handshake error from 10.129.0.1:36588: EOF
level=info timestamp=2018-08-02T11:31:42.786193Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:31:42.823276Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:31:43.764347Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:31:51 http: TLS handshake error from 10.129.0.1:36600: EOF
level=info timestamp=2018-08-02T11:31:52.419694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:31:53.976262Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:32:01 http: TLS handshake error from 10.129.0.1:36612: EOF
level=info timestamp=2018-08-02T11:32:04.246216Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:32:11 http: TLS handshake error from 10.129.0.1:36624: EOF
level=info timestamp=2018-08-02T11:32:13.234606Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:32:13.293549Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:32:14.433073Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:32:21 http: TLS handshake error from 10.129.0.1:36636: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:30:36 http: TLS handshake error from 10.129.0.1:46234: EOF
level=info timestamp=2018-08-02T11:30:41.111976Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:30:46 http: TLS handshake error from 10.129.0.1:46246: EOF
2018/08/02 11:30:56 http: TLS handshake error from 10.129.0.1:46258: EOF
2018/08/02 11:31:06 http: TLS handshake error from 10.129.0.1:46270: EOF
level=info timestamp=2018-08-02T11:31:11.263599Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:31:16 http: TLS handshake error from 10.129.0.1:46282: EOF
2018/08/02 11:31:26 http: TLS handshake error from 10.129.0.1:46294: EOF
2018/08/02 11:31:36 http: TLS handshake error from 10.129.0.1:46306: EOF
level=info timestamp=2018-08-02T11:31:41.091943Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:31:46 http: TLS handshake error from 10.129.0.1:46320: EOF
2018/08/02 11:31:56 http: TLS handshake error from 10.129.0.1:46332: EOF
2018/08/02 11:32:06 http: TLS handshake error from 10.129.0.1:46344: EOF
level=info timestamp=2018-08-02T11:32:11.141620Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:32:16 http: TLS handshake error from 10.129.0.1:46356: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:26:19.897719Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidl5jt kind= uid=e03333d9-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:26:20.160463Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:26:20.160683Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig8sdp kind= uid=e03a343b-9646-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:26:20.531521Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif8pw8\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif8pw8"
level=info timestamp=2018-08-02T11:26:20.831895Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidl5jt\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidl5jt"
level=info timestamp=2018-08-02T11:29:22.025649Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.026069Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.046982Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.047196Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.083635Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik8j4p kind= uid=4cca8983-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.083742Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmik8j4p kind= uid=4cca8983-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.124688Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:29:22.124968Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.617414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirrxhn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirrxhn"
level=info timestamp=2018-08-02T11:29:22.800335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidphwg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidphwg"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:31:01.139368Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmik8j4p"
level=info timestamp=2018-08-02T11:31:01.288755Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmirrxhn, existing: true\n"
level=info timestamp=2018-08-02T11:31:01.289008Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:31:01.289118Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:31:01.289637Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:31:01.292101Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind= uid=4cc3c1e8-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:31:01.292643Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirrxhn"
level=info timestamp=2018-08-02T11:32:03.921995Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif8pw8, existing: false\n"
level=info timestamp=2018-08-02T11:32:03.922695Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:03.923062Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif8pw8 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:32:03.923584Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif8pw8 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:32:03.953625Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidl5jt, existing: false\n"
level=info timestamp=2018-08-02T11:32:03.953773Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:03.953928Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:32:03.954300Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmidl5jt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:31:00.862120Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:31:00.862222Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:31:00.863878Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:31:00.871936Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind= uid=4cc7efea-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:31:00.878699Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidphwg"
level=info timestamp=2018-08-02T11:31:00.901680Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidh4mx, existing: true\n"
level=info timestamp=2018-08-02T11:31:00.902017Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:31:00.902099Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:31:00.902357Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:31:00.906552Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:31:00.907041Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmidh4mx"
level=info timestamp=2018-08-02T11:32:03.879887Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmig8sdp, existing: false\n"
level=info timestamp=2018-08-02T11:32:03.880582Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:03.880929Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmig8sdp kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:32:03.881488Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmig8sdp kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmidh4mx-bjwwc
Pod phase: Running
level=info timestamp=2018-08-02T11:29:26.562322Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:26.562465Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:26.564030Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:36.574004Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:36.681661Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidh4mx"
level=info timestamp=2018-08-02T11:29:36.689126Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:36.690224Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmidphwg-l8sf8
Pod phase: Running
level=info timestamp=2018-08-02T11:29:26.055475Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:26.055745Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:26.057336Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:36.099099Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:36.124999Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmidphwg"
level=info timestamp=2018-08-02T11:29:36.126682Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:36.127464Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmik8j4p-7n6qk
Pod phase: Running
level=info timestamp=2018-08-02T11:29:27.776077Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:27.776286Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:27.778359Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:37.786213Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:37.896455Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmik8j4p"
level=info timestamp=2018-08-02T11:29:37.898503Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:37.898946Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
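Every failure in this run reduces to the same virt-handler error: "Error detecting namespace type from path: /proc/<pid>/task/<tid>/ns/mnt". On Linux, the type of a namespace can be read from the target of that /proc symlink, which has the form mnt:[4026531840]; if the task behind the path exits before the link is read, the readlink fails and an error shaped exactly like the ones above is the natural result. A minimal sketch of that detection, assuming the readlink approach (illustrative only, not virt-handler's actual code):

    package sketch

    import (
        "fmt"
        "os"
        "strings"
    )

    // nsTypeFromPath returns the namespace type ("mnt", "net", "pid",
    // ...) encoded in a /proc/<pid>/ns/<name> symlink target such as
    // "mnt:[4026531840]". If the task has already exited, os.Readlink
    // fails, and the caller sees an error like the ones logged above.
    func nsTypeFromPath(path string) (string, error) {
        target, err := os.Readlink(path)
        if err != nil {
            return "", fmt.Errorf("error detecting namespace type from path: %s: %v", path, err)
        }
        parts := strings.SplitN(target, ":", 2)
        if len(parts) != 2 {
            return "", fmt.Errorf("unexpected namespace link target: %q", target)
        }
        return parts[0], nil
    }

Note how the failing paths in the logs point at ever-changing task IDs under the same /proc/23708 and /proc/1456 process directories, which is consistent with short-lived threads disappearing between enumeration and readlink.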
Pod name: virt-launcher-testvmirrxhn-4l5d9
Pod phase: Running
level=info timestamp=2018-08-02T11:29:27.684605Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:29:27.684908Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:29:27.686928Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:29:37.698656Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:29:37.777242Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmirrxhn"
level=info timestamp=2018-08-02T11:29:37.779101Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:29:37.779723Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.845 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  should be reachable via the propagated IP from a Pod [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
    on a different node from Node
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

    Unexpected Warning event received: testvmirrxhn,4cc3c1e8-9647-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:29:22.631076Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmirrxhn kind=VirtualMachineInstance uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmirrxhn-4l5d9"
level=info timestamp=2018-08-02T11:29:38.888386Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmirrxhn kind=VirtualMachineInstance uid=4cc3c1e8-9647-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmirrxhn-4l5d9"
level=error timestamp=2018-08-02T11:29:38.944992Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmirrxhn kind=VirtualMachineInstance uid=4cc3c1e8-9647-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:31:52.419694Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:31:53.976262Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:32:01 http: TLS handshake error from 10.129.0.1:36612: EOF
level=info timestamp=2018-08-02T11:32:04.246216Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:32:11 http: TLS handshake error from 10.129.0.1:36624: EOF
level=info timestamp=2018-08-02T11:32:13.234606Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:32:13.293549Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:32:14.433073Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:32:21 http: TLS handshake error from 10.129.0.1:36636: EOF
level=info timestamp=2018-08-02T11:32:22.691100Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:32:24.676585Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:32:31 http: TLS handshake error from 10.129.0.1:36648: EOF
level=info timestamp=2018-08-02T11:32:34.793546Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:32:41.200816Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:32:41 http: TLS handshake error from 10.129.0.1:36660: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:30:46 http: TLS handshake error from 10.129.0.1:46246: EOF
2018/08/02 11:30:56 http: TLS handshake error from 10.129.0.1:46258: EOF
2018/08/02 11:31:06 http: TLS handshake error from 10.129.0.1:46270: EOF
level=info timestamp=2018-08-02T11:31:11.263599Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:31:16 http: TLS handshake error from 10.129.0.1:46282: EOF
2018/08/02 11:31:26 http: TLS handshake error from 10.129.0.1:46294: EOF
2018/08/02 11:31:36 http: TLS handshake error from 10.129.0.1:46306: EOF
level=info timestamp=2018-08-02T11:31:41.091943Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:31:46 http: TLS handshake error from 10.129.0.1:46320: EOF
2018/08/02 11:31:56 http: TLS handshake error from 10.129.0.1:46332: EOF
2018/08/02 11:32:06 http: TLS handshake error from 10.129.0.1:46344: EOF
level=info timestamp=2018-08-02T11:32:11.141620Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:32:16 http: TLS handshake error from 10.129.0.1:46356: EOF
2018/08/02 11:32:26 http: TLS handshake error from 10.129.0.1:46368: EOF
2018/08/02 11:32:36 http: TLS handshake error from 10.129.0.1:46380: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:29:22.124968Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:29:22.617414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirrxhn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirrxhn"
level=info timestamp=2018-08-02T11:29:22.800335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidphwg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidphwg"
level=info timestamp=2018-08-02T11:32:23.881590Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:32:23.882779Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:32:23.909850Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:32:23.910388Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:32:23.968213Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:32:23.968978Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:32:23.999550Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:32:24.000112Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:32:24.210524Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4gtf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4gtf"
level=info timestamp=2018-08-02T11:32:24.216949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmig85cz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmig85cz"
level=info timestamp=2018-08-02T11:32:24.632186Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimf5ms\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimf5ms"
level=info timestamp=2018-08-02T11:32:24.904587Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix67q5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix67q5"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:32:41.568679Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:32:41.571370Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:32:41.571785Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig85cz"
level=info timestamp=2018-08-02T11:32:41.584067Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimf5ms, existing: true\n"
level=info timestamp=2018-08-02T11:32:41.584332Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:32:41.584640Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:41.584956Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:32:41.586767Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:32:41.587245Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimf5ms"
level=info timestamp=2018-08-02T11:32:41.732479Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmig85cz, existing: true\n"
level=info timestamp=2018-08-02T11:32:41.732645Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:32:41.732728Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:41.733047Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:32:41.734703Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:32:41.735089Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig85cz"
level=info timestamp=2018-08-02T11:32:41.748700Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimf5ms, existing: true\n"
level=info timestamp=2018-08-02T11:32:41.748946Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:32:41.749019Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:41.749368Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:32:41.751292Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:32:41.751963Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimf5ms"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:32:22.375875Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:32:22.376018Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:32:22.799158Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidphwg, existing: false\n"
level=info timestamp=2018-08-02T11:32:22.800207Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:22.800655Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:32:22.802137Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmidphwg kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:32:22.827437Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmidh4mx, existing: false\n"
level=info timestamp=2018-08-02T11:32:22.828133Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:22.828689Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:32:22.829123Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmidh4mx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:32:40.764943Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix67q5, existing: true\n"
level=info timestamp=2018-08-02T11:32:40.765259Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:32:40.765299Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:32:40.765495Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:32:40.774439Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
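The blk8f and zmfm7 sections show how that error propagates: each sync attempt fails, the VMI's key goes back on the handler's queue, and the "Processing vmi update" / "Synchronizing the VirtualMachineInstance failed." / "re-enqueuing" cycle repeats against a different task ID a few seconds later. A sketch of the shape of such a loop, assuming client-go's rate-limited workqueue (illustrative, not the actual vm.go):

    package sketch

    import (
        "log"

        "k8s.io/client-go/util/workqueue"
    )

    type handler struct {
        queue workqueue.RateLimitingInterface
        sync  func(key string) error
    }

    // runWorker drains the queue. A failed sync puts the key back
    // with rate-limited backoff, which is why the same VMI keeps
    // reappearing in the logs above until the underlying error
    // (here, the failing namespace readlink) goes away.
    func (h *handler) runWorker() {
        for {
            key, quit := h.queue.Get()
            if quit {
                return
            }
            if err := h.sync(key.(string)); err != nil {
                log.Printf("re-enqueuing VirtualMachineInstance %v: %v", key, err)
                h.queue.AddRateLimited(key)
            } else {
                h.queue.Forget(key)
            }
            h.queue.Done(key)
        }
    }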
Pod name: virt-launcher-testvmid4gtf-tkrhv
Pod phase: Running
level=info timestamp=2018-08-02T11:32:28.755032Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:32:28.755261Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:32:28.756783Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:32:40.026174Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:32:40.056813Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmid4gtf"
level=info timestamp=2018-08-02T11:32:40.058319Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:32:40.059033Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmig85cz-l7ddj
Pod phase: Running
level=info timestamp=2018-08-02T11:32:29.682371Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:32:29.682533Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:32:29.684483Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:32:39.693518Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:32:39.792718Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmig85cz"
level=info timestamp=2018-08-02T11:32:39.795806Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:32:39.796364Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmimf5ms-p9vwb
Pod phase: Running
level=info timestamp=2018-08-02T11:32:29.526935Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:32:29.527818Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:32:29.529507Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:32:39.540360Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:32:39.601501Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimf5ms"
level=info timestamp=2018-08-02T11:32:39.603269Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:32:39.603700Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmix67q5-7gjsw
Pod phase: Running
level=info timestamp=2018-08-02T11:32:28.903047Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:32:28.903221Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:32:28.904562Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:32:39.791081Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:32:39.853240Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmix67q5" level=info timestamp=2018-08-02T11:32:39.855462Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:32:39.856061Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running 2018/08/02 11:34:41 http: TLS handshake error from 10.129.0.1:36806: EOF level=info timestamp=2018-08-02T11:34:44.749851Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:34:44.789605Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:34:47.758321Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:34:51 http: TLS handshake error from 10.129.0.1:36818: EOF level=info timestamp=2018-08-02T11:34:53.899110Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:34:57.939415Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:01 http: TLS handshake error from 10.129.0.1:36830: EOF level=info timestamp=2018-08-02T11:35:08.059578Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:11 http: TLS handshake error from 10.129.0.1:36842: EOF level=info timestamp=2018-08-02T11:35:15.668417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:35:15.730339Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:35:18.244096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" 
proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:21 http: TLS handshake error from 10.129.0.1:36854: EOF level=info timestamp=2018-08-02T11:35:24.504378Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:33:16 http: TLS handshake error from 10.129.0.1:46428: EOF 2018/08/02 11:33:26 http: TLS handshake error from 10.129.0.1:46440: EOF 2018/08/02 11:33:36 http: TLS handshake error from 10.129.0.1:46452: EOF level=info timestamp=2018-08-02T11:33:41.276059Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:33:46 http: TLS handshake error from 10.129.0.1:46464: EOF 2018/08/02 11:33:56 http: TLS handshake error from 10.129.0.1:46476: EOF 2018/08/02 11:34:06 http: TLS handshake error from 10.129.0.1:46488: EOF 2018/08/02 11:34:16 http: TLS handshake error from 10.129.0.1:46500: EOF 2018/08/02 11:34:26 http: TLS handshake error from 10.129.0.1:46512: EOF 2018/08/02 11:34:36 http: TLS handshake error from 10.129.0.1:46524: EOF 2018/08/02 11:34:46 http: TLS handshake error from 10.129.0.1:46538: EOF 2018/08/02 11:34:56 http: TLS handshake error from 10.129.0.1:46550: EOF 2018/08/02 11:35:06 http: TLS handshake error from 10.129.0.1:46562: EOF level=info timestamp=2018-08-02T11:35:11.374483Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:35:16 http: TLS handshake error from 10.129.0.1:46574: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:29:22.124968Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmidh4mx kind= uid=4ccf5591-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:29:22.617414Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirrxhn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirrxhn" level=info timestamp=2018-08-02T11:29:22.800335Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmidphwg\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmidphwg" level=info timestamp=2018-08-02T11:32:23.881590Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:32:23.882779Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:32:23.909850Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info 
timestamp=2018-08-02T11:32:23.910388Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:32:23.968213Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:32:23.968978Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:32:23.999550Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:32:24.000112Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:32:24.210524Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4gtf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4gtf" level=info timestamp=2018-08-02T11:32:24.216949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmig85cz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmig85cz" level=info timestamp=2018-08-02T11:32:24.632186Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimf5ms\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimf5ms" level=info timestamp=2018-08-02T11:32:24.904587Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix67q5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix67q5" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:34:03.403204Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:34:03.403316Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:34:03.403792Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:34:03.410416Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default 
name=testvmig85cz kind= uid=b927b95b-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:34:03.411422Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig85cz" level=error timestamp=2018-08-02T11:34:03.412087Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimf5ms kind= uid=b93196db-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:34:03.412700Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimf5ms" level=info timestamp=2018-08-02T11:35:06.916073Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmik8j4p, existing: false\n" level=info timestamp=2018-08-02T11:35:06.916627Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:06.916972Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmik8j4p kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:35:06.919728Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmik8j4p kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:35:07.057637Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmirrxhn, existing: false\n" level=info timestamp=2018-08-02T11:35:07.057831Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:07.058001Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:35:07.058504Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmirrxhn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:33:22.598163Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:33:22.601837Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:33:22.602325Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmid4gtf" level=info timestamp=2018-08-02T11:34:02.801590Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix67q5, existing: true\n" level=info timestamp=2018-08-02T11:34:02.802642Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:34:02.802757Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:34:02.803486Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:34:02.815199Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:34:02.816060Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmix67q5" level=info timestamp=2018-08-02T11:34:03.562893Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmid4gtf, existing: true\n" level=info timestamp=2018-08-02T11:34:03.563151Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:34:03.563264Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:34:03.563503Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:34:03.565417Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind= uid=b92b6d28-9647-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:34:03.565770Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmid4gtf" Pod name: virt-launcher-testvmid4gtf-tkrhv Pod phase: Running level=info timestamp=2018-08-02T11:32:28.755032Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:32:28.755261Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:32:28.756783Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:32:40.026174Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:32:40.056813Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmid4gtf" level=info timestamp=2018-08-02T11:32:40.058319Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:32:40.059033Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmig85cz-l7ddj Pod phase: Running level=info timestamp=2018-08-02T11:32:29.682371Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:32:29.682533Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:32:29.684483Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:32:39.693518Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:32:39.792718Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmig85cz" level=info timestamp=2018-08-02T11:32:39.795806Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:32:39.796364Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmimf5ms-p9vwb Pod phase: Running level=info timestamp=2018-08-02T11:32:29.526935Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:32:29.527818Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:32:29.529507Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:32:39.540360Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:32:39.601501Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimf5ms" level=info timestamp=2018-08-02T11:32:39.603269Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:32:39.603700Z 
pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmix67q5-7gjsw
Pod phase: Running
level=info timestamp=2018-08-02T11:32:28.903047Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:32:28.903221Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:32:28.904562Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:32:39.791081Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:32:39.853240Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmix67q5"
level=info timestamp=2018-08-02T11:32:39.855462Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:32:39.856061Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [182.059 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  with a service matching the vmi exposed [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:272
    should be able to reach the vmi based on labels specified on the vmi
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:292

    Unexpected Warning event received: testvmig85cz,b927b95b-9647-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:32:23.985651Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmig85cz kind=VirtualMachineInstance uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmig85cz-l7ddj"
level=info timestamp=2018-08-02T11:32:40.774724Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmig85cz kind=VirtualMachineInstance uid=b927b95b-9647-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmig85cz-l7ddj"
level=error timestamp=2018-08-02T11:32:40.982511Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmig85cz kind=VirtualMachineInstance uid=b927b95b-9647-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:34:51 http: TLS handshake error from 10.129.0.1:36818: EOF
level=info timestamp=2018-08-02T11:34:53.899110Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:34:57.939415Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s"
proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:01 http: TLS handshake error from 10.129.0.1:36830: EOF level=info timestamp=2018-08-02T11:35:08.059578Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:11 http: TLS handshake error from 10.129.0.1:36842: EOF level=info timestamp=2018-08-02T11:35:15.668417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:35:15.730339Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:35:18.244096Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:21 http: TLS handshake error from 10.129.0.1:36854: EOF level=info timestamp=2018-08-02T11:35:24.504378Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:35:28.676999Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:31 http: TLS handshake error from 10.129.0.1:36866: EOF level=info timestamp=2018-08-02T11:35:41.049246Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:35:41 http: TLS handshake error from 10.129.0.1:36878: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running level=info timestamp=2018-08-02T11:33:41.276059Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:33:46 http: TLS handshake error from 10.129.0.1:46464: EOF 2018/08/02 11:33:56 http: TLS handshake error from 10.129.0.1:46476: EOF 2018/08/02 11:34:06 http: TLS handshake error from 10.129.0.1:46488: EOF 2018/08/02 11:34:16 http: TLS handshake error from 10.129.0.1:46500: EOF 2018/08/02 11:34:26 http: TLS handshake error from 10.129.0.1:46512: EOF 2018/08/02 11:34:36 http: TLS handshake error from 10.129.0.1:46524: EOF 2018/08/02 11:34:46 http: TLS handshake error from 10.129.0.1:46538: EOF 2018/08/02 11:34:56 http: TLS handshake error from 10.129.0.1:46550: EOF 2018/08/02 11:35:06 http: TLS handshake error from 10.129.0.1:46562: EOF level=info timestamp=2018-08-02T11:35:11.374483Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:35:16 http: TLS handshake error from 10.129.0.1:46574: EOF 2018/08/02 11:35:26 http: TLS handshake error from 10.129.0.1:46586: EOF 2018/08/02 11:35:36 http: TLS handshake error from 10.129.0.1:46598: EOF level=info timestamp=2018-08-02T11:35:41.308681Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 
contentLength=19 Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:32:24.000112Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:32:24.210524Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4gtf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4gtf" level=info timestamp=2018-08-02T11:32:24.216949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmig85cz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmig85cz" level=info timestamp=2018-08-02T11:32:24.632186Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimf5ms\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimf5ms" level=info timestamp=2018-08-02T11:32:24.904587Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix67q5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix67q5" level=info timestamp=2018-08-02T11:35:26.100819Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:35:26.101700Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.168914Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4tkmj kind= uid=25ced27f-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:35:26.169398Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4tkmj kind= uid=25ced27f-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.229365Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:35:26.229596Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.281852Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" 
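The "Expected : Warning not to equal : Warning" text in the failure summary above is Gomega's rendering of a negated equality assertion: the test helper at tests/utils.go:247 watches the VMI's events and fails on the first Warning it sees. A reduced sketch of that pattern, with hypothetical names rather than the actual helper:

package example

import (
	. "github.com/onsi/gomega"
	k8sv1 "k8s.io/api/core/v1"
)

// failOnWarningEvents (hypothetical helper) consumes watched events and
// fails the test on the first Warning, producing Gomega's
// "Expected ... not to equal ..." output seen in the failure summary.
func failOnWarningEvents(events <-chan k8sv1.Event, done <-chan struct{}) {
	for {
		select {
		case e := <-events:
			Expect(e.Type).NotTo(Equal(k8sv1.EventTypeWarning),
				"unexpected warning event received: %s", e.Message)
		case <-done:
			return
		}
	}
}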
level=info timestamp=2018-08-02T11:35:26.282354Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.999400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp" level=info timestamp=2018-08-02T11:35:27.132746Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:35:44.120792Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:35:44.123337Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:35:44.123744Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig6gnk" level=info timestamp=2018-08-02T11:35:44.194009Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmikflgb, existing: true\n" level=info timestamp=2018-08-02T11:35:44.194212Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:35:44.194295Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:44.194515Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:35:44.196701Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
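"Operation cannot be fulfilled ... the object has been modified; please apply your changes to the latest version and try again" is the apiserver's optimistic-concurrency conflict (HTTP 409): the controller wrote with a stale resourceVersion, and the virt-controller lines above show it reacting by re-enqueuing the key. Outside of a workqueue, the same condition is typically handled with client-go's RetryOnConflict; a sketch with a hypothetical update function:

package example

import (
	"k8s.io/client-go/util/retry"
)

// updateWithRetry (hypothetical helper) re-runs update whenever the
// apiserver rejects the write with 409 Conflict. DefaultRetry is a short
// exponential backoff; re-reading the latest object before mutating it
// is the caller's job inside update.
func updateWithRetry(update func() error) error {
	return retry.RetryOnConflict(retry.DefaultRetry, update)
}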
level=info timestamp=2018-08-02T11:35:44.197470Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikflgb" level=info timestamp=2018-08-02T11:35:44.284313Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmig6gnk, existing: true\n" level=info timestamp=2018-08-02T11:35:44.284476Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:35:44.284586Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:44.284821Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:35:44.286616Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:35:44.286977Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig6gnk" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:35:24.385849Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:24.386129Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:35:24.386394Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:35:24.472200Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix67q5, existing: false\n" level=info timestamp=2018-08-02T11:35:24.472319Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:24.472415Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:35:24.472514Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:35:24.737459Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix67q5, existing: false\n" level=info timestamp=2018-08-02T11:35:24.738055Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:24.738547Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:35:24.739133Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmix67q5 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T11:35:25.490880Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmid4gtf, existing: false\n" level=info timestamp=2018-08-02T11:35:25.497049Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:35:25.497379Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:35:25.497604Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmid4gtf kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi4tkmj-stfz8 Pod phase: Running level=info timestamp=2018-08-02T11:35:31.845781Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:35:31.846094Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:35:31.847846Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:35:41.923830Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:35:41.971769Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4tkmj" level=info timestamp=2018-08-02T11:35:41.972661Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:35:41.972870Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmig6gnk-rb4qt Pod phase: Running level=info timestamp=2018-08-02T11:35:31.926726Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:35:31.926886Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:35:31.928573Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:35:42.683162Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:35:42.721463Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmig6gnk" level=info timestamp=2018-08-02T11:35:42.722509Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:35:42.722697Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmikflgb-wrj7k Pod phase: Running level=info timestamp=2018-08-02T11:35:31.468647Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:35:31.468858Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:35:31.473900Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:35:41.484684Z pos=libvirt.go:276 
component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:35:41.609794Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmikflgb" level=info timestamp=2018-08-02T11:35:41.614237Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:35:41.614718Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmirgfdp-wpqvp Pod phase: Running level=info timestamp=2018-08-02T11:35:32.667030Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:35:32.667196Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:35:32.668877Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:35:42.673805Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:35:42.742443Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmirgfdp" level=info timestamp=2018-08-02T11:35:42.744217Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:35:42.744688Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:37:44.158640Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:37:47.300491Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:37:47.338524Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:37:51 http: TLS handshake error from 10.129.0.1:37036: EOF level=info timestamp=2018-08-02T11:37:54.374227Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:37:56.174795Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:38:01 http: TLS handshake error from 10.129.0.1:37048: EOF level=info timestamp=2018-08-02T11:38:04.756527Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:38:11 http: TLS handshake error from 10.129.0.1:37060: EOF level=info 
timestamp=2018-08-02T11:38:14.850806Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:38:17.528477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:38:17.600572Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:38:21 http: TLS handshake error from 10.129.0.1:37072: EOF level=info timestamp=2018-08-02T11:38:24.947215Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:38:26.456767Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:36:36 http: TLS handshake error from 10.129.0.1:46670: EOF level=info timestamp=2018-08-02T11:36:41.135643Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:36:46 http: TLS handshake error from 10.129.0.1:46682: EOF 2018/08/02 11:36:56 http: TLS handshake error from 10.129.0.1:46694: EOF 2018/08/02 11:37:06 http: TLS handshake error from 10.129.0.1:46706: EOF level=info timestamp=2018-08-02T11:37:11.217424Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:37:16 http: TLS handshake error from 10.129.0.1:46718: EOF 2018/08/02 11:37:26 http: TLS handshake error from 10.129.0.1:46730: EOF 2018/08/02 11:37:36 http: TLS handshake error from 10.129.0.1:46742: EOF level=info timestamp=2018-08-02T11:37:41.253609Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:37:46 http: TLS handshake error from 10.129.0.1:46756: EOF 2018/08/02 11:37:56 http: TLS handshake error from 10.129.0.1:46768: EOF 2018/08/02 11:38:06 http: TLS handshake error from 10.129.0.1:46780: EOF level=info timestamp=2018-08-02T11:38:11.160256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:38:16 http: TLS handshake error from 10.129.0.1:46792: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:32:24.000112Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix67q5 kind= uid=b93890fc-9647-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:32:24.210524Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmid4gtf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmid4gtf" level=info 
timestamp=2018-08-02T11:32:24.216949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmig85cz\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmig85cz" level=info timestamp=2018-08-02T11:32:24.632186Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimf5ms\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimf5ms" level=info timestamp=2018-08-02T11:32:24.904587Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix67q5\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix67q5" level=info timestamp=2018-08-02T11:35:26.100819Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:35:26.101700Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.168914Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4tkmj kind= uid=25ced27f-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:35:26.169398Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi4tkmj kind= uid=25ced27f-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.229365Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:35:26.229596Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.281852Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:35:26.282354Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:35:26.999400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp" level=info timestamp=2018-08-02T11:35:27.132746Z pos=vmi.go:157 component=virt-controller 
service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:36:24.963787Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:36:24.971681Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:36:24.972556Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig6gnk" level=info timestamp=2018-08-02T11:37:05.847973Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmikflgb, existing: true\n" level=info timestamp=2018-08-02T11:37:05.850088Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:37:05.850304Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:37:05.851051Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:37:05.859484Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmikflgb kind= uid=25b3160f-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:37:05.864730Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikflgb" level=info timestamp=2018-08-02T11:37:05.933639Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmig6gnk, existing: true\n" level=info timestamp=2018-08-02T11:37:05.933940Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:37:05.934039Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:37:05.934398Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:37:05.937019Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmig6gnk kind= uid=25d45bd1-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:37:05.937493Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmig6gnk" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:36:25.352417Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:36:25.353487Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:36:25.353644Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp" level=info timestamp=2018-08-02T11:37:05.735365Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi4tkmj, existing: true\n" level=info timestamp=2018-08-02T11:37:05.735891Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:37:05.735946Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:37:05.736422Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi4tkmj kind= uid=25ced27f-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:37:05.740354Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi4tkmj kind= uid=25ced27f-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:37:05.742151Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi4tkmj" level=info timestamp=2018-08-02T11:37:06.314158Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmirgfdp, existing: true\n" level=info timestamp=2018-08-02T11:37:06.314341Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:37:06.314397Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:37:06.314648Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:37:06.316388Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
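Each failed sync above is followed by "re-enqueuing VirtualMachineInstance <namespace>/<name>": the handler does not retry inline but puts the key back on a rate-limited workqueue, so retries back off instead of hot-looping. The usual client-go shape of that loop, sketched with hypothetical names:

package example

import "k8s.io/client-go/util/workqueue"

// runWorker (hypothetical helper) drains the queue. A failed sync puts
// the key back with AddRateLimited, which is what produces the repeated
// "re-enqueuing ..." lines with growing delays between attempts; Forget
// resets the backoff counter on success.
func runWorker(queue workqueue.RateLimitingInterface, sync func(key string) error) {
	for {
		item, shutdown := queue.Get()
		if shutdown {
			return
		}
		key := item.(string)
		if err := sync(key); err != nil {
			queue.AddRateLimited(key)
		} else {
			queue.Forget(key)
		}
		queue.Done(item)
	}
}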
level=info timestamp=2018-08-02T11:37:06.316722Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp"
Pod name: virt-launcher-testvmi4tkmj-stfz8
Pod phase: Running
level=info timestamp=2018-08-02T11:35:31.845781Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:35:31.846094Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:35:31.847846Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:35:41.923830Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:35:41.971769Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi4tkmj"
level=info timestamp=2018-08-02T11:35:41.972661Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:35:41.972870Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmig6gnk-rb4qt
Pod phase: Running
level=info timestamp=2018-08-02T11:35:31.926726Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:35:31.926886Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:35:31.928573Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:35:42.683162Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:35:42.721463Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmig6gnk"
level=info timestamp=2018-08-02T11:35:42.722509Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:35:42.722697Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmikflgb-wrj7k
Pod phase: Running
level=info timestamp=2018-08-02T11:35:31.468647Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:35:31.468858Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:35:31.473900Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:35:41.484684Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:35:41.609794Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmikflgb"
level=info timestamp=2018-08-02T11:35:41.614237Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:35:41.614718Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmirgfdp-wpqvp
Pod phase: Running
level=info timestamp=2018-08-02T11:35:32.667030Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:35:32.667196Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:35:32.668877Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:35:42.673805Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:35:42.742443Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmirgfdp"
level=info timestamp=2018-08-02T11:35:42.744217Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:35:42.744688Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [182.212 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  with a service matching the vmi exposed [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:272
    should fail to reach the vmi if an invalid servicename is used
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:303

    Unexpected Warning event received: testvmikflgb,25b3160f-9648-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:35:26.383283Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmikflgb kind=VirtualMachineInstance uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmikflgb-wrj7k"
level=info timestamp=2018-08-02T11:35:43.207852Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmikflgb kind=VirtualMachineInstance uid=25b3160f-9648-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmikflgb-wrj7k"
level=error timestamp=2018-08-02T11:35:43.449877Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmikflgb kind=VirtualMachineInstance uid=25b3160f-9648-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:37:51 http: TLS handshake error from 10.129.0.1:37036: EOF
level=info timestamp=2018-08-02T11:37:54.374227Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:37:56.174795Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:38:01 http: TLS handshake error from 10.129.0.1:37048: EOF
level=info timestamp=2018-08-02T11:38:04.756527Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:38:11 http: TLS handshake error from 10.129.0.1:37060: EOF
level=info timestamp=2018-08-02T11:38:14.850806Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:38:17.528477Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:38:17.600572Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:38:21 http: TLS handshake error from 10.129.0.1:37072: EOF
level=info timestamp=2018-08-02T11:38:24.947215Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:38:26.456767Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:38:31 http: TLS handshake error from 10.129.0.1:37084: EOF
level=info timestamp=2018-08-02T11:38:37.736639Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:38:41 http: TLS handshake error from 10.129.0.1:37096: EOF
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:36:56 http: TLS handshake error from 10.129.0.1:46694: EOF
2018/08/02 11:37:06 http: TLS handshake error from 10.129.0.1:46706: EOF
level=info timestamp=2018-08-02T11:37:11.217424Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:37:16 http: TLS handshake error from 10.129.0.1:46718: EOF
2018/08/02 11:37:26 http: TLS handshake error from 10.129.0.1:46730: EOF
2018/08/02 11:37:36 http: TLS handshake error from 10.129.0.1:46742: EOF
level=info timestamp=2018-08-02T11:37:41.253609Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:37:46 http: TLS handshake error from 10.129.0.1:46756: EOF
2018/08/02 11:37:56 http: TLS handshake error from 10.129.0.1:46768: EOF
2018/08/02 11:38:06 http: TLS handshake error from 10.129.0.1:46780: EOF
level=info timestamp=2018-08-02T11:38:11.160256Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:38:16 http: TLS handshake error from 10.129.0.1:46792: EOF
2018/08/02 11:38:26 http: TLS handshake error from 10.129.0.1:46804: EOF
2018/08/02 11:38:36 http: TLS handshake error from 10.129.0.1:46816: EOF
level=info timestamp=2018-08-02T11:38:43.671015Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:35:26.282354Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:35:26.999400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp"
level=info timestamp=2018-08-02T11:35:27.132746Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp"
level=info timestamp=2018-08-02T11:38:28.179263Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.180117Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.218305Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.218684Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.292108Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.292562Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.396301Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.396610Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.477444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibmn9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibmn9n"
level=info timestamp=2018-08-02T11:38:28.525792Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikzt7h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
level=info timestamp=2018-08-02T11:38:29.126992Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
level=info timestamp=2018-08-02T11:38:29.389458Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:38:45.378861Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:38:45.380533Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:38:45.380898Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6sjp8"
level=info timestamp=2018-08-02T11:38:45.524255Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmibmn9n, existing: true\n"
level=info timestamp=2018-08-02T11:38:45.524463Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:38:45.524537Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:38:45.524774Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:38:45.526789Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:38:45.527300Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibmn9n"
level=info timestamp=2018-08-02T11:38:45.541485Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6sjp8, existing: true\n"
level=info timestamp=2018-08-02T11:38:45.541649Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:38:45.541768Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:38:45.541996Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:38:45.544719Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:38:45.545214Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6sjp8"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:38:44.090157Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:38:44.095147Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:38:44.095745Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
level=info timestamp=2018-08-02T11:38:44.176663Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmikzt7h, existing: true\n"
level=info timestamp=2018-08-02T11:38:44.177551Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:38:44.178033Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:38:44.178759Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:38:44.186867Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:38:44.187102Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
level=info timestamp=2018-08-02T11:38:44.368321Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmikzt7h, existing: true\n"
level=info timestamp=2018-08-02T11:38:44.370232Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:38:44.370419Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:38:44.370846Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:38:44.387794Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:38:44.388360Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
Pod name: virt-launcher-testvmi6sjp8-zlkrz
Pod phase: Running
level=info timestamp=2018-08-02T11:38:34.580655Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:34.580871Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:34.583259Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:44.590771Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:44.693680Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6sjp8"
level=info timestamp=2018-08-02T11:38:44.695442Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:44.696110Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmi6vkdd-nk5v8
Pod phase: Running
level=info timestamp=2018-08-02T11:38:33.723851Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:33.724015Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:33.728906Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:43.759633Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:43.778348Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6vkdd"
level=info timestamp=2018-08-02T11:38:43.779126Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:43.779320Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmibmn9n-cv58r
Pod phase: Running
level=info timestamp=2018-08-02T11:38:33.478307Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:33.478828Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:33.484335Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:43.503711Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:43.646495Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmibmn9n"
level=info timestamp=2018-08-02T11:38:43.649018Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:43.649772Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmikzt7h-tjt95
Pod phase: Running
level=info timestamp=2018-08-02T11:38:33.025554Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:33.025828Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:33.028016Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:43.666224Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:43.690282Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmikzt7h"
level=info timestamp=2018-08-02T11:38:43.691259Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:43.691503Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:40:41 http: TLS handshake error from 10.129.0.1:37242: EOF
level=info timestamp=2018-08-02T11:40:49.950201Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:40:50.011031Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:40:50.158626Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:40:51 http: TLS handshake error from 10.129.0.1:37254: EOF
level=info timestamp=2018-08-02T11:40:57.971738Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:41:00.604050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:41:01 http: TLS handshake error from 10.129.0.1:37266: EOF
level=info timestamp=2018-08-02T11:41:10.785879Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:41:11 http: TLS handshake error from 10.129.0.1:37278: EOF
level=info timestamp=2018-08-02T11:41:20.063037Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:41:20.145953Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:41:21.167636Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:41:21 http: TLS handshake error from 10.129.0.1:37290: EOF
level=info timestamp=2018-08-02T11:41:28.250388Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:39:26 http: TLS handshake error from 10.129.0.1:46876: EOF
2018/08/02 11:39:36 http: TLS handshake error from 10.129.0.1:46888: EOF
2018/08/02 11:39:46 http: TLS handshake error from 10.129.0.1:46900: EOF
2018/08/02 11:39:56 http: TLS handshake error from 10.129.0.1:46912: EOF
2018/08/02 11:40:06 http: TLS handshake error from 10.129.0.1:46924: EOF
level=info timestamp=2018-08-02T11:40:11.371344Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:40:16 http: TLS handshake error from 10.129.0.1:46936: EOF
2018/08/02 11:40:26 http: TLS handshake error from 10.129.0.1:46948: EOF
2018/08/02 11:40:36 http: TLS handshake error from 10.129.0.1:46960: EOF
2018/08/02 11:40:46 http: TLS handshake error from 10.129.0.1:46974: EOF
2018/08/02 11:40:56 http: TLS handshake error from 10.129.0.1:46986: EOF
2018/08/02 11:41:06 http: TLS handshake error from 10.129.0.1:46998: EOF
level=info timestamp=2018-08-02T11:41:11.341920Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:41:16 http: TLS handshake error from 10.129.0.1:47010: EOF
2018/08/02 11:41:26 http: TLS handshake error from 10.129.0.1:47022: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:35:26.282354Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmirgfdp kind= uid=25ddb6b6-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:35:26.999400Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp"
level=info timestamp=2018-08-02T11:35:27.132746Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmirgfdp\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmirgfdp"
level=info timestamp=2018-08-02T11:38:28.179263Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.180117Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.218305Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.218684Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.292108Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.292562Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.396301Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:38:28.396610Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:38:28.477444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibmn9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibmn9n"
level=info timestamp=2018-08-02T11:38:28.525792Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikzt7h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
level=info timestamp=2018-08-02T11:38:29.126992Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
level=info timestamp=2018-08-02T11:38:29.389458Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:39:26.239428Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:39:26.243024Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:39:26.243797Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibmn9n"
level=info timestamp=2018-08-02T11:40:07.198847Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6sjp8, existing: true\n"
level=info timestamp=2018-08-02T11:40:07.199880Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:40:07.199980Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:40:07.200801Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=info timestamp=2018-08-02T11:40:07.205360Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmibmn9n, existing: true\n"
level=info timestamp=2018-08-02T11:40:07.205509Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:40:07.205684Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:40:07.205900Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:40:07.208748Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind= uid=924b6674-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=error timestamp=2018-08-02T11:40:07.208886Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6sjp8 kind= uid=9255e9c2-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:40:07.209485Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmibmn9n"
level=info timestamp=2018-08-02T11:40:07.209637Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6sjp8"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:39:25.968695Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:39:25.977930Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:39:25.978447Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
level=info timestamp=2018-08-02T11:40:06.062742Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmikzt7h, existing: true\n"
level=info timestamp=2018-08-02T11:40:06.063705Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:40:06.063901Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:40:06.064514Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:40:06.071279Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:40:06.077757Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
level=info timestamp=2018-08-02T11:40:06.939397Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6vkdd, existing: true\n"
level=info timestamp=2018-08-02T11:40:06.939860Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:40:06.940062Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:40:06.940475Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:40:06.944509Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind= uid=925fed02-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:40:06.945108Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
Pod name: virt-launcher-testvmi6sjp8-zlkrz
Pod phase: Running
level=info timestamp=2018-08-02T11:38:34.580655Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:34.580871Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:34.583259Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:44.590771Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:44.693680Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6sjp8"
level=info timestamp=2018-08-02T11:38:44.695442Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:44.696110Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmi6vkdd-nk5v8
Pod phase: Running
level=info timestamp=2018-08-02T11:38:33.723851Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:33.724015Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:33.728906Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:43.759633Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:43.778348Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6vkdd"
level=info timestamp=2018-08-02T11:38:43.779126Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:43.779320Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmibmn9n-cv58r
Pod phase: Running
level=info timestamp=2018-08-02T11:38:33.478307Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:33.478828Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:33.484335Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:43.503711Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:43.646495Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmibmn9n"
level=info timestamp=2018-08-02T11:38:43.649018Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:43.649772Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmikzt7h-tjt95
Pod phase: Running
level=info timestamp=2018-08-02T11:38:33.025554Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:38:33.025828Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:38:33.028016Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:38:43.666224Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:38:43.690282Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmikzt7h"
level=info timestamp=2018-08-02T11:38:43.691259Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:38:43.691503Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [182.039 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  with a subdomain and a headless service given [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:319
    should be able to reach the vmi via its unique fully qualified domain name
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:342

    Unexpected Warning event received: testvmibmn9n,924b6674-9648-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:38:28.397093Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmibmn9n kind=VirtualMachineInstance uid=924b6674-9648-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmibmn9n-cv58r"
level=info timestamp=2018-08-02T11:38:44.615186Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmibmn9n kind=VirtualMachineInstance uid=924b6674-9648-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmibmn9n-cv58r"
level=error timestamp=2018-08-02T11:38:44.697329Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmibmn9n kind=VirtualMachineInstance uid=924b6674-9648-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:41:00.604050Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:41:01 http: TLS handshake error from 10.129.0.1:37266: EOF
level=info timestamp=2018-08-02T11:41:10.785879Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:41:11 http: TLS handshake error from 10.129.0.1:37278: EOF
level=info timestamp=2018-08-02T11:41:20.063037Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:41:20.145953Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:41:21.167636Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:41:21 http: TLS handshake error from 10.129.0.1:37290: EOF
level=info timestamp=2018-08-02T11:41:28.250388Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:41:31 http: TLS handshake error from 10.129.0.1:37302: EOF
level=info timestamp=2018-08-02T11:41:31.531407Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:41:36.041586Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:41:36.159893Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
2018/08/02 11:41:41 http: TLS handshake error from 10.129.0.1:37314: EOF
level=info timestamp=2018-08-02T11:41:41.966561Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:39:56 http: TLS handshake error from 10.129.0.1:46912: EOF
2018/08/02 11:40:06 http: TLS handshake error from 10.129.0.1:46924: EOF
level=info timestamp=2018-08-02T11:40:11.371344Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:40:16 http: TLS handshake error from 10.129.0.1:46936: EOF
2018/08/02 11:40:26 http: TLS handshake error from 10.129.0.1:46948: EOF
2018/08/02 11:40:36 http: TLS handshake error from 10.129.0.1:46960: EOF
2018/08/02 11:40:46 http: TLS handshake error from 10.129.0.1:46974: EOF
2018/08/02 11:40:56 http: TLS handshake error from 10.129.0.1:46986: EOF
2018/08/02 11:41:06 http: TLS handshake error from 10.129.0.1:46998: EOF
level=info timestamp=2018-08-02T11:41:11.341920Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:41:16 http: TLS handshake error from 10.129.0.1:47010: EOF
2018/08/02 11:41:26 http: TLS handshake error from 10.129.0.1:47022: EOF
2018/08/02 11:41:36 http: TLS handshake error from 10.129.0.1:47034: EOF
level=info timestamp=2018-08-02T11:41:43.832890Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:41:46 http: TLS handshake error from 10.129.0.1:47046: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:38:28.477444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibmn9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibmn9n"
level=info timestamp=2018-08-02T11:38:28.525792Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikzt7h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
level=info timestamp=2018-08-02T11:38:29.126992Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
level=info timestamp=2018-08-02T11:38:29.389458Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
level=info timestamp=2018-08-02T11:41:30.270900Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.271636Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:30.374099Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi979nm kind= uid=fedb1adc-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.374498Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi979nm kind= uid=fedb1adc-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:30.439880Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.440418Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:30.514832Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.515234Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:31.113782Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv87kn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv87kn"
level=info timestamp=2018-08-02T11:41:31.381734Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x"
level=info timestamp=2018-08-02T11:41:31.483922Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:41:46.828874Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:41:46.830430Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:41:46.830775Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmip22m7"
level=info timestamp=2018-08-02T11:41:46.902838Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv87kn, existing: true\n"
level=info timestamp=2018-08-02T11:41:46.902983Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:41:46.903061Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:41:46.903409Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:41:46.906420Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:41:46.906802Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv87kn"
level=info timestamp=2018-08-02T11:41:46.991506Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmip22m7, existing: true\n"
level=info timestamp=2018-08-02T11:41:46.991654Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:41:46.991727Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:41:46.991942Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:41:46.995255Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:41:46.995616Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmip22m7"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:41:28.702204Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:41:28.702301Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:41:28.702431Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind= uid=925039f7-9648-11e8-bba8-525500d15501 msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:41:28.793380Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6vkdd, existing: false\n"
level=info timestamp=2018-08-02T11:41:28.793470Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:41:28.793558Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:41:28.793719Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:41:28.849537Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmikzt7h, existing: false\n"
level=info timestamp=2018-08-02T11:41:28.849620Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:41:28.849708Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:41:28.849793Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:41:28.866127Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6vkdd, existing: false\n"
level=info timestamp=2018-08-02T11:41:28.866221Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:41:28.866304Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:41:28.866418Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi6vkdd kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi979nm-942qt
Pod phase: Running
level=info timestamp=2018-08-02T11:41:35.714183Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:35.716832Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:35.728705Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:46.017724Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:41:46.059797Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi979nm"
level=info timestamp=2018-08-02T11:41:46.061397Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:41:46.061880Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmifvr8x-c7gj4
Pod phase: Running
level=info timestamp=2018-08-02T11:41:36.140035Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:36.140288Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:36.142356Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:46.147125Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
Pod name: virt-launcher-testvmip22m7-hg6lc
Pod phase: Running
level=info timestamp=2018-08-02T11:41:35.549193Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:35.549344Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:35.551474Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:45.562453Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:41:45.664656Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmip22m7"
level=info timestamp=2018-08-02T11:41:45.666653Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:41:45.667226Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmiv87kn-d8jpv
Pod phase: Running
level=info timestamp=2018-08-02T11:41:35.224781Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:35.225065Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:35.227053Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:45.236027Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:41:45.293555Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiv87kn"
level=info timestamp=2018-08-02T11:41:45.295978Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:41:45.296939Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:43:45.574990Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:43:51 http: TLS handshake error from 10.129.0.1:37472: EOF
level=info timestamp=2018-08-02T11:43:51.967677Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:43:51.992868Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:43:55.805802Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:43:59.219404Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:01 http: TLS handshake error from 10.129.0.1:37484: EOF
level=info timestamp=2018-08-02T11:44:06.033537Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:11 http: TLS handshake error from 10.129.0.1:37496: EOF
level=info timestamp=2018-08-02T11:44:16.160451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:21 http: TLS handshake error from 10.129.0.1:37508: EOF
level=info timestamp=2018-08-02T11:44:22.376728Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:44:22.494621Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:44:26.305481Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:44:29.548918Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:42:26 http: TLS handshake error from 10.129.0.1:47094: EOF
2018/08/02 11:42:36 http: TLS handshake error from 10.129.0.1:47106: EOF
2018/08/02 11:42:46 http: TLS handshake error from 10.129.0.1:47118: EOF
2018/08/02 11:42:56 http: TLS handshake error from 10.129.0.1:47130: EOF
2018/08/02 11:43:06 http: TLS handshake error from 10.129.0.1:47142: EOF
level=info timestamp=2018-08-02T11:43:11.748067Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:43:16 http: TLS handshake error from 10.129.0.1:47154: EOF
2018/08/02 11:43:26 http: TLS handshake error from 10.129.0.1:47166: EOF
2018/08/02 11:43:36 http: TLS handshake error from 10.129.0.1:47178: EOF
2018/08/02 11:43:46 http: TLS handshake error from 10.129.0.1:47192: EOF
2018/08/02 11:43:56 http: TLS handshake error from 10.129.0.1:47204: EOF
2018/08/02 11:44:06 http: TLS handshake error from 10.129.0.1:47216: EOF
level=info timestamp=2018-08-02T11:44:11.779996Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:44:16 http: TLS handshake error from 10.129.0.1:47228: EOF
2018/08/02 11:44:26 http: TLS handshake error from 10.129.0.1:47240: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:38:28.477444Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmibmn9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmibmn9n"
level=info timestamp=2018-08-02T11:38:28.525792Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmikzt7h\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmikzt7h"
level=info timestamp=2018-08-02T11:38:29.126992Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
level=info timestamp=2018-08-02T11:38:29.389458Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6vkdd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6vkdd"
level=info timestamp=2018-08-02T11:41:30.270900Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.271636Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:30.374099Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi979nm kind= uid=fedb1adc-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.374498Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi979nm kind= uid=fedb1adc-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:30.439880Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.440418Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:30.514832Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.515234Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:31.113782Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv87kn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv87kn"
level=info timestamp=2018-08-02T11:41:31.381734Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x"
level=info timestamp=2018-08-02T11:41:31.483922Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:44:12.981455Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:44:12.981617Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:44:12.981787Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmibmn9n kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:44:30.569468Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmip22m7, existing: true\n"
level=info timestamp=2018-08-02T11:44:30.570081Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:44:30.570214Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:44:30.570609Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:44:30.576380Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind= uid=fed34b36-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:44:30.577099Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmip22m7"
level=info timestamp=2018-08-02T11:44:30.643657Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv87kn, existing: true\n"
level=info timestamp=2018-08-02T11:44:30.643840Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:44:30.643912Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:44:30.644366Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:44:30.647762Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv87kn kind= uid=fee189ce-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:44:30.648342Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv87kn"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:43:09.443733Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:43:09.444228Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:43:09.445098Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:43:09.450390Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:43:09.451404Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1893/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x"
level=info timestamp=2018-08-02T11:43:09.461112Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi979nm, existing: true\n"
level=info timestamp=2018-08-02T11:43:09.461213Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:43:09.461269Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:43:09.461395Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi979nm kind= uid=fedb1adc-9648-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:43:09.463146Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi979nm kind= uid=fedb1adc-9648-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:43:09.463335Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi979nm"
level=info timestamp=2018-08-02T11:44:11.889828Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmikzt7h, existing: false\n"
level=info timestamp=2018-08-02T11:44:11.890769Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:44:11.891245Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:44:11.891740Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmikzt7h kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi979nm-942qt
Pod phase: Running
level=info timestamp=2018-08-02T11:41:35.714183Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:35.716832Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:35.728705Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:46.017724Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:41:46.059797Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi979nm"
level=info timestamp=2018-08-02T11:41:46.061397Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:41:46.061880Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmifvr8x-c7gj4
Pod phase: Running
level=info timestamp=2018-08-02T11:41:36.140035Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:36.140288Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:36.142356Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:46.147125Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:41:46.215658Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmifvr8x"
level=info timestamp=2018-08-02T11:41:46.217883Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:41:46.218555Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmip22m7-hg6lc
Pod phase: Running
level=info timestamp=2018-08-02T11:41:35.549193Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:35.549344Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:35.551474Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:45.562453Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:41:45.664656Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmip22m7"
level=info timestamp=2018-08-02T11:41:45.666653Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:41:45.667226Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmiv87kn-d8jpv
Pod phase: Running
level=info timestamp=2018-08-02T11:41:35.224781Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:41:35.225065Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:41:35.227053Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:41:45.236027Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:41:45.293555Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiv87kn"
level=info timestamp=2018-08-02T11:41:45.295978Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:41:45.296939Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
• Failure in Spec Setup (BeforeEach) [182.053 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom interface model [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:368
    should expose the right device type to the guest
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:369

    Unexpected Warning event received: testvmip22m7,fed34b36-9648-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:41:30.696351Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmip22m7 kind=VirtualMachineInstance uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmip22m7-hg6lc"
level=info timestamp=2018-08-02T11:41:46.094163Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmip22m7 kind=VirtualMachineInstance uid=fed34b36-9648-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmip22m7-hg6lc"
level=error timestamp=2018-08-02T11:41:46.215446Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmip22m7 kind=VirtualMachineInstance uid=fed34b36-9648-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt"
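The failing assertion above comes from the shared event watcher at tests/utils.go:247, which treats any Warning event on the VMI as fatal to the spec. The Warning itself is virt-handler repeatedly failing to classify its own mount namespace: on Linux, /proc/<pid>/task/<tid>/ns/mnt is a magic symlink whose target encodes the namespace type, and the lookup fails once the referenced task has exited. A minimal Go sketch of that detection step follows; detectNamespaceType is a hypothetical name for illustration, not the actual KubeVirt helper.

package main

import (
	"fmt"
	"os"
	"strings"
)

// detectNamespaceType sketches what "detecting namespace type from path"
// involves. The symlink target looks like "mnt:[4026531840]"; the prefix
// before the colon names the namespace type. os.Readlink fails with ENOENT
// when the task directory has vanished, which is the failure mode suggested
// by the virt-handler lines above.
func detectNamespaceType(path string) (string, error) {
	target, err := os.Readlink(path)
	if err != nil {
		return "", fmt.Errorf("detecting namespace type from path %s: %v", path, err)
	}
	kind := strings.SplitN(target, ":", 2)[0]
	return kind, nil
}

func main() {
	// Inspecting the current process's own mount namespace prints "mnt";
	// a stale <tid>, as in the log, would return an error instead.
	kind, err := detectNamespaceType("/proc/self/ns/mnt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("namespace type:", kind)
}

Under that reading, the error is consistent with virt-handler holding a stale thread ID for itself (note the task IDs changing between retries, e.g. 14885, 23754, 23760, while the pid 23708 stays fixed), so every "Processing vmi update" fails the same way and the VMI is re-enqueued until the spec times out.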
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:43:55.805802Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:43:59.219404Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:01 http: TLS handshake error from 10.129.0.1:37484: EOF
level=info timestamp=2018-08-02T11:44:06.033537Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:11 http: TLS handshake error from 10.129.0.1:37496: EOF
level=info timestamp=2018-08-02T11:44:16.160451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:21 http: TLS handshake error from 10.129.0.1:37508: EOF
level=info timestamp=2018-08-02T11:44:22.376728Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:44:22.494621Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:44:26.305481Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:44:29.548918Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:31 http: TLS handshake error from 10.129.0.1:37520: EOF
level=info timestamp=2018-08-02T11:44:36.765442Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:44:41 http: TLS handshake error from 10.129.0.1:37532: EOF
level=info timestamp=2018-08-02T11:44:48.617801Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:42:56 http: TLS handshake error from 10.129.0.1:47130: EOF
2018/08/02 11:43:06 http: TLS handshake error from 10.129.0.1:47142: EOF
level=info timestamp=2018-08-02T11:43:11.748067Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:43:16 http: TLS handshake error from 10.129.0.1:47154: EOF
2018/08/02 11:43:26 http: TLS handshake error from 10.129.0.1:47166: EOF
2018/08/02 11:43:36 http: TLS handshake error from 10.129.0.1:47178: EOF
2018/08/02 11:43:46 http: TLS handshake error from 10.129.0.1:47192: EOF
2018/08/02 11:43:56 http: TLS handshake error from 10.129.0.1:47204: EOF
2018/08/02 11:44:06 http: TLS handshake error from 10.129.0.1:47216: EOF
level=info timestamp=2018-08-02T11:44:11.779996Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:44:16 http: TLS handshake error from 10.129.0.1:47228: EOF
2018/08/02 11:44:26 http: TLS handshake error from 10.129.0.1:47240: EOF
2018/08/02 11:44:36 http: TLS handshake error from 10.129.0.1:47252: EOF
level=info timestamp=2018-08-02T11:44:42.605490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:44:46 http: TLS handshake error from 10.129.0.1:47264: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:41:30.514832Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:41:30.515234Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:41:31.113782Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv87kn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv87kn"
level=info timestamp=2018-08-02T11:41:31.381734Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x"
level=info timestamp=2018-08-02T11:41:31.483922Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x"
level=info timestamp=2018-08-02T11:44:32.263841Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:44:32.264203Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:44:32.286867Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:44:32.287036Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:44:32.311375Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:44:32.311616Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:44:32.331821Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:44:32.331929Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:44:32.705421Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2wwlq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq"
level=info timestamp=2018-08-02T11:44:32.872339Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiw699z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw699z"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T11:44:49.076097Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:44:49.078516Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:44:49.078920Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23754/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmixk85v"
level=info timestamp=2018-08-02T11:44:49.159343Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmixk85v, existing: true\n"
level=info timestamp=2018-08-02T11:44:49.159503Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:44:49.159575Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:44:49.159842Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:44:49.161541Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:44:49.161926Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmixk85v"
level=info timestamp=2018-08-02T11:44:49.198985Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix77pt, existing: true\n"
level=info timestamp=2018-08-02T11:44:49.199192Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:44:49.199269Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:44:49.199485Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:44:49.201856Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:44:49.202290Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmix77pt"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:44:48.015254Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:44:48.016198Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:44:48.016551Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq"
level=info timestamp=2018-08-02T11:44:48.057417Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2wwlq, existing: true\n"
level=info timestamp=2018-08-02T11:44:48.057515Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T11:44:48.057544Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:44:48.057648Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T11:44:48.058493Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T11:44:48.058646Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq" level=info timestamp=2018-08-02T11:44:48.140356Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2wwlq, existing: true\n" level=info timestamp=2018-08-02T11:44:48.140535Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:44:48.140609Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:44:48.140918Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:44:48.142715Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:44:48.143151Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq" Pod name: virt-launcher-testvmi2wwlq-svh48 Pod phase: Running level=info timestamp=2018-08-02T11:44:36.912043Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:44:36.912305Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:44:36.914114Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:44:46.938012Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:44:47.069076Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2wwlq" level=info timestamp=2018-08-02T11:44:47.074650Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:44:47.075752Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiv87kn-d8jpv Pod phase: Pending Pod name: virt-launcher-testvmiw699z-jc86w Pod phase: Running level=info timestamp=2018-08-02T11:44:37.234238Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:44:37.234435Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:44:37.236261Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:44:47.524134Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:44:47.582451Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiw699z" level=info 
timestamp=2018-08-02T11:44:47.583301Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:44:47.583513Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmix77pt-qzqj8 Pod phase: Running level=info timestamp=2018-08-02T11:44:36.945318Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:44:36.945564Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:44:36.947047Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:44:46.955395Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:44:47.063968Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmix77pt" level=info timestamp=2018-08-02T11:44:47.066016Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:44:47.066579Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmixk85v-799w2 Pod phase: Running level=info timestamp=2018-08-02T11:44:37.243080Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:44:37.243275Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:44:37.244921Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:44:47.251718Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:44:47.297561Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmixk85v" level=info timestamp=2018-08-02T11:44:47.299855Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:44:47.300332Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:46:54.202590Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:46:54.262797Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:01.059963Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:01.458982Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 
username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:47:01 http: TLS handshake error from 10.129.0.1:37702: EOF 2018/08/02 11:47:11 http: TLS handshake error from 10.129.0.1:37714: EOF level=info timestamp=2018-08-02T11:47:11.593742Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:11.731299Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:47:21 http: TLS handshake error from 10.129.0.1:37726: EOF level=info timestamp=2018-08-02T11:47:21.764744Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:24.644395Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:24.648992Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:31.410482Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:47:31 http: TLS handshake error from 10.129.0.1:37738: EOF level=info timestamp=2018-08-02T11:47:31.877942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:45:16 http: TLS handshake error from 10.129.0.1:47300: EOF 2018/08/02 11:45:26 http: TLS handshake error from 10.129.0.1:47312: EOF 2018/08/02 11:45:36 http: TLS handshake error from 10.129.0.1:47324: EOF level=info timestamp=2018-08-02T11:45:41.819489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:45:46 http: TLS handshake error from 10.129.0.1:47336: EOF 2018/08/02 11:45:56 http: TLS handshake error from 10.129.0.1:47348: EOF 2018/08/02 11:46:06 http: TLS handshake error from 10.129.0.1:47360: EOF 2018/08/02 11:46:16 http: TLS handshake error from 10.129.0.1:47372: EOF 2018/08/02 11:46:26 http: TLS handshake error from 10.129.0.1:47384: EOF 2018/08/02 11:46:36 http: TLS handshake error from 10.129.0.1:47396: EOF 2018/08/02 11:46:46 http: TLS handshake error from 10.129.0.1:47410: EOF 2018/08/02 11:46:56 http: TLS handshake error from 10.129.0.1:47422: EOF 2018/08/02 11:47:06 http: TLS handshake error from 10.129.0.1:47434: EOF 2018/08/02 11:47:16 http: TLS handshake error from 10.129.0.1:47446: EOF 2018/08/02 11:47:26 http: TLS handshake error from 10.129.0.1:47458: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:41:30.514832Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= 
uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:41:30.515234Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifvr8x kind= uid=fef2749e-9648-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:41:31.113782Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv87kn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv87kn" level=info timestamp=2018-08-02T11:41:31.381734Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x" level=info timestamp=2018-08-02T11:41:31.483922Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifvr8x\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifvr8x" level=info timestamp=2018-08-02T11:44:32.263841Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:44:32.264203Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.286867Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:44:32.287036Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.311375Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:44:32.311616Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.331821Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:44:32.331929Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.705421Z pos=vmi.go:157 component=virt-controller service=http 
reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2wwlq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq" level=info timestamp=2018-08-02T11:44:32.872339Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiw699z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw699z" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:46:10.962877Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmixk85v" level=info timestamp=2018-08-02T11:47:14.418597Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmip22m7, existing: false\n" level=info timestamp=2018-08-02T11:47:14.419278Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:14.419567Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:47:14.420015Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmip22m7 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:47:14.489311Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv87kn, existing: false\n" level=info timestamp=2018-08-02T11:47:14.489439Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:14.489589Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmiv87kn kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:47:14.489779Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiv87kn kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:47:32.784563Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix77pt, existing: true\n" level=info timestamp=2018-08-02T11:47:32.785471Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:47:32.785633Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:32.786123Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:47:32.800547Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix77pt kind= uid=6b521fd2-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:47:32.801302Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmix77pt" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:46:09.973438Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:46:09.976051Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:46:09.976509Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq" level=info timestamp=2018-08-02T11:46:11.741877Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiw699z, existing: true\n" level=info timestamp=2018-08-02T11:46:11.742619Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:46:11.743462Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:46:11.744313Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:46:11.748827Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:46:11.750574Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiw699z" level=info timestamp=2018-08-02T11:47:31.898626Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2wwlq, existing: true\n" level=info timestamp=2018-08-02T11:47:31.904930Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:47:31.905238Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:31.905815Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:47:31.922095Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind= uid=6b4fe9e3-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
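The interleaved "Processing vmi <name>, existing: true/false", "Domain: existing: false", and "re-enqueuing VirtualMachineInstance <key>" lines trace a conventional Kubernetes controller sync loop: keys are pulled from a rate-limited workqueue, the handler compares the desired VMI against the observed libvirt domain, and any error puts the key back on the queue with backoff. A sketch of that loop using client-go's workqueue package; syncVMI is a hypothetical stand-in for virt-handler's reconciliation:

```go
package main

import (
	"fmt"

	"k8s.io/client-go/util/workqueue"
)

// syncVMI is hypothetical: a stand-in for virt-handler's per-VMI
// reconciliation that diffs the VMI spec against the libvirt domain.
func syncVMI(key string) error {
	fmt.Printf("Processing vmi %s\n", key)
	return nil
}

func main() {
	q := workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter())
	q.Add("kubevirt-test-default/testvmi2wwlq")

	for q.Len() > 0 {
		item, shutdown := q.Get()
		if shutdown {
			return
		}
		key := item.(string)
		if err := syncVMI(key); err != nil {
			// The log's "re-enqueuing VirtualMachineInstance <key>": failed
			// keys return to the queue with rate-limited backoff.
			q.AddRateLimited(key)
		} else {
			q.Forget(key) // reset the backoff counter after a clean sync
		}
		q.Done(key)
	}
}
```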
level=info timestamp=2018-08-02T11:47:31.922832Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq" Pod name: virt-launcher-testvmi2wwlq-svh48 Pod phase: Running level=info timestamp=2018-08-02T11:44:36.912043Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:44:36.912305Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:44:36.914114Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:44:46.938012Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:44:47.069076Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2wwlq" level=info timestamp=2018-08-02T11:44:47.074650Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:44:47.075752Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiw699z-jc86w Pod phase: Running level=info timestamp=2018-08-02T11:44:37.234238Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:44:37.234435Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:44:37.236261Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:44:47.524134Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:44:47.582451Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiw699z" level=info timestamp=2018-08-02T11:44:47.583301Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:44:47.583513Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmix77pt-qzqj8 Pod phase: Running level=info timestamp=2018-08-02T11:44:36.945318Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:44:36.945564Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:44:36.947047Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:44:46.955395Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:44:47.063968Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmix77pt" level=info timestamp=2018-08-02T11:44:47.066016Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:44:47.066579Z 
pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: virt-launcher-testvmixk85v-799w2
Pod phase: Running
level=info timestamp=2018-08-02T11:44:37.243080Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:44:37.243275Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:44:37.244921Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:44:47.251718Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:44:47.297561Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmixk85v"
level=info timestamp=2018-08-02T11:44:47.299855Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:44:47.300332Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.814 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with default interface model [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:382
    should expose the right device type to the guest
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:383

    Unexpected Warning event received: testvmi2wwlq,6b4fe9e3-9649-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:44:32.334695Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi2wwlq kind=VirtualMachineInstance uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi2wwlq-svh48"
level=info timestamp=2018-08-02T11:44:48.566628Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi2wwlq kind=VirtualMachineInstance uid=6b4fe9e3-9649-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi2wwlq-svh48"
level=error timestamp=2018-08-02T11:44:48.612658Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi2wwlq kind=VirtualMachineInstance uid=6b4fe9e3-9649-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T11:47:01.059963Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:47:01.458982Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:47:01 http: TLS handshake error
from 10.129.0.1:37702: EOF 2018/08/02 11:47:11 http: TLS handshake error from 10.129.0.1:37714: EOF level=info timestamp=2018-08-02T11:47:11.593742Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:11.731299Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:47:21 http: TLS handshake error from 10.129.0.1:37726: EOF level=info timestamp=2018-08-02T11:47:21.764744Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:24.644395Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:24.648992Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:47:31.410482Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:47:31 http: TLS handshake error from 10.129.0.1:37738: EOF level=info timestamp=2018-08-02T11:47:31.877942Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:47:41 http: TLS handshake error from 10.129.0.1:37750: EOF level=info timestamp=2018-08-02T11:47:42.744853Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running level=info timestamp=2018-08-02T11:45:41.819489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:45:46 http: TLS handshake error from 10.129.0.1:47336: EOF 2018/08/02 11:45:56 http: TLS handshake error from 10.129.0.1:47348: EOF 2018/08/02 11:46:06 http: TLS handshake error from 10.129.0.1:47360: EOF 2018/08/02 11:46:16 http: TLS handshake error from 10.129.0.1:47372: EOF 2018/08/02 11:46:26 http: TLS handshake error from 10.129.0.1:47384: EOF 2018/08/02 11:46:36 http: TLS handshake error from 10.129.0.1:47396: EOF 2018/08/02 11:46:46 http: TLS handshake error from 10.129.0.1:47410: EOF 2018/08/02 11:46:56 http: TLS handshake error from 10.129.0.1:47422: EOF 2018/08/02 11:47:06 http: TLS handshake error from 10.129.0.1:47434: EOF 2018/08/02 11:47:16 http: TLS handshake error from 10.129.0.1:47446: EOF 2018/08/02 11:47:26 http: TLS handshake error from 10.129.0.1:47458: EOF 2018/08/02 11:47:36 http: TLS handshake error from 10.129.0.1:47470: EOF level=info timestamp=2018-08-02T11:47:44.509979Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:47:46 http: TLS handshake error from 10.129.0.1:47482: EOF Pod name: 
virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:44:32.311616Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.331821Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:44:32.331929Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.705421Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2wwlq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq" level=info timestamp=2018-08-02T11:44:32.872339Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiw699z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw699z" level=info timestamp=2018-08-02T11:47:34.087093Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.087686Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.160061Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.160447Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.238118Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.238633Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.308236Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.308593Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-08-02T11:47:34.698949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6pr9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" level=info timestamp=2018-08-02T11:47:35.075731Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5vqgd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5vqgd" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:47:49.974862Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:47:49.977695Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:47:49.978068Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" level=info timestamp=2018-08-02T11:47:50.018576Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6pr9n, existing: true\n" level=info timestamp=2018-08-02T11:47:50.018724Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:47:50.018795Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:50.019085Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:47:50.021078Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:47:50.021747Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" level=info timestamp=2018-08-02T11:47:50.102376Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6pr9n, existing: true\n" level=info timestamp=2018-08-02T11:47:50.102569Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:47:50.102654Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:50.102942Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:47:50.106179Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:47:50.106987Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:47:32.370729Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:32.370783Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:47:32.370888Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:47:32.604782Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2wwlq, existing: false\n" level=info timestamp=2018-08-02T11:47:32.604865Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:32.604952Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:47:32.605116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:47:32.631779Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiw699z, existing: false\n" level=info timestamp=2018-08-02T11:47:32.631862Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:32.631944Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:47:32.632095Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
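The virt-controller lines quoting "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ...: the object has been modified; please apply your changes to the latest version and try again" are the standard Kubernetes optimistic-concurrency conflict: an update carried a stale resourceVersion, the API server rejected it, and the controller re-enqueues the key. Client code that must win such races usually wraps the read-modify-write in client-go's retry helper; a self-contained sketch that simulates one stale write:

```go
package main

import (
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/client-go/util/retry"
)

func main() {
	gr := schema.GroupResource{Group: "kubevirt.io", Resource: "virtualmachineinstances"}
	attempt := 0

	// RetryOnConflict re-runs the closure with backoff for as long as it
	// returns a Conflict error -- the same condition virt-controller logs
	// before re-enqueuing the VMI key.
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		attempt++
		// A real controller would re-GET the object here and re-apply its
		// mutation against the fresh resourceVersion. We fake one conflict.
		if attempt == 1 {
			return apierrors.NewConflict(gr, "testvmi2wwlq", fmt.Errorf("the object has been modified"))
		}
		return nil
	})
	fmt.Println("attempts:", attempt, "err:", err) // attempts: 2 err: <nil>
}
```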
level=info timestamp=2018-08-02T11:47:33.677452Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiw699z, existing: false\n" level=info timestamp=2018-08-02T11:47:33.677564Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:47:33.677654Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:47:33.677756Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiw699z kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi5vqgd-5nzb8 Pod phase: Running level=info timestamp=2018-08-02T11:47:39.662470Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:47:39.662636Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:47:39.664298Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" Pod name: virt-launcher-testvmi6pr9n-lfjxc Pod phase: Running level=info timestamp=2018-08-02T11:47:39.311659Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:47:39.312405Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:47:39.315677Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:47:49.370218Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:47:49.428870Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6pr9n" level=info timestamp=2018-08-02T11:47:49.430754Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:47:49.431292Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmic2495-nqqx2 Pod phase: Running level=info timestamp=2018-08-02T11:47:38.136500Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:47:38.136775Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:47:38.142685Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:47:48.148108Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:47:48.407650Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmic2495" level=info timestamp=2018-08-02T11:47:48.414619Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:47:48.415160Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmisfwgj-6q554 Pod phase: Running level=info 
timestamp=2018-08-02T11:47:39.531856Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:47:39.531994Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:47:39.535676Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:47:49.545864Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:47:49.567004Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmisfwgj" level=info timestamp=2018-08-02T11:47:49.567640Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:47:49.567831Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:49:56.017930Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:49:56.058336Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:49:56.112403Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:50:01 http: TLS handshake error from 10.129.0.1:37920: EOF level=info timestamp=2018-08-02T11:50:02.799878Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:50:06.252317Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:50:11 http: TLS handshake error from 10.129.0.1:37932: EOF level=info timestamp=2018-08-02T11:50:11.936531Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:50:16.399849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:50:21 http: TLS handshake error from 10.129.0.1:37944: EOF level=info timestamp=2018-08-02T11:50:26.348849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:50:26.552415Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" 
proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:50:26.580499Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:50:31 http: TLS handshake error from 10.129.0.1:37956: EOF level=info timestamp=2018-08-02T11:50:33.035222Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running level=info timestamp=2018-08-02T11:48:41.884726Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:48:46 http: TLS handshake error from 10.129.0.1:47554: EOF 2018/08/02 11:48:56 http: TLS handshake error from 10.129.0.1:47566: EOF 2018/08/02 11:49:06 http: TLS handshake error from 10.129.0.1:47578: EOF level=info timestamp=2018-08-02T11:49:11.834392Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:49:16 http: TLS handshake error from 10.129.0.1:47590: EOF 2018/08/02 11:49:26 http: TLS handshake error from 10.129.0.1:47602: EOF 2018/08/02 11:49:36 http: TLS handshake error from 10.129.0.1:47614: EOF 2018/08/02 11:49:46 http: TLS handshake error from 10.129.0.1:47628: EOF 2018/08/02 11:49:56 http: TLS handshake error from 10.129.0.1:47640: EOF 2018/08/02 11:50:06 http: TLS handshake error from 10.129.0.1:47652: EOF level=info timestamp=2018-08-02T11:50:09.537384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:50:11.134995Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:50:16 http: TLS handshake error from 10.129.0.1:47664: EOF 2018/08/02 11:50:26 http: TLS handshake error from 10.129.0.1:47676: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:44:32.311616Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixk85v kind= uid=6b55b9f5-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.331821Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:44:32.331929Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiw699z kind= uid=6b597b02-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:44:32.705421Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi2wwlq\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi2wwlq" level=info timestamp=2018-08-02T11:44:32.872339Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on 
virtualmachineinstances.kubevirt.io \"testvmiw699z\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiw699z" level=info timestamp=2018-08-02T11:47:34.087093Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.087686Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.160061Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.160447Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.238118Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.238633Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.308236Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.308593Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.698949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6pr9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" level=info timestamp=2018-08-02T11:47:35.075731Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5vqgd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5vqgd" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:49:13.293974Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmisfwgj" level=info timestamp=2018-08-02T11:50:16.642784Z 
pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix77pt, existing: false\n" level=info timestamp=2018-08-02T11:50:16.643448Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:16.643755Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmix77pt kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:50:16.644958Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmix77pt kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:50:16.730052Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmixk85v, existing: false\n" level=info timestamp=2018-08-02T11:50:16.730363Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:16.730559Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmixk85v kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:50:16.730868Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmixk85v kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:50:33.893769Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6pr9n, existing: true\n" level=info timestamp=2018-08-02T11:50:33.894447Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:50:33.894538Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:33.894949Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:50:33.904299Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind= uid=d7af3a46-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:50:33.905176Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:49:12.514106Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:49:12.514149Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:49:12.514362Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:49:12.516812Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:49:12.517155Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5vqgd" level=info timestamp=2018-08-02T11:49:12.604707Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmic2495, existing: true\n" level=info timestamp=2018-08-02T11:49:12.604817Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:49:12.604847Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:49:12.604984Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:49:12.605917Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:49:12.606569Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmic2495" level=info timestamp=2018-08-02T11:50:15.768066Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2wwlq, existing: false\n" level=info timestamp=2018-08-02T11:50:15.769687Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:15.770675Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:50:15.773295Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi2wwlq kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
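Throughout both virt-api pods, "http: TLS handshake error from 10.129.0.1:<port>: EOF" recurs at a steady ten-second cadence from a single source address. That pattern is characteristic of TCP-level health probes: the prober opens the socket to confirm the port is listening, then closes it before speaking TLS, and Go's net/http server logs the aborted handshake. Assuming that diagnosis holds, the noise can be routed away by giving the server a filtering ErrorLog; a sketch (the cert paths and port are placeholders):

```go
package main

import (
	"log"
	"net/http"
	"os"
	"strings"
)

// handshakeFilter swallows the "http: TLS handshake error ...: EOF" lines
// produced by probes that never complete a handshake, and forwards the rest.
type handshakeFilter struct{}

func (handshakeFilter) Write(p []byte) (int, error) {
	line := string(p)
	if strings.Contains(line, "TLS handshake error") && strings.Contains(line, "EOF") {
		return len(p), nil // probe noise: drop it
	}
	return os.Stderr.Write(p)
}

func main() {
	srv := &http.Server{
		Addr: ":8443",
		// net/http writes handshake errors through ErrorLog, so a filtering
		// writer is enough to keep them out of the pod log.
		ErrorLog: log.New(handshakeFilter{}, "", log.LstdFlags),
	}
	log.Fatal(srv.ListenAndServeTLS("tls.crt", "tls.key"))
}
```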
Pod name: virt-launcher-testvmi5vqgd-5nzb8 Pod phase: Running level=info timestamp=2018-08-02T11:47:39.662470Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:47:39.662636Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:47:39.664298Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:47:49.668460Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:47:49.735215Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi5vqgd" level=info timestamp=2018-08-02T11:47:49.736377Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:47:49.736807Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmi6pr9n-lfjxc Pod phase: Running level=info timestamp=2018-08-02T11:47:39.311659Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:47:39.312405Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:47:39.315677Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:47:49.370218Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:47:49.428870Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6pr9n" level=info timestamp=2018-08-02T11:47:49.430754Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:47:49.431292Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmic2495-nqqx2 Pod phase: Running level=info timestamp=2018-08-02T11:47:38.136500Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:47:38.136775Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:47:38.142685Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:47:48.148108Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:47:48.407650Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmic2495" level=info timestamp=2018-08-02T11:47:48.414619Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:47:48.415160Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmisfwgj-6q554 Pod phase: Running level=info timestamp=2018-08-02T11:47:39.531856Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" 
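Each launcher logs "Watchdog file created at /var/run/kubevirt/watchdog-files/<namespace>_<name>" before reporting itself ready. The idea behind such a file is a filesystem heartbeat: the launcher keeps refreshing the file's timestamp, and a supervisor treats a stale mtime as a dead launcher. A small sketch of both halves, with the path, interval, and timeout invented for the demo:

```go
package main

import (
	"fmt"
	"os"
	"time"
)

// Stand-in for /var/run/kubevirt/watchdog-files/<namespace>_<name>.
const watchdogFile = "/tmp/kubevirt-demo_testvmi"

// heartbeat keeps the watchdog file's mtime fresh.
func heartbeat(interval time.Duration) {
	for {
		now := time.Now()
		if err := os.Chtimes(watchdogFile, now, now); err != nil {
			if f, err := os.Create(watchdogFile); err == nil { // (re)create on first pass
				f.Close()
			}
		}
		time.Sleep(interval)
	}
}

// stale reports whether the heartbeat has missed its deadline.
func stale(timeout time.Duration) bool {
	fi, err := os.Stat(watchdogFile)
	if err != nil {
		return true // no file: no live launcher
	}
	return time.Since(fi.ModTime()) > timeout
}

func main() {
	go heartbeat(1 * time.Second)
	time.Sleep(3 * time.Second)
	fmt.Println("launcher stale?", stale(5*time.Second)) // false while the heartbeat runs
}
```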
level=info timestamp=2018-08-02T11:47:39.531994Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:47:39.535676Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:47:49.545864Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:47:49.567004Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmisfwgj"
level=info timestamp=2018-08-02T11:47:49.567640Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:47:49.567831Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [182.132 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:402
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:403

    Unexpected Warning event received: testvmi6pr9n,d7af3a46-9649-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:47:34.425087Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6pr9n kind=VirtualMachineInstance uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6pr9n-lfjxc"
level=info timestamp=2018-08-02T11:47:49.386759Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6pr9n kind=VirtualMachineInstance uid=d7af3a46-9649-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi6pr9n-lfjxc"
level=error timestamp=2018-08-02T11:47:49.428994Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6pr9n kind=VirtualMachineInstance uid=d7af3a46-9649-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:50:11 http: TLS handshake error from 10.129.0.1:37932: EOF
level=info timestamp=2018-08-02T11:50:11.936531Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:50:16.399849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:50:21 http: TLS handshake error from 10.129.0.1:37944: EOF
level=info timestamp=2018-08-02T11:50:26.348849Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
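Both failures terminate in the same assertion at tests/utils.go:247: while waiting for a VMI to start, the suite watches the Kubernetes events attached to it and requires that none has type Warning, which is what renders as "Expected : Warning not to equal : Warning". A Gomega-flavoured sketch of that shape; nextEvent is hypothetical and stands in for the real event watch, and this would run inside a Ginkgo spec with a fail handler registered:

```go
package tests

import (
	. "github.com/onsi/gomega"

	corev1 "k8s.io/api/core/v1"
)

// nextEvent is hypothetical: the real helper pulls events for the VMI from a
// watch on the API server.
func nextEvent() corev1.Event {
	return corev1.Event{
		Type:    "Warning",
		Reason:  "SyncFailed",
		Message: "Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt",
	}
}

// waitForVMIStart shows the assertion shape behind the failure text: the
// first Warning event observed during startup aborts the spec immediately.
func waitForVMIStart() {
	for i := 0; i < 10; i++ {
		ev := nextEvent()
		// Renders as `Expected : Warning not to equal : Warning` on failure.
		Expect(ev.Type).NotTo(Equal("Warning"), "unexpected warning event received: %s", ev.Message)
		if ev.Reason == "Started" {
			return
		}
	}
}
```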
level=info timestamp=2018-08-02T11:50:26.552415Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:50:26.580499Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:50:31 http: TLS handshake error from 10.129.0.1:37956: EOF level=info timestamp=2018-08-02T11:50:33.035222Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:50:36.197616Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:50:36.204628Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:50:37.594706Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:50:41 http: TLS handshake error from 10.129.0.1:37968: EOF level=info timestamp=2018-08-02T11:50:48.851589Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:50:51 http: TLS handshake error from 10.129.0.1:37980: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:49:06 http: TLS handshake error from 10.129.0.1:47578: EOF level=info timestamp=2018-08-02T11:49:11.834392Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:49:16 http: TLS handshake error from 10.129.0.1:47590: EOF 2018/08/02 11:49:26 http: TLS handshake error from 10.129.0.1:47602: EOF 2018/08/02 11:49:36 http: TLS handshake error from 10.129.0.1:47614: EOF 2018/08/02 11:49:46 http: TLS handshake error from 10.129.0.1:47628: EOF 2018/08/02 11:49:56 http: TLS handshake error from 10.129.0.1:47640: EOF 2018/08/02 11:50:06 http: TLS handshake error from 10.129.0.1:47652: EOF level=info timestamp=2018-08-02T11:50:09.537384Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:50:11.134995Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:50:16 http: TLS handshake error from 10.129.0.1:47664: EOF 2018/08/02 11:50:26 http: TLS handshake error from 10.129.0.1:47676: EOF 2018/08/02 11:50:36 http: TLS handshake error from 10.129.0.1:47688: EOF level=info timestamp=2018-08-02T11:50:42.919453Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:50:46 http: TLS handshake error from 10.129.0.1:47700: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:47:34.238118Z pos=preset.go:142 component=virt-controller 
service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.238633Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.308236Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.308593Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.698949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6pr9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" level=info timestamp=2018-08-02T11:47:35.075731Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5vqgd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5vqgd" level=info timestamp=2018-08-02T11:50:36.275603Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.276116Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.321933Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.322413Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.355675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.356343Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.414409Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.415180Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= 
uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:37.180043Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijsfwh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijsfwh" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:50:52.907469Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:50:52.909069Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:50:52.909720Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmichlc2" level=info timestamp=2018-08-02T11:50:52.990364Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmichlc2, existing: true\n" level=info timestamp=2018-08-02T11:50:52.990541Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:50:52.990615Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:52.990860Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:50:52.994487Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
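[Editor's note] The "Failed to open current namespace: Error detecting namespace type from path: /proc/<pid>/task/<tid>/ns/mnt" errors that virt-handler keeps logging above come from classifying a /proc namespace entry. On Linux these entries are magic symlinks whose targets read like "mnt:[4026531840]"; the namespace type is whatever precedes the colon, and reading the link fails (for example with ENOENT once the task has exited, or a permission error if it is not accessible). A minimal sketch of that detection step, with hypothetical names, is shown below; it is not kubevirt's actual implementation.

package main

import (
	"fmt"
	"os"
	"strings"
)

// detectNamespaceType reads a /proc/<pid>/task/<tid>/ns/<kind> magic symlink
// and returns the namespace kind encoded in its target, e.g. "mnt" from
// "mnt:[4026531840]". Readlink fails (for instance with ENOENT) once the
// task has exited, which would surface as an error like the one logged above.
func detectNamespaceType(path string) (string, error) {
	target, err := os.Readlink(path)
	if err != nil {
		return "", fmt.Errorf("error detecting namespace type from path: %s: %v", path, err)
	}
	i := strings.Index(target, ":")
	if i < 0 {
		return "", fmt.Errorf("unexpected namespace link target %q", target)
	}
	return target[:i], nil
}

func main() {
	kind, err := detectNamespaceType("/proc/self/ns/mnt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("namespace type:", kind) // prints: namespace type: mnt
}

The task IDs in the failing paths (e.g. /proc/23708/task/23771) change between retries, which fits a short-lived thread whose /proc entry disappears before the link can be read.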
level=info timestamp=2018-08-02T11:50:52.995356Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmichlc2" level=info timestamp=2018-08-02T11:50:53.155749Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmichlc2, existing: true\n" level=info timestamp=2018-08-02T11:50:53.155951Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:50:53.156025Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:53.156364Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:50:53.158442Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:50:53.158821Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmichlc2" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:50:34.487526Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:50:34.487640Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:50:34.526831Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmic2495, existing: true\n" level=info timestamp=2018-08-02T11:50:34.526935Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Failed\n" level=info timestamp=2018-08-02T11:50:34.526991Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:34.527121Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:50:34.527245Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind= uid=d7b37ffc-9649-11e8-bba8-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:50:34.607238Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi5vqgd, existing: false\n" level=info timestamp=2018-08-02T11:50:34.607333Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:34.607438Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi5vqgd kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
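[Editor's note] On the recurring "http: TLS handshake error from 10.129.0.1:...: EOF" lines in the virt-api logs: the regular ten-second cadence and the EOF arriving before any TLS record are consistent with a client opening the TCP port and closing it without starting a handshake, such as a plain TCP health probe. These lines appear to be background noise and are unrelated to the test failure reported below.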
level=info timestamp=2018-08-02T11:50:34.607544Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi5vqgd kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:50:34.637296Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmic2495, existing: false\n" level=info timestamp=2018-08-02T11:50:34.637543Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:50:34.637647Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:50:34.637738Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmic2495 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmi5xcf9-4548z Pod phase: Running level=info timestamp=2018-08-02T11:50:42.176366Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:42.176531Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:50:42.178786Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:52.206837Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:52.333252Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi5xcf9" level=info timestamp=2018-08-02T11:50:52.334858Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:52.335312Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmichlc2-wm86d Pod phase: Running level=info timestamp=2018-08-02T11:50:41.617738Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:41.618397Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:50:41.639955Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:51.650091Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:51.724976Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmichlc2" level=info timestamp=2018-08-02T11:50:51.727206Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:51.727626Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmijsfwh-hr44f Pod phase: Running level=info timestamp=2018-08-02T11:50:41.022583Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:41.022888Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info 
timestamp=2018-08-02T11:50:41.025300Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:51.075312Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:51.132377Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmijsfwh" level=info timestamp=2018-08-02T11:50:51.135243Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:51.135679Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmipj7v4-2bwzc Pod phase: Running level=info timestamp=2018-08-02T11:50:42.070713Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:42.070934Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:50:42.072853Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:52.081909Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:52.203486Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipj7v4" level=info timestamp=2018-08-02T11:50:52.208562Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:52.209200Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running 2018/08/02 11:53:01 http: TLS handshake error from 10.129.0.1:38138: EOF level=info timestamp=2018-08-02T11:53:02.155603Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:04.660748Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:53:11 http: TLS handshake error from 10.129.0.1:38150: EOF level=info timestamp=2018-08-02T11:53:12.050771Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:53:12.266346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:53:21 http: TLS handshake error from 10.129.0.1:38162: EOF level=info timestamp=2018-08-02T11:53:22.483566Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:28.031257Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- 
method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:28.056994Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:53:31 http: TLS handshake error from 10.129.0.1:38174: EOF level=info timestamp=2018-08-02T11:53:32.574005Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:34.986122Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:36.224473Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:53:36.228299Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:51:26 http: TLS handshake error from 10.129.0.1:47748: EOF 2018/08/02 11:51:36 http: TLS handshake error from 10.129.0.1:47760: EOF 2018/08/02 11:51:46 http: TLS handshake error from 10.129.0.1:47772: EOF 2018/08/02 11:51:56 http: TLS handshake error from 10.129.0.1:47784: EOF 2018/08/02 11:52:06 http: TLS handshake error from 10.129.0.1:47796: EOF level=info timestamp=2018-08-02T11:52:12.048678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:52:16 http: TLS handshake error from 10.129.0.1:47808: EOF 2018/08/02 11:52:26 http: TLS handshake error from 10.129.0.1:47820: EOF 2018/08/02 11:52:36 http: TLS handshake error from 10.129.0.1:47832: EOF 2018/08/02 11:52:46 http: TLS handshake error from 10.129.0.1:47846: EOF 2018/08/02 11:52:56 http: TLS handshake error from 10.129.0.1:47858: EOF 2018/08/02 11:53:06 http: TLS handshake error from 10.129.0.1:47870: EOF 2018/08/02 11:53:16 http: TLS handshake error from 10.129.0.1:47882: EOF 2018/08/02 11:53:26 http: TLS handshake error from 10.129.0.1:47894: EOF 2018/08/02 11:53:36 http: TLS handshake error from 10.129.0.1:47906: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:47:34.238118Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.238633Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmisfwgj kind= uid=d7c1d08d-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.308236Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:47:34.308593Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5vqgd kind= 
uid=d7cd8f64-9649-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:47:34.698949Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6pr9n\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6pr9n" level=info timestamp=2018-08-02T11:47:35.075731Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi5vqgd\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi5vqgd" level=info timestamp=2018-08-02T11:50:36.275603Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.276116Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.321933Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.322413Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.355675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.356343Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.414409Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.415180Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:37.180043Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijsfwh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijsfwh" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:52:16.113583Z pos=vm.go:253 component=virt-handler 
reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmipj7v4" level=info timestamp=2018-08-02T11:53:17.746548Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6pr9n, existing: false\n" level=info timestamp=2018-08-02T11:53:17.747186Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:53:17.747500Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:53:17.747926Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi6pr9n kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:53:19.060450Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmisfwgj, existing: false\n" level=info timestamp=2018-08-02T11:53:19.060646Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:53:19.060851Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmisfwgj kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:53:19.061072Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmisfwgj kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:53:36.753404Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmichlc2, existing: true\n" level=info timestamp=2018-08-02T11:53:36.754329Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:53:36.754424Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:53:36.754925Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:53:36.761408Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind= uid=44451c42-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:53:36.767227Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmichlc2" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:51:34.102037Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:51:34.106162Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:51:34.106551Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5xcf9" level=info timestamp=2018-08-02T11:52:14.695447Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmijsfwh, existing: true\n" level=info timestamp=2018-08-02T11:52:14.696194Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:52:14.696288Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:52:14.696795Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:52:14.701422Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:52:14.702201Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1456/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmijsfwh" level=info timestamp=2018-08-02T11:52:15.076612Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi5xcf9, existing: true\n" level=info timestamp=2018-08-02T11:52:15.078826Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:52:15.079286Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:52:15.087377Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:52:15.130450Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:52:15.131665Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1483/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5xcf9" Pod name: virt-launcher-testvmi5xcf9-4548z Pod phase: Running level=info timestamp=2018-08-02T11:50:42.176366Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:42.176531Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:50:42.178786Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:52.206837Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:52.333252Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi5xcf9" level=info timestamp=2018-08-02T11:50:52.334858Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:52.335312Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmichlc2-wm86d Pod phase: Running level=info timestamp=2018-08-02T11:50:41.617738Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:41.618397Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:50:41.639955Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:51.650091Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:51.724976Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmichlc2" level=info timestamp=2018-08-02T11:50:51.727206Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:51.727626Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmijsfwh-hr44f Pod phase: Running level=info timestamp=2018-08-02T11:50:41.022583Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:41.022888Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:50:41.025300Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:51.075312Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:51.132377Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmijsfwh" level=info timestamp=2018-08-02T11:50:51.135243Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:51.135679Z 
pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmipj7v4-2bwzc Pod phase: Running level=info timestamp=2018-08-02T11:50:42.070713Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:50:42.070934Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:50:42.072853Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:50:52.081909Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:50:52.203486Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipj7v4" level=info timestamp=2018-08-02T11:50:52.208562Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:50:52.209200Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.963 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address in non-conventional format [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:414
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:415

    Unexpected Warning event received: testvmichlc2,44451c42-964a-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt
    Expected
      : Warning
    not to equal
      : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:50:36.403679Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmichlc2 kind=VirtualMachineInstance uid=44451c42-964a-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmichlc2-wm86d"
level=info timestamp=2018-08-02T11:50:52.295060Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmichlc2 kind=VirtualMachineInstance uid=44451c42-964a-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmichlc2-wm86d"
level=error timestamp=2018-08-02T11:50:52.402199Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmichlc2 kind=VirtualMachineInstance uid=44451c42-964a-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt"

Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:53:12.050771Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:53:12.266346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:53:21 http: TLS handshake error from 10.129.0.1:38162: EOF level=info
timestamp=2018-08-02T11:53:22.483566Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:28.031257Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:28.056994Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:53:31 http: TLS handshake error from 10.129.0.1:38174: EOF level=info timestamp=2018-08-02T11:53:32.574005Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:34.986122Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:53:36.224473Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:53:36.228299Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/08/02 11:53:41 http: TLS handshake error from 10.129.0.1:38186: EOF level=info timestamp=2018-08-02T11:53:42.900926Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:53:51 http: TLS handshake error from 10.129.0.1:38198: EOF level=info timestamp=2018-08-02T11:53:54.335413Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:51:46 http: TLS handshake error from 10.129.0.1:47772: EOF 2018/08/02 11:51:56 http: TLS handshake error from 10.129.0.1:47784: EOF 2018/08/02 11:52:06 http: TLS handshake error from 10.129.0.1:47796: EOF level=info timestamp=2018-08-02T11:52:12.048678Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:52:16 http: TLS handshake error from 10.129.0.1:47808: EOF 2018/08/02 11:52:26 http: TLS handshake error from 10.129.0.1:47820: EOF 2018/08/02 11:52:36 http: TLS handshake error from 10.129.0.1:47832: EOF 2018/08/02 11:52:46 http: TLS handshake error from 10.129.0.1:47846: EOF 2018/08/02 11:52:56 http: TLS handshake error from 10.129.0.1:47858: EOF 2018/08/02 11:53:06 http: TLS handshake error from 10.129.0.1:47870: EOF 2018/08/02 11:53:16 http: TLS handshake error from 10.129.0.1:47882: EOF 2018/08/02 11:53:26 http: TLS handshake error from 10.129.0.1:47894: EOF 2018/08/02 11:53:36 http: TLS handshake error from 10.129.0.1:47906: EOF level=info timestamp=2018-08-02T11:53:42.013798Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 
contentLength=19 2018/08/02 11:53:46 http: TLS handshake error from 10.129.0.1:47918: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:50:36.321933Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.322413Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.355675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.356343Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.414409Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.415180Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:37.180043Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijsfwh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijsfwh" level=info timestamp=2018-08-02T11:53:38.277253Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.278017Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:53:38.316908Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.317426Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:53:38.375319Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5bddc kind= uid=b0cc6c42-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.375555Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5bddc kind= uid=b0cc6c42-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info 
timestamp=2018-08-02T11:53:38.611572Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.611826Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:53:56.304880Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:53:56.306798Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:53:56.307268Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwd26z" level=info timestamp=2018-08-02T11:53:56.387853Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiwd26z, existing: true\n" level=info timestamp=2018-08-02T11:53:56.388006Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:53:56.388079Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:53:56.388417Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:53:56.390915Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
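[Editor's note] The failure reported above is raised by the tests' event watcher (tests/utils.go:247): while waiting for testvmichlc2 to start, the helper asserts that no event of type Warning arrives for the VMI, so the virt-handler sync error surfaces as "Expected : Warning not to equal : Warning". A self-contained sketch of that pattern follows; the types and names are hypothetical stand-ins, not the real helper.

package main

import "fmt"

// event is a stand-in for the Kubernetes events the test helper watches.
type event struct {
	Type, Reason, Message string
}

// failOnWarning mimics the assertion in tests/utils.go: any event of type
// Warning observed while the VMI is starting aborts the test with that
// event's message.
func failOnWarning(events <-chan event) error {
	for ev := range events {
		if ev.Type == "Warning" {
			return fmt.Errorf("unexpected warning event received: %s", ev.Message)
		}
	}
	return nil
}

func main() {
	ch := make(chan event, 2)
	ch <- event{Type: "Normal", Reason: "Created", Message: "Created virtual machine pod"}
	ch <- event{Type: "Warning", Reason: "SyncFailed", Message: "Failed to open current namespace"}
	close(ch)
	fmt.Println(failOnWarning(ch)) // the Warning event fails the spec
}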
level=info timestamp=2018-08-02T11:53:56.391388Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwd26z" level=info timestamp=2018-08-02T11:53:56.552290Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiwd26z, existing: true\n" level=info timestamp=2018-08-02T11:53:56.552568Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:53:56.552641Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:53:56.553082Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:53:56.559633Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:53:56.560450Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwd26z" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:53:55.596684Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:53:55.603606Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:53:55.604546Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmic8m6x" level=info timestamp=2018-08-02T11:53:55.605242Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmic8m6x, existing: true\n" level=info timestamp=2018-08-02T11:53:55.605746Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:53:55.606383Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:53:55.607532Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:53:55.612605Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:53:55.613731Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmic8m6x" level=info timestamp=2018-08-02T11:53:55.615265Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmic8m6x, existing: true\n" level=info timestamp=2018-08-02T11:53:55.615852Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:53:55.616945Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:53:55.623439Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:53:55.628098Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:53:55.628903Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmic8m6x" Pod name: virt-launcher-testvmi5bddc-jvxb8 Pod phase: Running level=info timestamp=2018-08-02T11:53:43.909060Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:53:43.909263Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:53:43.910669Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:53:53.928247Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:53:54.055412Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi5bddc" level=info timestamp=2018-08-02T11:53:54.057549Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:53:54.058371Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmic8m6x-9cb6c Pod phase: Running level=info timestamp=2018-08-02T11:53:43.727653Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:53:43.727910Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:53:43.730368Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:53:53.742664Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:53:53.843439Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmic8m6x" level=info timestamp=2018-08-02T11:53:53.848584Z pos=client.go:152 
component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:53:53.849897Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmipt8hn-b8rhj Pod phase: Running level=info timestamp=2018-08-02T11:53:44.113032Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:53:44.113178Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:53:44.120675Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:53:54.128477Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:53:54.214235Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipt8hn" level=info timestamp=2018-08-02T11:53:54.215887Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:53:54.216677Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmiwd26z-fq7f7 Pod phase: Running level=info timestamp=2018-08-02T11:53:43.857301Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:53:43.857559Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:53:43.859125Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:53:53.869559Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:53:53.989233Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiwd26z" level=info timestamp=2018-08-02T11:53:53.997093Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:53:53.997775Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running 2018/08/02 11:56:01 http: TLS handshake error from 10.129.0.1:38356: EOF level=info timestamp=2018-08-02T11:56:06.274246Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:56:06.990087Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:56:11 http: TLS handshake error from 10.129.0.1:38368: EOF level=info timestamp=2018-08-02T11:56:12.274665Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:56:17.214119Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:56:21 http: TLS handshake error from 10.129.0.1:38380: EOF level=info timestamp=2018-08-02T11:56:27.439877Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:56:29.829735Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:56:29.946559Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:56:31 http: TLS handshake error from 10.129.0.1:38392: EOF level=info timestamp=2018-08-02T11:56:36.421451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:56:36.458823Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:56:36.462558Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T11:56:37.734646Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 11:54:26 http: TLS handshake error from 10.129.0.1:47966: EOF 2018/08/02 11:54:36 http: TLS handshake error from 10.129.0.1:47978: EOF 2018/08/02 11:54:46 http: TLS handshake error from 10.129.0.1:47990: EOF 2018/08/02 11:54:56 http: TLS handshake error from 10.129.0.1:48002: EOF 2018/08/02 11:55:06 http: TLS handshake error from 10.129.0.1:48014: EOF level=info timestamp=2018-08-02T11:55:12.193455Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:55:16 http: TLS handshake error from 10.129.0.1:48026: EOF 2018/08/02 11:55:26 http: TLS handshake error from 10.129.0.1:48038: EOF 2018/08/02 11:55:36 http: TLS handshake error from 10.129.0.1:48050: EOF 2018/08/02 11:55:46 http: TLS handshake error from 10.129.0.1:48064: EOF 2018/08/02 11:55:56 http: TLS handshake error from 10.129.0.1:48076: EOF 2018/08/02 11:56:06 http: TLS handshake error from 10.129.0.1:48088: EOF 2018/08/02 11:56:16 http: TLS handshake error from 10.129.0.1:48100: EOF 2018/08/02 11:56:26 http: TLS handshake error from 10.129.0.1:48112: EOF 2018/08/02 11:56:36 http: TLS handshake error from 10.129.0.1:48124: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:50:36.321933Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.322413Z pos=preset.go:171 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmipj7v4 kind= uid=444b6f7b-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.355675Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.356343Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmijsfwh kind= uid=4450e7ea-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:36.414409Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:50:36.415180Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5xcf9 kind= uid=44570ee4-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:50:37.180043Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmijsfwh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmijsfwh" level=info timestamp=2018-08-02T11:53:38.277253Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.278017Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:53:38.316908Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.317426Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:53:38.375319Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5bddc kind= uid=b0cc6c42-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.375555Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi5bddc kind= uid=b0cc6c42-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:53:38.611572Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.611826Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" 
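The "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ... the object has been modified" messages above are ordinary Kubernetes optimistic-concurrency conflicts: the controller wrote with a stale resourceVersion, the API server rejected the update, and the key was re-enqueued so the next reconcile works from the latest object. As a rough illustration only (vmiClient is a hypothetical stand-in, not virt-controller's actual code), the equivalent client-side pattern is to re-read and retry with client-go's conflict-retry helper:

package main

// Illustrative sketch of resolving "the object has been modified;
// please apply your changes to the latest version and try again":
// re-fetch the freshest copy before every write attempt.

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

type vmi struct {
	ResourceVersion string
	Labels          map[string]string
}

// vmiClient is a hypothetical typed client; only the retry pattern matters.
type vmiClient interface {
	Get(name string) (*vmi, error)
	Update(v *vmi) (*vmi, error)
}

// updateWithRetry re-reads the latest resourceVersion on each attempt, so a
// concurrent writer costs one extra round trip instead of a hard failure.
func updateWithRetry(c vmiClient, name string) error {
	return retry.RetryOnConflict(retry.DefaultRetry, func() error {
		latest, err := c.Get(name) // always mutate the freshest copy
		if err != nil {
			return err
		}
		if latest.Labels == nil {
			latest.Labels = map[string]string{}
		}
		latest.Labels["example"] = "value"
		_, err = c.Update(latest)
		return err // RetryOnConflict retries only on Conflict errors
	})
}

func main() { fmt.Println("sketch only") }

A controller work queue achieves the same effect without the inner loop: dropping the key back on the queue, as logged above, is the re-read-and-retry.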
Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:55:17.446404Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:55:17.446505Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:55:17.446918Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi5bddc kind= uid=b0cc6c42-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:55:17.458603Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi5bddc kind= uid=b0cc6c42-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:55:17.459915Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi5bddc" level=info timestamp=2018-08-02T11:55:18.208227Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiwd26z, existing: true\n" level=info timestamp=2018-08-02T11:55:18.208454Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:55:18.208555Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:55:18.208829Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:55:18.211083Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiwd26z kind= uid=b0b9f4b0-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:55:18.211697Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiwd26z" level=info timestamp=2018-08-02T11:56:20.608644Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmichlc2, existing: false\n" level=info timestamp=2018-08-02T11:56:20.609342Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:56:20.609670Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:56:20.610129Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmichlc2 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:54:36.619658Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:54:36.630093Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:54:36.630708Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1485/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmic8m6x" level=info timestamp=2018-08-02T11:55:16.820506Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmipt8hn, existing: true\n" level=info timestamp=2018-08-02T11:55:16.821007Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:55:16.821055Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:55:16.821355Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:55:16.827518Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:55:16.828180Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmipt8hn" level=info timestamp=2018-08-02T11:55:17.593822Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmic8m6x, existing: true\n" level=info timestamp=2018-08-02T11:55:17.599488Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:55:17.603361Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:55:17.606202Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:55:17.613667Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind= uid=b0c7d876-964a-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/30047/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:55:17.617219Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/30047/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmic8m6x" Pod name: virt-launcher-testvmi5bddc-jvxb8 Pod phase: Running level=info timestamp=2018-08-02T11:53:43.909060Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:53:43.909263Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:53:43.910669Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:53:53.928247Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:53:54.055412Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi5bddc" level=info timestamp=2018-08-02T11:53:54.057549Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:53:54.058371Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmic8m6x-9cb6c Pod phase: Running level=info timestamp=2018-08-02T11:53:43.727653Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:53:43.727910Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:53:43.730368Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:53:53.742664Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:53:53.843439Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmic8m6x" level=info timestamp=2018-08-02T11:53:53.848584Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:53:53.849897Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmipt8hn-b8rhj Pod phase: Running level=info timestamp=2018-08-02T11:53:44.113032Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:53:44.113178Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:53:44.120675Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:53:54.128477Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:53:54.214235Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmipt8hn" level=info timestamp=2018-08-02T11:53:54.215887Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:53:54.216677Z 
pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmiwd26z-fq7f7
Pod phase: Running
level=info timestamp=2018-08-02T11:53:43.857301Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:53:43.857559Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:53:43.859125Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:53:53.869559Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:53:53.989233Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiwd26z"
level=info timestamp=2018-08-02T11:53:53.997093Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:53:53.997775Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [182.199 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with custom MAC address and slirp interface [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:427
    should configure custom MAC address
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:428

    Unexpected Warning event received: testvmiwd26z,b0b9f4b0-964a-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:53:39.204686Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiwd26z kind=VirtualMachineInstance uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiwd26z-fq7f7"
level=info timestamp=2018-08-02T11:53:55.731787Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiwd26z kind=VirtualMachineInstance uid=b0b9f4b0-964a-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiwd26z-fq7f7"
level=error timestamp=2018-08-02T11:53:55.764970Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmiwd26z kind=VirtualMachineInstance uid=b0b9f4b0-964a-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt"
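Every failure in this run bottoms out in the same reason string: virt-handler could not resolve a mount-namespace link such as /proc/23708/task/23771/ns/mnt, which typically means the referenced task (thread) directory under /proc had already vanished by the time the handler tried to enter the launcher's mount namespace. As a rough illustration only, and not KubeVirt's actual implementation, the namespace type encoded in such a /proc path can be recovered on Linux by reading the symlink target, which has the form "<type>:[<inode>]":

package main

// Illustrative sketch: detect a namespace type from a path like
// /proc/<pid>/task/<tid>/ns/mnt. The kernel exposes each namespace file
// as a symlink whose target reads "<type>:[<inode>]", e.g. "mnt:[4026531840]".

import (
	"fmt"
	"os"
	"strings"
)

// nsTypeFromPath returns the namespace type ("mnt", "net", "pid", ...)
// encoded in the symlink target of a /proc ns path.
func nsTypeFromPath(path string) (string, error) {
	target, err := os.Readlink(path)
	if err != nil {
		// ENOENT here is what you get once the task has exited, matching
		// the "Error detecting namespace type from path" records above.
		return "", fmt.Errorf("error detecting namespace type from path: %s: %v", path, err)
	}
	sep := strings.Index(target, ":[")
	if sep < 0 {
		return "", fmt.Errorf("unexpected namespace link target %q", target)
	}
	return target[:sep], nil
}

func main() {
	nsType, err := nsTypeFromPath("/proc/self/ns/mnt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("namespace type:", nsType) // prints "mnt" on Linux
}

If the Readlink fails because the task directory is gone, a caller has no choice but to surface an error and retry, which is exactly the error/re-enqueue loop visible in the virt-handler logs.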
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 11:56:11 http: TLS handshake error from 10.129.0.1:38368: EOF
level=info timestamp=2018-08-02T11:56:12.274665Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:56:17.214119Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:56:21 http: TLS handshake error from 10.129.0.1:38380: EOF
level=info timestamp=2018-08-02T11:56:27.439877Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:56:29.829735Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:56:29.946559Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:56:31 http: TLS handshake error from 10.129.0.1:38392: EOF
level=info timestamp=2018-08-02T11:56:36.421451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T11:56:36.458823Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:56:36.462558Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T11:56:37.734646Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 11:56:41 http: TLS handshake error from 10.129.0.1:38404: EOF
2018/08/02 11:56:51 http: TLS handshake error from 10.129.0.1:38416: EOF
level=info timestamp=2018-08-02T11:56:55.235318Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 11:54:46 http: TLS handshake error from 10.129.0.1:47990: EOF
2018/08/02 11:54:56 http: TLS handshake error from 10.129.0.1:48002: EOF
2018/08/02 11:55:06 http: TLS handshake error from 10.129.0.1:48014: EOF
level=info timestamp=2018-08-02T11:55:12.193455Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:55:16 http: TLS handshake error from 10.129.0.1:48026: EOF
2018/08/02 11:55:26 http: TLS handshake error from 10.129.0.1:48038: EOF
2018/08/02 11:55:36 http: TLS handshake error from 10.129.0.1:48050: EOF
2018/08/02 11:55:46 http: TLS handshake error from 10.129.0.1:48064: EOF
2018/08/02 11:55:56 http: TLS handshake error from 10.129.0.1:48076: EOF
2018/08/02 11:56:06 http: TLS handshake error from 10.129.0.1:48088: EOF
2018/08/02 11:56:16 http: TLS handshake error from 10.129.0.1:48100: EOF
2018/08/02 11:56:26 http: TLS handshake error from 10.129.0.1:48112: EOF
2018/08/02 11:56:36 http: TLS handshake error from 10.129.0.1:48124: EOF
level=info timestamp=2018-08-02T11:56:42.174338Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 11:56:47 http: TLS handshake error from 10.129.0.1:48136: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info
timestamp=2018-08-02T11:53:38.611572Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.611826Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.634714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.635931Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.669527Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.670261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.695882Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.696109Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.921858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:56:56.198373Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:56:56.199898Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:56:56.200398Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:56.281065Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix22nr, existing: true\n" level=info timestamp=2018-08-02T11:56:56.281458Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:56:56.281594Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:56:56.282071Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:56:56.285217Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:56:56.285947Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:56.446759Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix22nr, existing: true\n" level=info timestamp=2018-08-02T11:56:56.447102Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:56:56.447343Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:56:56.447802Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:56:56.450880Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:56:56.451666Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:56:39.542392Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:56:39.542502Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:56:39.542604Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmic8m6x kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:56:55.434424Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: true\n" level=info timestamp=2018-08-02T11:56:55.435291Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:56:55.435510Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:56:55.435721Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:56:55.454637Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/30047/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:56:55.486615Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/30047/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:55.489988Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: true\n" level=info timestamp=2018-08-02T11:56:55.490050Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:56:55.490079Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:56:55.490208Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:56:55.491057Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:56:55.491249Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1484/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" Pod name: virt-launcher-testvmi96txx-tjf5c Pod phase: Running level=info timestamp=2018-08-02T11:56:45.308059Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:56:45.308257Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:56:45.309896Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:56:55.318392Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:56:55.419536Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi96txx" level=info timestamp=2018-08-02T11:56:55.421481Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:56:55.422083Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmif7nvb-hgz6r Pod phase: Running level=info timestamp=2018-08-02T11:56:45.323774Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:56:45.323919Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:56:45.325671Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:56:55.633311Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:56:55.663705Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmif7nvb" level=info timestamp=2018-08-02T11:56:55.664931Z pos=client.go:152 
component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:56:55.665256Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmimpwfh-ft9kz Pod phase: Running level=info timestamp=2018-08-02T11:56:44.909886Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:56:44.912529Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:56:44.914346Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:56:54.921500Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:56:54.951325Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimpwfh" level=info timestamp=2018-08-02T11:56:54.953212Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:56:54.953898Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmix22nr-zwnhj Pod phase: Running level=info timestamp=2018-08-02T11:56:44.895757Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:56:44.895980Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:56:44.897700Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:56:54.908011Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:56:54.973500Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmix22nr" level=info timestamp=2018-08-02T11:56:54.976124Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:56:54.976650Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:58:58.452884Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:01.203332Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:01.209882Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:01 http: TLS handshake error from 10.129.0.1:38574: EOF level=info timestamp=2018-08-02T11:59:07.184619Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:08.878076Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:11 http: TLS handshake error from 10.129.0.1:38586: EOF level=info timestamp=2018-08-02T11:59:18.968417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:21 http: TLS handshake error from 10.129.0.1:38598: EOF level=info timestamp=2018-08-02T11:59:29.107247Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:31 http: TLS handshake error from 10.129.0.1:38610: EOF level=info timestamp=2018-08-02T11:59:31.566568Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:31.581007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:37.351346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:39.273914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running level=info timestamp=2018-08-02T11:57:42.159315Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:57:46 http: TLS handshake error from 10.129.0.1:48208: EOF 2018/08/02 11:57:56 http: TLS handshake error from 10.129.0.1:48220: EOF 2018/08/02 11:58:06 http: TLS handshake error from 10.129.0.1:48232: EOF level=info timestamp=2018-08-02T11:58:12.138686Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:58:16 http: TLS handshake error from 10.129.0.1:48244: EOF 2018/08/02 11:58:26 http: TLS handshake error from 10.129.0.1:48256: EOF 2018/08/02 11:58:36 http: TLS handshake error from 10.129.0.1:48268: EOF 2018/08/02 11:58:46 http: TLS handshake error from 10.129.0.1:48282: EOF 2018/08/02 11:58:56 http: TLS handshake error from 10.129.0.1:48294: EOF 2018/08/02 11:59:06 http: TLS handshake error from 10.129.0.1:48306: EOF level=info timestamp=2018-08-02T11:59:12.045192Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:59:16 http: TLS handshake error from 10.129.0.1:48318: EOF 2018/08/02 11:59:26 http: TLS handshake error from 10.129.0.1:48330: EOF 2018/08/02 11:59:36 http: TLS handshake error from 10.129.0.1:48342: EOF Pod name: 
virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:53:38.611572Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:53:38.611826Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmipt8hn kind= uid=b0e33623-964a-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.634714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.635931Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.669527Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.670261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.695882Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.696109Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.921858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest 
version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T11:59:23.244613Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:23.245254Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi5bddc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:23.245933Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi5bddc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:40.050907Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix22nr, existing: true\n" level=info timestamp=2018-08-02T11:59:40.051690Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:59:40.051777Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:40.052357Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:59:40.070213Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:59:40.075067Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:59:41.043791Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi96txx, existing: true\n" level=info timestamp=2018-08-02T11:59:41.043963Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:59:41.044052Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.044444Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:59:41.047106Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:59:41.047581Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:58:17.723228Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:58:17.729051Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:58:17.729381Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:58:20.019539Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: true\n" level=info timestamp=2018-08-02T11:58:20.020868Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:58:20.021511Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:58:20.022621Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:58:20.027073Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/30047/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T11:58:20.028162Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/30047/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:39.651798Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: true\n" level=info timestamp=2018-08-02T11:59:39.653310Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T11:59:39.653398Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:39.659520Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T11:59:39.662059Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T11:59:39.662396Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/1456/task/1476/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" Pod name: virt-launcher-testvmi96txx-tjf5c Pod phase: Running level=info timestamp=2018-08-02T11:56:45.308059Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:56:45.308257Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:56:45.309896Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:56:55.318392Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:56:55.419536Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi96txx" level=info timestamp=2018-08-02T11:56:55.421481Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:56:55.422083Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: virt-launcher-testvmif7nvb-hgz6r Pod phase: Running level=info timestamp=2018-08-02T11:56:45.323774Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:56:45.323919Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T11:56:45.325671Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:56:55.633311Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:56:55.663705Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmif7nvb" level=info timestamp=2018-08-02T11:56:55.664931Z pos=client.go:152 
component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:56:55.665256Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmimpwfh-ft9kz
Pod phase: Running
level=info timestamp=2018-08-02T11:56:44.909886Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:56:44.912529Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:56:44.914346Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:56:54.921500Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:56:54.951325Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmimpwfh"
level=info timestamp=2018-08-02T11:56:54.953212Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:56:54.953898Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: virt-launcher-testvmix22nr-zwnhj
Pod phase: Running
level=info timestamp=2018-08-02T11:56:44.895757Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:56:44.895980Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T11:56:44.897700Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:56:54.908011Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:56:54.973500Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmix22nr"
level=info timestamp=2018-08-02T11:56:54.976124Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:56:54.976650Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [182.113 seconds]
Networking
/root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:48
  VirtualMachineInstance with disabled automatic attachment of interfaces [BeforeEach]
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:440
    should not configure any external interfaces
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_networking_test.go:441

    Unexpected Warning event received: testvmix22nr,1d7234b0-964b-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt
    Expected
        : Warning
    not to equal
        : Warning

    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T11:56:40.624408Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmix22nr kind=VirtualMachineInstance uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmix22nr-zwnhj"
level=info timestamp=2018-08-02T11:56:55.518370Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmix22nr kind=VirtualMachineInstance uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmix22nr-zwnhj"
level=error timestamp=2018-08-02T11:56:55.648164Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmix22nr kind=VirtualMachineInstance uid=1d7234b0-964b-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt"
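The "Expected : Warning not to equal : Warning" output above is a Gomega inequality assertion firing inside the shared test helper at tests/utils.go:247: the test watches the VMI's Kubernetes events and fails the spec as soon as any event of type Warning arrives. A minimal sketch of that pattern with client-go follows; failOnWarningEvent is a hypothetical helper written for illustration, not the actual tests/utils.go code, and the Watch signature matches recent client-go releases:

package main

// Illustrative sketch: fail on the first Warning event recorded for an
// object, mirroring what an assertion like
// Expect(event.Type).NotTo(Equal("Warning")) enforces during the test.

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

// failOnWarningEvent watches events for the named object and returns an
// error for the first one whose type is "Warning".
func failOnWarningEvent(client kubernetes.Interface, namespace, name string) error {
	w, err := client.CoreV1().Events(namespace).Watch(context.TODO(), metav1.ListOptions{
		FieldSelector: fmt.Sprintf("involvedObject.name=%s", name),
	})
	if err != nil {
		return err
	}
	defer w.Stop()
	for ev := range w.ResultChan() {
		event, ok := ev.Object.(*corev1.Event)
		if !ok {
			continue // not an Event object; ignore
		}
		if event.Type == corev1.EventTypeWarning {
			return fmt.Errorf("unexpected warning event received: %s", event.Message)
		}
	}
	return nil
}

func main() {} // sketch only; wiring up a real clientset is omitted

Under this scheme the spec fails in BeforeEach the moment virt-handler emits the SyncFailed warning, which is why both networking specs above abort before exercising their actual interface checks.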
kind=VirtualMachineInstance uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmix22nr-zwnhj" level=error timestamp=2018-08-02T11:56:55.648164Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmix22nr kind=VirtualMachineInstance uid=1d7234b0-964b-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T11:59:07.184619Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:08.878076Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:11 http: TLS handshake error from 10.129.0.1:38586: EOF level=info timestamp=2018-08-02T11:59:18.968417Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:21 http: TLS handshake error from 10.129.0.1:38598: EOF level=info timestamp=2018-08-02T11:59:29.107247Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:31 http: TLS handshake error from 10.129.0.1:38610: EOF level=info timestamp=2018-08-02T11:59:31.566568Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:31.581007Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:37.351346Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T11:59:39.273914Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:41 http: TLS handshake error from 10.129.0.1:38622: EOF level=info timestamp=2018-08-02T11:59:49.506945Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 11:59:51 http: TLS handshake error from 10.129.0.1:38634: EOF level=info timestamp=2018-08-02T11:59:59.643276Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 
2018/08/02 11:58:06 http: TLS handshake error from 10.129.0.1:48232: EOF level=info timestamp=2018-08-02T11:58:12.138686Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:58:16 http: TLS handshake error from 10.129.0.1:48244: EOF 2018/08/02 11:58:26 http: TLS handshake error from 10.129.0.1:48256: EOF 2018/08/02 11:58:36 http: TLS handshake error from 10.129.0.1:48268: EOF 2018/08/02 11:58:46 http: TLS handshake error from 10.129.0.1:48282: EOF 2018/08/02 11:58:56 http: TLS handshake error from 10.129.0.1:48294: EOF 2018/08/02 11:59:06 http: TLS handshake error from 10.129.0.1:48306: EOF level=info timestamp=2018-08-02T11:59:12.045192Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:59:16 http: TLS handshake error from 10.129.0.1:48318: EOF 2018/08/02 11:59:26 http: TLS handshake error from 10.129.0.1:48330: EOF 2018/08/02 11:59:36 http: TLS handshake error from 10.129.0.1:48342: EOF level=info timestamp=2018-08-02T11:59:42.283129Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 11:59:46 http: TLS handshake error from 10.129.0.1:48354: EOF 2018/08/02 11:59:56 http: TLS handshake error from 10.129.0.1:48366: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:56:40.634714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.635931Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.669527Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.670261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.695882Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.696109Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.921858Z 
pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T12:00:00.050526Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:00:00.052209Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T12:00:00.052601Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6xfsp" level=info timestamp=2018-08-02T12:00:00.133234Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6xfsp, existing: true\n" level=info timestamp=2018-08-02T12:00:00.133410Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:00:00.133512Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:00:00.133825Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:00:00.135584Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:00:00.135995Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6xfsp" level=info timestamp=2018-08-02T12:00:00.296578Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6xfsp, existing: true\n" level=info timestamp=2018-08-02T12:00:00.296770Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:00:00.296850Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:00:00.297111Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:00:00.299109Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:00:00.299797Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6xfsp" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:59:41.059559Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.059685Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.059843Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T11:59:41.202114Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n" level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n" level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
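Every failure in this run traces back to the same virt-handler error, "Failed to open current namespace: Error detecting namespace type from path: /proc/<pid>/task/<tid>/ns/mnt". On Linux, /proc/<pid>/ns/<kind> is a symlink whose target encodes the namespace type, for example mnt:[4026531840]; detection fails whenever that link cannot be read, e.g. because the referenced task has already exited and its /proc entry is gone. Below is a minimal Go sketch of that detection pattern, as an illustration of the failure mode only; it is an assumption about the mechanism, not kubevirt's actual implementation.

package main

import (
	"fmt"
	"os"
	"strings"
)

// detectNamespaceType reads a /proc/<pid>/task/<tid>/ns/<kind> symlink,
// whose target looks like "mnt:[4026531840]", and returns the part before
// the colon. If the task has already exited, os.Readlink fails and no
// type can be detected -- the shape of the errors logged above.
func detectNamespaceType(path string) (string, error) {
	target, err := os.Readlink(path)
	if err != nil {
		return "", fmt.Errorf("error detecting namespace type from path: %s: %v", path, err)
	}
	kind, _, found := strings.Cut(target, ":")
	if !found {
		return "", fmt.Errorf("unexpected namespace link target %q", target)
	}
	return kind, nil
}

func main() {
	// Demonstrate against the current process's own mount namespace.
	kind, err := detectNamespaceType(fmt.Sprintf("/proc/%d/ns/mnt", os.Getpid()))
	fmt.Println(kind, err)
}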
Pod name: virt-launcher-testvmi6xfsp-qhtz8 Pod phase: Running level=info timestamp=2018-08-02T11:59:48.073319Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T11:59:48.073925Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc42031a4c0]]" level=info timestamp=2018-08-02T11:59:48.077866Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T11:59:58.090393Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T11:59:58.201979Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6xfsp" level=info timestamp=2018-08-02T11:59:58.203867Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T11:59:58.204447Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T12:02:03.362728Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:02:08.867935Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:02:11 http: TLS handshake error from 10.129.0.1:38804: EOF level=info timestamp=2018-08-02T12:02:11.890025Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:02:12.115708Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:02:21 http: TLS handshake error from 10.129.0.1:38816: EOF level=info timestamp=2018-08-02T12:02:22.132563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:02:31 http: TLS handshake error from 10.129.0.1:38828: EOF level=info timestamp=2018-08-02T12:02:32.395774Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:02:33.450218Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:02:33.622867Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:02:39.056589Z pos=filter.go:46 component=virt-api 
remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:02:41 http: TLS handshake error from 10.129.0.1:38840: EOF level=info timestamp=2018-08-02T12:02:42.148431Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:02:42.677109Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 12:00:26 http: TLS handshake error from 10.129.0.1:48402: EOF 2018/08/02 12:00:36 http: TLS handshake error from 10.129.0.1:48414: EOF 2018/08/02 12:00:46 http: TLS handshake error from 10.129.0.1:48426: EOF 2018/08/02 12:00:56 http: TLS handshake error from 10.129.0.1:48438: EOF 2018/08/02 12:01:06 http: TLS handshake error from 10.129.0.1:48450: EOF 2018/08/02 12:01:16 http: TLS handshake error from 10.129.0.1:48462: EOF 2018/08/02 12:01:26 http: TLS handshake error from 10.129.0.1:48474: EOF 2018/08/02 12:01:36 http: TLS handshake error from 10.129.0.1:48486: EOF level=info timestamp=2018-08-02T12:01:42.167111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:01:46 http: TLS handshake error from 10.129.0.1:48498: EOF 2018/08/02 12:01:56 http: TLS handshake error from 10.129.0.1:48512: EOF 2018/08/02 12:02:06 http: TLS handshake error from 10.129.0.1:48524: EOF 2018/08/02 12:02:16 http: TLS handshake error from 10.129.0.1:48536: EOF 2018/08/02 12:02:26 http: TLS handshake error from 10.129.0.1:48548: EOF 2018/08/02 12:02:36 http: TLS handshake error from 10.129.0.1:48560: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:56:40.634714Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.635931Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmix22nr kind= uid=1d7234b0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.669527Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.670261Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimpwfh kind= uid=1d773af0-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.695882Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.696109Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 
component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.921858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T12:00:40.998456Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6xfsp" level=info timestamp=2018-08-02T12:01:21.960730Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi6xfsp, existing: true\n" level=info timestamp=2018-08-02T12:01:21.962337Z pos=vm.go:317 component=virt-handler msg="vmi is in 
phase: Scheduled\n" level=info timestamp=2018-08-02T12:01:21.962500Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:01:21.963269Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:01:21.970525Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:01:21.971403Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi6xfsp" level=info timestamp=2018-08-02T12:02:23.917473Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmix22nr, existing: false\n" level=info timestamp=2018-08-02T12:02:23.918290Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:02:23.918642Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T12:02:23.919364Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmix22nr kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T12:02:24.888586Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi96txx, existing: false\n" level=info timestamp=2018-08-02T12:02:24.888809Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:02:24.889274Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi96txx kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T12:02:24.889726Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi96txx kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." 
level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi6xfsp-qhtz8
Pod phase: Running
level=info timestamp=2018-08-02T11:59:48.073319Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T11:59:48.073925Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc42031a4c0]]"
level=info timestamp=2018-08-02T11:59:48.077866Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T11:59:58.090393Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T11:59:58.201979Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi6xfsp"
level=info timestamp=2018-08-02T11:59:58.203867Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T11:59:58.204447Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [181.401 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should successfully start with hook sidecar annotation [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60

      Unexpected Warning event received: testvmi6xfsp,89e3a7b1-964b-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt
      Expected
        : Warning
      not to equal
        : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
STEP: Starting a VMI
level=info timestamp=2018-08-02T11:59:42.561101Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6xfsp kind=VirtualMachineInstance uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi6xfsp-qhtz8"
level=info timestamp=2018-08-02T11:59:59.408112Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi6xfsp kind=VirtualMachineInstance uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmi6xfsp-qhtz8"
level=error timestamp=2018-08-02T11:59:59.523758Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi6xfsp kind=VirtualMachineInstance uid=89e3a7b1-964b-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T12:02:11.890025Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:02:12.115708Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 12:02:21 http: TLS handshake error from 10.129.0.1:38816: EOF
level=info timestamp=2018-08-02T12:02:22.132563Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:02:31 http: TLS handshake error from 10.129.0.1:38828: EOF
level=info timestamp=2018-08-02T12:02:32.395774Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:02:33.450218Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:02:33.622867Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:02:39.056589Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:02:41 http: TLS handshake error from 10.129.0.1:38840: EOF
level=info timestamp=2018-08-02T12:02:42.148431Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:02:42.677109Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:02:51 http: TLS handshake error from 10.129.0.1:38852: EOF
level=info timestamp=2018-08-02T12:02:52.947065Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=-
method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:03:01 http: TLS handshake error from 10.129.0.1:38864: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 12:00:46 http: TLS handshake error from 10.129.0.1:48426: EOF 2018/08/02 12:00:56 http: TLS handshake error from 10.129.0.1:48438: EOF 2018/08/02 12:01:06 http: TLS handshake error from 10.129.0.1:48450: EOF 2018/08/02 12:01:16 http: TLS handshake error from 10.129.0.1:48462: EOF 2018/08/02 12:01:26 http: TLS handshake error from 10.129.0.1:48474: EOF 2018/08/02 12:01:36 http: TLS handshake error from 10.129.0.1:48486: EOF level=info timestamp=2018-08-02T12:01:42.167111Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:01:46 http: TLS handshake error from 10.129.0.1:48498: EOF 2018/08/02 12:01:56 http: TLS handshake error from 10.129.0.1:48512: EOF 2018/08/02 12:02:06 http: TLS handshake error from 10.129.0.1:48524: EOF 2018/08/02 12:02:16 http: TLS handshake error from 10.129.0.1:48536: EOF 2018/08/02 12:02:26 http: TLS handshake error from 10.129.0.1:48548: EOF 2018/08/02 12:02:36 http: TLS handshake error from 10.129.0.1:48560: EOF 2018/08/02 12:02:46 http: TLS handshake error from 10.129.0.1:48572: EOF 2018/08/02 12:02:56 http: TLS handshake error from 10.129.0.1:48584: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:56:40.695882Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.696109Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.921858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" 
msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T12:03:01.705591Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:03:01.707883Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T12:03:01.708440Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:03:01.789021Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv6ftr, existing: true\n" level=info timestamp=2018-08-02T12:03:01.789240Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:03:01.789330Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:03:01.789560Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:03:01.791687Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:03:01.792106Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:03:01.952598Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv6ftr, existing: true\n" level=info timestamp=2018-08-02T12:03:01.952754Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:03:01.952871Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:03:01.953105Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:03:01.955908Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:03:01.957991Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n" level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
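The virt-controller entries above repeatedly report "Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ...: the object has been modified" before re-enqueuing the VMI. That message is the Kubernetes API server's optimistic-concurrency conflict (HTTP 409): the update was issued against a stale resourceVersion, and the standard remedy is to re-read the object and retry. The sketch below shows the general client-go pattern for that; note that kubevirt's controller instead re-enqueues the key on its workqueue, as the log shows, and updateVMI here is a hypothetical stand-in for a real get-mutate-update call.

package main

import (
	"fmt"

	"k8s.io/client-go/util/retry"
)

// updateVMI is a hypothetical stand-in for fetching the latest object,
// mutating it, and calling Update(). On a stale resourceVersion the API
// server answers 409 Conflict, which RetryOnConflict recognizes and
// retries with backoff instead of failing outright.
func updateVMI() error {
	// get latest VMI -> mutate -> Update()
	return nil
}

func main() {
	if err := retry.RetryOnConflict(retry.DefaultRetry, updateVMI); err != nil {
		fmt.Println("update failed after retries:", err)
	}
}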
Pod name: virt-launcher-testvmiv6ftr-pcvw6 Pod phase: Running level=info timestamp=2018-08-02T12:02:50.551714Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T12:02:50.552374Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc4202b1340]]" level=info timestamp=2018-08-02T12:02:50.556267Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T12:03:00.570403Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T12:03:00.685337Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiv6ftr" level=info timestamp=2018-08-02T12:03:00.688490Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T12:03:00.689063Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running level=info timestamp=2018-08-02T12:05:06.308000Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:05:10.606643Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:05:11 http: TLS handshake error from 10.129.0.1:39022: EOF level=info timestamp=2018-08-02T12:05:16.477451Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:05:21 http: TLS handshake error from 10.129.0.1:39034: EOF level=info timestamp=2018-08-02T12:05:27.245386Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:05:31 http: TLS handshake error from 10.129.0.1:39046: EOF level=info timestamp=2018-08-02T12:05:35.397875Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:05:35.456767Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:05:36.167970Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:05:36.173804Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:05:37.517390Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- 
method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:05:40.736703Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:05:41 http: TLS handshake error from 10.129.0.1:39058: EOF level=info timestamp=2018-08-02T12:05:42.019891Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 12:03:36 http: TLS handshake error from 10.129.0.1:48632: EOF 2018/08/02 12:03:46 http: TLS handshake error from 10.129.0.1:48644: EOF 2018/08/02 12:03:56 http: TLS handshake error from 10.129.0.1:48656: EOF 2018/08/02 12:04:06 http: TLS handshake error from 10.129.0.1:48668: EOF 2018/08/02 12:04:16 http: TLS handshake error from 10.129.0.1:48680: EOF 2018/08/02 12:04:26 http: TLS handshake error from 10.129.0.1:48692: EOF 2018/08/02 12:04:36 http: TLS handshake error from 10.129.0.1:48704: EOF level=info timestamp=2018-08-02T12:04:42.114126Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:04:46 http: TLS handshake error from 10.129.0.1:48716: EOF 2018/08/02 12:04:56 http: TLS handshake error from 10.129.0.1:48730: EOF 2018/08/02 12:05:06 http: TLS handshake error from 10.129.0.1:48742: EOF level=info timestamp=2018-08-02T12:05:12.182494Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:05:16 http: TLS handshake error from 10.129.0.1:48754: EOF 2018/08/02 12:05:26 http: TLS handshake error from 10.129.0.1:48766: EOF 2018/08/02 12:05:36 http: TLS handshake error from 10.129.0.1:48778: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:56:40.695882Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.696109Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi96txx kind= uid=1d7c6824-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T11:56:40.921858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr" level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be 
fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T12:03:22.185992Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Processing vmi update" 
level=error timestamp=2018-08-02T12:03:22.191547Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:03:22.191943Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:03:42.673836Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv6ftr, existing: true\n"
level=info timestamp=2018-08-02T12:03:42.674781Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:03:42.674881Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:03:42.675535Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:03:42.679655Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:03:42.680479Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:04:23.641720Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv6ftr, existing: true\n"
level=info timestamp=2018-08-02T12:04:23.642627Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:04:23.642776Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:04:23.643496Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:04:23.647491Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:04:23.648111Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
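The repeated "Operation cannot be fulfilled ... the object has been modified" messages in the virt-controller log above are ordinary Kubernetes optimistic-concurrency conflicts: an update carries the resourceVersion it was computed from, and the API server rejects it if the object has moved on, so the controller re-enqueues the key and works from the latest copy. A minimal stand-alone sketch of that pattern (hypothetical names, not virt-controller's actual code):

```go
package main

import (
	"errors"
	"fmt"
)

var errConflict = errors.New("the object has been modified; please apply your changes to the latest version and try again")

// store stands in for the API server: one object guarded by a resourceVersion.
type store struct{ version int }

// update succeeds only if the caller based its change on the current version.
func (s *store) update(baseVersion int) error {
	if baseVersion != s.version {
		return errConflict // stale read: someone else updated in between
	}
	s.version++
	return nil
}

func main() {
	s := &store{version: 7}
	base := 6 // we worked from version 6, but the object is already at 7
	for attempt := 1; attempt <= 3; attempt++ {
		if err := s.update(base); err != nil {
			fmt.Printf("attempt %d: %v (re-enqueuing)\n", attempt, err)
			base = s.version // re-read the latest version before retrying
			continue
		}
		fmt.Printf("attempt %d: update succeeded\n", attempt)
		break
	}
}
```

client-go packages the same retry loop as retry.RetryOnConflict in k8s.io/client-go/util/retry; re-enqueuing through a workqueue, as the log shows, is the other common way to resolve these conflicts.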
Pod name: virt-launcher-testvmiv6ftr-pcvw6
Pod phase: Running
level=info timestamp=2018-08-02T12:02:50.551714Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T12:02:50.552374Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc4202b1340]]"
level=info timestamp=2018-08-02T12:02:50.556267Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T12:03:00.570403Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T12:03:00.685337Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiv6ftr"
level=info timestamp=2018-08-02T12:03:00.688490Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T12:03:00.689063Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.875 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should call Collect and OnDefineDomain on the hook sidecar [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67

      Unexpected Warning event received: testvmiv6ftr,f5a7e2aa-964b-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt
      Expected
          <string>: Warning
      not to equal
          <string>: Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
STEP: Getting hook-sidecar logs
level=info timestamp=2018-08-02T12:02:43.253866Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiv6ftr kind=VirtualMachineInstance uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiv6ftr-pcvw6"
level=info timestamp=2018-08-02T12:03:01.100942Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiv6ftr kind=VirtualMachineInstance uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Pod ownership transferred to the node virt-launcher-testvmiv6ftr-pcvw6"
level=error timestamp=2018-08-02T12:03:01.179902Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmiv6ftr kind=VirtualMachineInstance uid=f5a7e2aa-964b-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt"
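The assertion at tests/utils.go:247 is evidently watching the VMI's event stream and requiring that no event of type Warning arrives while the test waits; Gomega's Expect(eventType).NotTo(Equal("Warning")) renders exactly the "Expected ... not to equal" output above. A rough, self-contained stand-in for that check (hypothetical names, simplified from the real helper):

```go
package main

import "fmt"

// Event is a stripped-down stand-in for a Kubernetes core/v1 Event.
type Event struct {
	Type    string // "Normal" or "Warning"
	Message string
}

// firstWarning scans the events observed so far and reports the first
// Warning-type one, so a test can fail fast with the event's message
// instead of waiting out the full timeout.
func firstWarning(events []Event) (Event, bool) {
	for _, e := range events {
		if e.Type == "Warning" {
			return e, true
		}
	}
	return Event{}, false
}

func main() {
	events := []Event{
		{Type: "Normal", Message: "Created virtual machine pod virt-launcher-testvmiv6ftr-pcvw6"},
		{Type: "Warning", Message: "Failed to open current namespace"},
	}
	if w, ok := firstWarning(events); ok {
		fmt.Printf("unexpected warning event received: %s\n", w.Message)
	}
}
```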
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 12:05:21 http: TLS handshake error from 10.129.0.1:39034: EOF
level=info timestamp=2018-08-02T12:05:27.245386Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:05:31 http: TLS handshake error from 10.129.0.1:39046: EOF
level=info timestamp=2018-08-02T12:05:35.397875Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:05:35.456767Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:05:36.167970Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:05:36.173804Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:05:37.517390Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:05:40.736703Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:05:41 http: TLS handshake error from 10.129.0.1:39058: EOF
level=info timestamp=2018-08-02T12:05:42.019891Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:05:47.726074Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:05:51 http: TLS handshake error from 10.129.0.1:39070: EOF
level=info timestamp=2018-08-02T12:05:57.975319Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:06:01 http: TLS handshake error from 10.129.0.1:39082: EOF

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 12:03:56 http: TLS handshake error from 10.129.0.1:48656: EOF
2018/08/02 12:04:06 http: TLS handshake error from 10.129.0.1:48668: EOF
2018/08/02 12:04:16 http: TLS handshake error from 10.129.0.1:48680: EOF
2018/08/02 12:04:26 http: TLS handshake error from 10.129.0.1:48692: EOF
2018/08/02 12:04:36 http: TLS handshake error from 10.129.0.1:48704: EOF
level=info timestamp=2018-08-02T12:04:42.114126Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 12:04:46 http: TLS handshake error from 10.129.0.1:48716: EOF
2018/08/02 12:04:56 http: TLS handshake error from 10.129.0.1:48730: EOF
2018/08/02 12:05:06 http: TLS handshake error from 10.129.0.1:48742: EOF
level=info timestamp=2018-08-02T12:05:12.182494Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 12:05:16 http: TLS handshake error from 10.129.0.1:48754: EOF
2018/08/02 12:05:26 http: TLS handshake error from 10.129.0.1:48766: EOF
2018/08/02 12:05:36 http: TLS handshake error from 10.129.0.1:48778: EOF
2018/08/02 12:05:46 http: TLS handshake error from 10.129.0.1:48790: EOF
2018/08/02 12:05:56 http: TLS handshake error from 10.129.0.1:48802: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:56:40.921858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr"
level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh"
level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh"
level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx"
level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb"
level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:05:44.349113Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:05:44.349996Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T12:06:02.893307Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:06:02.894821Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:06:02.895306Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2ktrz"
level=info timestamp=2018-08-02T12:06:02.975729Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2ktrz, existing: true\n"
level=info timestamp=2018-08-02T12:06:02.975917Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:06:02.975987Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:06:02.976343Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:06:02.978051Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:06:02.978485Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2ktrz"
level=info timestamp=2018-08-02T12:06:03.139263Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2ktrz, existing: true\n"
level=info timestamp=2018-08-02T12:06:03.139451Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:06:03.139526Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:06:03.139753Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:06:03.142539Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:06:03.143450Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2ktrz"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."

Pod name: virt-launcher-testvmi2ktrz-8dp5c
Pod phase: Running
level=info timestamp=2018-08-02T12:05:50.805859Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T12:05:50.806480Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc42036e5c0]]"
level=info timestamp=2018-08-02T12:05:50.810317Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T12:06:00.823126Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T12:06:00.912405Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2ktrz"
level=info timestamp=2018-08-02T12:06:00.914721Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T12:06:00.915495Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T12:08:10.716244Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:11 http: TLS handshake error from 10.129.0.1:39240: EOF
level=info timestamp=2018-08-02T12:08:11.788454Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:08:12.035682Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:08:21.010723Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:21 http: TLS handshake error from 10.129.0.1:39252: EOF
level=info timestamp=2018-08-02T12:08:31.261854Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:31 http: TLS handshake error from 10.129.0.1:39264: EOF
level=info timestamp=2018-08-02T12:08:36.214769Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:08:36.221268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:08:37.521510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:08:37.646781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:08:41.481455Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:41 http: TLS handshake error from 10.129.0.1:39276: EOF
level=info timestamp=2018-08-02T12:08:42.296074Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 12:06:36 http: TLS handshake error from 10.129.0.1:48850: EOF
2018/08/02 12:06:46 http: TLS handshake error from 10.129.0.1:48862: EOF
2018/08/02 12:06:56 http: TLS handshake error from 10.129.0.1:48874: EOF
2018/08/02 12:07:06 http: TLS handshake error from 10.129.0.1:48886: EOF
level=info timestamp=2018-08-02T12:07:12.198353Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 12:07:16 http: TLS handshake error from 10.129.0.1:48898: EOF
2018/08/02 12:07:26 http: TLS handshake error from 10.129.0.1:48910: EOF
2018/08/02 12:07:36 http: TLS handshake error from 10.129.0.1:48922: EOF
2018/08/02 12:07:46 http: TLS handshake error from 10.129.0.1:48934: EOF
2018/08/02 12:07:56 http: TLS handshake error from 10.129.0.1:48948: EOF
2018/08/02 12:08:06 http: TLS handshake error from 10.129.0.1:48960: EOF
2018/08/02 12:08:16 http: TLS handshake error from 10.129.0.1:48972: EOF
2018/08/02 12:08:26 http: TLS handshake error from 10.129.0.1:48984: EOF
2018/08/02 12:08:36 http: TLS handshake error from 10.129.0.1:48996: EOF
level=info timestamp=2018-08-02T12:08:42.146478Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:56:40.745646Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:56:40.745862Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif7nvb kind= uid=1d80251d-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T11:56:40.921858Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmix22nr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmix22nr"
level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh"
level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh"
level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx"
level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb"
level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:05:44.349113Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:05:44.349996Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T12:06:23.334076Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:06:23.337909Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:06:23.338659Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2ktrz"
level=info timestamp=2018-08-02T12:06:43.819708Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2ktrz, existing: true\n"
level=info timestamp=2018-08-02T12:06:43.820034Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:06:43.820112Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:06:43.820495Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:06:43.824381Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:06:43.824882Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2ktrz"
level=info timestamp=2018-08-02T12:07:24.786096Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2ktrz, existing: true\n"
level=info timestamp=2018-08-02T12:07:24.786959Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:07:24.787059Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:07:24.787667Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:07:24.792357Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:07:24.793367Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmi2ktrz"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmi2ktrz-8dp5c
Pod phase: Running
level=info timestamp=2018-08-02T12:05:50.805859Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T12:05:50.806480Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[OnDefineDomain:[0xc42036e5c0]]"
level=info timestamp=2018-08-02T12:05:50.810317Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T12:06:00.823126Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T12:06:00.912405Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmi2ktrz"
level=info timestamp=2018-08-02T12:06:00.914721Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T12:06:00.915495Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure [180.995 seconds]
HookSidecars
/root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40
  VMI definition
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58
    with SM BIOS hook sidecar
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59
      should update domain XML with SM BIOS properties [It]
      /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83

      Unexpected Warning event received: testvmi2ktrz,61864ca6-964c-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt
      Expected
          <string>: Warning
      not to equal
          <string>: Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
STEP: Reading domain XML using virsh
level=info timestamp=2018-08-02T12:05:44.226572Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi2ktrz kind=VirtualMachineInstance uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmi2ktrz-8dp5c"
level=info timestamp=2018-08-02T12:06:02.274046Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmi2ktrz kind=VirtualMachineInstance uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Pod ownership transferred to the node virt-launcher-testvmi2ktrz-8dp5c"
level=error timestamp=2018-08-02T12:06:02.343504Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmi2ktrz kind=VirtualMachineInstance uid=61864ca6-964c-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt"
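Both failures bottom out in the same virt-handler error: "Error detecting namespace type from path: /proc/23708/task/<tid>/ns/mnt". On Linux, each /proc/<pid>/task/<tid>/ns/* entry is a symlink whose target encodes the namespace type, e.g. mnt:[4026531840]; if that link cannot be read (for instance because the thread has exited, or the process cannot see the host's /proc), type detection fails in exactly this way. A hypothetical illustration of that step (not virt-handler's actual code):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// namespaceType reads a /proc/<pid>/task/<tid>/ns/<ns> symlink, whose
// target looks like "mnt:[4026531840]", and returns the part before the
// colon. If the task directory is gone, Readlink fails and we surface an
// error shaped like the one virt-handler logs.
func namespaceType(nsPath string) (string, error) {
	target, err := os.Readlink(nsPath)
	if err != nil {
		return "", fmt.Errorf("error detecting namespace type from path: %s", nsPath)
	}
	typ, _, ok := strings.Cut(target, ":")
	if !ok {
		return "", fmt.Errorf("unexpected namespace link target %q", target)
	}
	return typ, nil
}

func main() {
	fmt.Println(namespaceType("/proc/self/ns/mnt")) // prints "mnt <nil>" on Linux
}
```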
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory

Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory

Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
level=info timestamp=2018-08-02T12:08:11.788454Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:08:12.035682Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:08:21.010723Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:21 http: TLS handshake error from 10.129.0.1:39252: EOF
level=info timestamp=2018-08-02T12:08:31.261854Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:31 http: TLS handshake error from 10.129.0.1:39264: EOF
level=info timestamp=2018-08-02T12:08:36.214769Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:08:36.221268Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19
level=info timestamp=2018-08-02T12:08:37.521510Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:08:37.646781Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:08:41.481455Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:41 http: TLS handshake error from 10.129.0.1:39276: EOF
level=info timestamp=2018-08-02T12:08:42.296074Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:08:51 http: TLS handshake error from 10.129.0.1:39288: EOF
level=info timestamp=2018-08-02T12:08:51.705865Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136

Pod name: virt-api-7d79764579-x6tjd
Pod phase: Running
2018/08/02 12:06:56 http: TLS handshake error from 10.129.0.1:48874: EOF
2018/08/02 12:07:06 http: TLS handshake error from 10.129.0.1:48886: EOF
level=info timestamp=2018-08-02T12:07:12.198353Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 12:07:16 http: TLS handshake error from 10.129.0.1:48898: EOF
2018/08/02 12:07:26 http: TLS handshake error from 10.129.0.1:48910: EOF
2018/08/02 12:07:36 http: TLS handshake error from 10.129.0.1:48922: EOF
2018/08/02 12:07:46 http: TLS handshake error from 10.129.0.1:48934: EOF
2018/08/02 12:07:56 http: TLS handshake error from 10.129.0.1:48948: EOF
2018/08/02 12:08:06 http: TLS handshake error from 10.129.0.1:48960: EOF
2018/08/02 12:08:16 http: TLS handshake error from 10.129.0.1:48972: EOF
2018/08/02 12:08:26 http: TLS handshake error from 10.129.0.1:48984: EOF
2018/08/02 12:08:36 http: TLS handshake error from 10.129.0.1:48996: EOF
level=info timestamp=2018-08-02T12:08:42.146478Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19
2018/08/02 12:08:46 http: TLS handshake error from 10.129.0.1:49008: EOF
2018/08/02 12:08:56 http: TLS handshake error from 10.129.0.1:49020: EOF

Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh"
level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh"
level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx"
level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb"
level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:05:44.349113Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:05:44.349996Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:08:45.372413Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:08:45.373842Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:08:45.596536Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizsrgc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc"

Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182

Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T12:09:00.181433Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:09:00.183847Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:09:00.184224Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc"
level=info timestamp=2018-08-02T12:09:00.264683Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizsrgc, existing: true\n"
level=info timestamp=2018-08-02T12:09:00.264858Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:09:00.264931Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:09:00.265322Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:09:00.267940Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:09:00.268404Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc"
level=info timestamp=2018-08-02T12:09:00.428851Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizsrgc, existing: true\n"
level=info timestamp=2018-08-02T12:09:00.429095Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:09:00.429232Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:09:00.429507Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:09:00.431849Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:09:00.432306Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc"

Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmizsrgc-48gll Pod phase: Running level=info timestamp=2018-08-02T12:08:49.524460Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-02T12:08:49.525033Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-02T12:08:49.527933Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" level=info timestamp=2018-08-02T12:08:59.564643Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-02T12:08:59.760708Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmizsrgc" level=info timestamp=2018-08-02T12:08:59.766388Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback" level=info timestamp=2018-08-02T12:08:59.766849Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready" Pod name: disks-images-provider-7b7bs Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-g5zxr Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running 2018/08/02 12:11:11 http: TLS handshake error from 10.129.0.1:39458: EOF level=info timestamp=2018-08-02T12:11:13.710993Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:11:14.780794Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:11:21 http: TLS handshake error from 10.129.0.1:39470: EOF level=info timestamp=2018-08-02T12:11:25.011085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:11:31 http: TLS handshake error from 10.129.0.1:39482: EOF level=info timestamp=2018-08-02T12:11:35.147718Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:11:35.899638Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:11:35.933821Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- 
method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:11:39.264655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:11:39.380610Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:11:41 http: TLS handshake error from 10.129.0.1:39494: EOF level=info timestamp=2018-08-02T12:11:42.023865Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:11:43.803964Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:11:45.345490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 12:09:46 http: TLS handshake error from 10.129.0.1:49080: EOF 2018/08/02 12:09:56 http: TLS handshake error from 10.129.0.1:49092: EOF 2018/08/02 12:10:06 http: TLS handshake error from 10.129.0.1:49104: EOF level=info timestamp=2018-08-02T12:10:09.559252Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:10:11.138929Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:10:16 http: TLS handshake error from 10.129.0.1:49116: EOF 2018/08/02 12:10:26 http: TLS handshake error from 10.129.0.1:49128: EOF 2018/08/02 12:10:36 http: TLS handshake error from 10.129.0.1:49140: EOF 2018/08/02 12:10:46 http: TLS handshake error from 10.129.0.1:49152: EOF 2018/08/02 12:10:56 http: TLS handshake error from 10.129.0.1:49166: EOF 2018/08/02 12:11:06 http: TLS handshake error from 10.129.0.1:49178: EOF level=info timestamp=2018-08-02T12:11:12.192180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:11:16 http: TLS handshake error from 10.129.0.1:49190: EOF 2018/08/02 12:11:26 http: TLS handshake error from 10.129.0.1:49202: EOF 2018/08/02 12:11:36 http: TLS handshake error from 10.129.0.1:49214: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:56:40.956738Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.444053Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimpwfh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance 
kubevirt-test-default/testvmimpwfh" level=info timestamp=2018-08-02T11:56:41.844268Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi96txx\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi96txx" level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:05:44.349113Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:05:44.349996Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:08:45.372413Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:08:45.373842Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:08:45.596536Z 
pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizsrgc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T12:09:41.105009Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:09:41.109226Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:09:41.109945Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc" level=info timestamp=2018-08-02T12:10:22.071438Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizsrgc, existing: true\n" level=info timestamp=2018-08-02T12:10:22.072239Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:10:22.072595Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:10:22.073197Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:10:22.081225Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T12:10:22.082119Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc" level=info timestamp=2018-08-02T12:11:44.005618Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizsrgc, existing: true\n" level=info timestamp=2018-08-02T12:11:44.007710Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:11:44.007885Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:11:44.008963Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:11:44.019378Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:11:44.021252Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n" level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmizsrgc-48gll
Pod phase: Running
level=info timestamp=2018-08-02T12:08:49.524460Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T12:08:49.525033Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T12:08:49.527933Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T12:08:59.564643Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T12:08:59.760708Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmizsrgc"
level=info timestamp=2018-08-02T12:08:59.766388Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T12:08:59.766849Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.031 seconds]
VNC
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54
    with VNC connection [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62
      should allow accessing the VNC device
      /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64

      Unexpected Warning event received: testvmizsrgc,cd6e439b-964c-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt
      Expected
          : Warning
      not to equal
          : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T12:08:45.333560Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmizsrgc kind=VirtualMachineInstance uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmizsrgc-48gll"
level=info timestamp=2018-08-02T12:08:59.564541Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmizsrgc kind=VirtualMachineInstance uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmizsrgc-48gll"
level=error timestamp=2018-08-02T12:08:59.609342Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmizsrgc kind=VirtualMachineInstance uid=cd6e439b-964c-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23771/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running copy all images to host mount directory Pod name: virt-api-7d79764579-x6llj Pod phase: Running 2018/08/02 12:11:21 http: TLS handshake error from 10.129.0.1:39470: EOF level=info timestamp=2018-08-02T12:11:25.011085Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:11:31 http: TLS handshake error from 10.129.0.1:39482: EOF level=info timestamp=2018-08-02T12:11:35.147718Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:11:35.899638Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:11:35.933821Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:11:39.264655Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:11:39.380610Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:11:41 http: TLS handshake error from 10.129.0.1:39494: EOF level=info timestamp=2018-08-02T12:11:42.023865Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:11:43.803964Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:11:45.345490Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:11:51 http: TLS handshake error from 10.129.0.1:39506: EOF level=info timestamp=2018-08-02T12:11:55.616106Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:12:01 http: TLS handshake error from 10.129.0.1:39518: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 12:10:06 http: TLS handshake error from 10.129.0.1:49104: EOF level=info timestamp=2018-08-02T12:10:09.559252Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 level=info timestamp=2018-08-02T12:10:11.138929Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:10:16 http: TLS handshake error from 10.129.0.1:49116: EOF 2018/08/02 12:10:26 http: TLS handshake error from 10.129.0.1:49128: EOF 2018/08/02 12:10:36 http: TLS handshake error from 10.129.0.1:49140: EOF 2018/08/02 12:10:46 http: TLS handshake error from 10.129.0.1:49152: EOF 2018/08/02 12:10:56 http: TLS handshake 
error from 10.129.0.1:49166: EOF 2018/08/02 12:11:06 http: TLS handshake error from 10.129.0.1:49178: EOF level=info timestamp=2018-08-02T12:11:12.192180Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:11:16 http: TLS handshake error from 10.129.0.1:49190: EOF 2018/08/02 12:11:26 http: TLS handshake error from 10.129.0.1:49202: EOF 2018/08/02 12:11:36 http: TLS handshake error from 10.129.0.1:49214: EOF 2018/08/02 12:11:46 http: TLS handshake error from 10.129.0.1:49226: EOF 2018/08/02 12:11:56 http: TLS handshake error from 10.129.0.1:49238: EOF Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:05:44.349113Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:05:44.349996Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:08:45.372413Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Initializing 
VirtualMachineInstance" level=info timestamp=2018-08-02T12:08:45.373842Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:08:45.596536Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizsrgc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc" level=info timestamp=2018-08-02T12:11:46.491486Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:11:46.497529Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:11:46.885636Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifrbxf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T12:12:03.045128Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:12:03.047670Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." 
level=info timestamp=2018-08-02T12:12:03.048047Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf" level=info timestamp=2018-08-02T12:12:03.128590Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmifrbxf, existing: true\n" level=info timestamp=2018-08-02T12:12:03.128789Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:12:03.128898Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:12:03.129257Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:12:03.131839Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:12:03.132325Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf" level=info timestamp=2018-08-02T12:12:03.292755Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmifrbxf, existing: true\n" level=info timestamp=2018-08-02T12:12:03.292952Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:12:03.293031Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:12:03.293349Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:12:03.296042Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:12:03.296541Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf" Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n" level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
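Every synchronization attempt logged above fails the same way: virt-handler cannot resolve the namespace type behind /proc/23708/task/<tid>/ns/mnt. On Linux these ns entries are magic symlinks whose targets read like "mnt:[4026531840]", so detection generally amounts to a readlink plus parsing the prefix before the colon; once the task directory vanishes (the thread exits), the readlink fails, and retries against a stale task ID keep failing. What follows is only a minimal Go sketch of that detection pattern, not the actual code at vm.go:426; detectNamespaceType is a hypothetical name.

package main

import (
	"fmt"
	"os"
	"strings"
)

// detectNamespaceType resolves a /proc/.../ns/<type> magic symlink, whose
// target has the form "mnt:[4026531840]", and returns the part before ':'.
func detectNamespaceType(path string) (string, error) {
	target, err := os.Readlink(path)
	if err != nil {
		// If the task behind the path has already exited, its /proc entry is
		// gone and Readlink returns ENOENT -- one plausible way to end up with
		// "Error detecting namespace type from path: ..." as seen above.
		return "", fmt.Errorf("error detecting namespace type from path: %s: %v", path, err)
	}
	parts := strings.SplitN(target, ":", 2)
	if len(parts) != 2 {
		return "", fmt.Errorf("unexpected namespace link target %q for %s", target, path)
	}
	return parts[0], nil
}

func main() {
	// Works only on Linux, where /proc/self/ns/mnt exists.
	typ, err := detectNamespaceType("/proc/self/ns/mnt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("namespace type:", typ) // expected output: "namespace type: mnt"
}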
Pod name: virt-launcher-testvmifrbxf-8cb69
Pod phase: Running
level=info timestamp=2018-08-02T12:11:51.060373Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T12:11:51.060669Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T12:11:51.062909Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T12:12:01.075692Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T12:12:01.185532Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmifrbxf"
level=info timestamp=2018-08-02T12:12:01.192904Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T12:12:01.194546Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 12:14:01 http: TLS handshake error from 10.129.0.1:39664: EOF
level=info timestamp=2018-08-02T12:14:08.685376Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:14:10.824436Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:14:10.825951Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:14:11 http: TLS handshake error from 10.129.0.1:39676: EOF
level=info timestamp=2018-08-02T12:14:15.191489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:14:18.788885Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:14:21 http: TLS handshake error from 10.129.0.1:39688: EOF
level=info timestamp=2018-08-02T12:14:28.881679Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
2018/08/02 12:14:31 http: TLS handshake error from 10.129.0.1:39700: EOF
level=info timestamp=2018-08-02T12:14:39.013219Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:14:41.038835Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200
contentLength=136 level=info timestamp=2018-08-02T12:14:41.080780Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:14:41 http: TLS handshake error from 10.129.0.1:39712: EOF level=info timestamp=2018-08-02T12:14:45.478757Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 12:12:46 http: TLS handshake error from 10.129.0.1:49298: EOF 2018/08/02 12:12:56 http: TLS handshake error from 10.129.0.1:49310: EOF 2018/08/02 12:13:06 http: TLS handshake error from 10.129.0.1:49322: EOF level=info timestamp=2018-08-02T12:13:12.295636Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:13:16 http: TLS handshake error from 10.129.0.1:49334: EOF 2018/08/02 12:13:26 http: TLS handshake error from 10.129.0.1:49346: EOF 2018/08/02 12:13:36 http: TLS handshake error from 10.129.0.1:49358: EOF 2018/08/02 12:13:46 http: TLS handshake error from 10.129.0.1:49370: EOF 2018/08/02 12:13:56 http: TLS handshake error from 10.129.0.1:49384: EOF 2018/08/02 12:14:06 http: TLS handshake error from 10.129.0.1:49396: EOF level=info timestamp=2018-08-02T12:14:12.193699Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:14:16 http: TLS handshake error from 10.129.0.1:49408: EOF 2018/08/02 12:14:26 http: TLS handshake error from 10.129.0.1:49420: EOF 2018/08/02 12:14:36 http: TLS handshake error from 10.129.0.1:49432: EOF level=info timestamp=2018-08-02T12:14:42.206014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 Pod name: virt-controller-7d57d96b65-dx6hk Pod phase: Running level=info timestamp=2018-08-02T11:56:42.046284Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif7nvb\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif7nvb" level=info timestamp=2018-08-02T11:59:42.564434Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T11:59:42.565263Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6xfsp kind= uid=89e3a7b1-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.372200Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 
component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr" level=info timestamp=2018-08-02T12:05:44.349113Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:05:44.349996Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:08:45.372413Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:08:45.373842Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:08:45.596536Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizsrgc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc" level=info timestamp=2018-08-02T12:11:46.491486Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-02T12:11:46.497529Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-02T12:11:46.885636Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifrbxf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf" Pod name: virt-controller-7d57d96b65-x56j2 Pod phase: Running level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-handler-blk8f Pod phase: Running level=info timestamp=2018-08-02T12:12:43.970227Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:12:43.970344Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:12:43.970709Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= 
uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:12:43.975914Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:12:43.982048Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/14885/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf" level=info timestamp=2018-08-02T12:13:24.945096Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmifrbxf, existing: true\n" level=info timestamp=2018-08-02T12:13:24.945939Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-02T12:13:24.946044Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:13:24.946746Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Processing vmi update" level=error timestamp=2018-08-02T12:13:24.952905Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed." level=info timestamp=2018-08-02T12:13:24.953711Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23770/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf" level=info timestamp=2018-08-02T12:14:27.862665Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmizsrgc, existing: false\n" level=info timestamp=2018-08-02T12:14:27.863327Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:14:27.863817Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T12:14:27.864384Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmizsrgc kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-zmfm7 Pod phase: Running level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n" level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n" level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
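The virt-controller entries interleaved above ("Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io ... the object has been modified") are ordinary optimistic-concurrency conflicts: an update carried a stale resourceVersion, the API server rejected it with 409 Conflict, and the controller re-enqueued the VMI to retry against the latest version, which is why they are logged at level=info rather than as errors. Outside a controller's workqueue, the same read-modify-write retry is usually expressed with client-go's RetryOnConflict helper. The sketch below illustrates that pattern only; it is not KubeVirt's requeue code, and the pod name "example" and the label are placeholders.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
	"k8s.io/client-go/util/retry"
)

func main() {
	// Build a client from the default kubeconfig location.
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(config)

	// Retry the whole read-modify-write cycle whenever the server answers
	// with 409 Conflict because another writer bumped resourceVersion first.
	err = retry.RetryOnConflict(retry.DefaultRetry, func() error {
		pod, err := client.CoreV1().Pods("default").Get(context.TODO(), "example", metav1.GetOptions{})
		if err != nil {
			return err
		}
		if pod.Labels == nil {
			pod.Labels = map[string]string{}
		}
		pod.Labels["touched"] = "true"
		_, err = client.CoreV1().Pods("default").Update(context.TODO(), pod, metav1.UpdateOptions{})
		return err // a Conflict error here triggers another attempt
	})
	if err != nil {
		panic(err)
	}
	fmt.Println("update applied")
}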
Pod name: virt-launcher-testvmifrbxf-8cb69
Pod phase: Running
level=info timestamp=2018-08-02T12:11:51.060373Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T12:11:51.060669Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T12:11:51.062909Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T12:12:01.075692Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T12:12:01.185532Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmifrbxf"
level=info timestamp=2018-08-02T12:12:01.192904Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T12:12:01.194546Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

• Failure in Spec Setup (BeforeEach) [181.010 seconds]
VNC
/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46
  A new VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54
    should upgrade subresource connections if an origin header is given [BeforeEach]
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92
      for vnc
      /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46

      Unexpected Warning event received: testvmifrbxf,39594b3c-964d-11e8-bba8-525500d15501: Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt
      Expected
          : Warning
      not to equal
          : Warning

      /root/go/src/kubevirt.io/kubevirt/tests/utils.go:247
------------------------------
level=info timestamp=2018-08-02T12:11:46.542108Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifrbxf kind=VirtualMachineInstance uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Created virtual machine pod virt-launcher-testvmifrbxf-8cb69"
level=info timestamp=2018-08-02T12:12:02.403610Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmifrbxf kind=VirtualMachineInstance uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmifrbxf-8cb69"
level=error timestamp=2018-08-02T12:12:02.483935Z pos=utils.go:243 component=tests namespace=kubevirt-test-default name=testvmifrbxf kind=VirtualMachineInstance uid=39594b3c-964d-11e8-bba8-525500d15501 reason="unexpected warning event received" msg="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23760/ns/mnt"
Pod name: disks-images-provider-7b7bs
Pod phase: Running
copy all images to host mount directory
Pod name: disks-images-provider-g5zxr
Pod phase: Running
copy all images to host mount directory
Pod name: virt-api-7d79764579-x6llj
Pod phase: Running
2018/08/02 12:14:11 http: TLS handshake error from 10.129.0.1:39676: EOF
level=info timestamp=2018-08-02T12:14:15.191489Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136
level=info timestamp=2018-08-02T12:14:18.788885Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET
url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:14:21 http: TLS handshake error from 10.129.0.1:39688: EOF level=info timestamp=2018-08-02T12:14:28.881679Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:14:31 http: TLS handshake error from 10.129.0.1:39700: EOF level=info timestamp=2018-08-02T12:14:39.013219Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:14:41.038835Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:14:41.080780Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:14:41 http: TLS handshake error from 10.129.0.1:39712: EOF level=info timestamp=2018-08-02T12:14:45.478757Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-02T12:14:49.253528Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:14:51 http: TLS handshake error from 10.129.0.1:39724: EOF level=info timestamp=2018-08-02T12:14:59.373267Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/02 12:15:01 http: TLS handshake error from 10.129.0.1:39736: EOF Pod name: virt-api-7d79764579-x6tjd Pod phase: Running 2018/08/02 12:13:06 http: TLS handshake error from 10.129.0.1:49322: EOF level=info timestamp=2018-08-02T12:13:12.295636Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:13:16 http: TLS handshake error from 10.129.0.1:49334: EOF 2018/08/02 12:13:26 http: TLS handshake error from 10.129.0.1:49346: EOF 2018/08/02 12:13:36 http: TLS handshake error from 10.129.0.1:49358: EOF 2018/08/02 12:13:46 http: TLS handshake error from 10.129.0.1:49370: EOF 2018/08/02 12:13:56 http: TLS handshake error from 10.129.0.1:49384: EOF 2018/08/02 12:14:06 http: TLS handshake error from 10.129.0.1:49396: EOF level=info timestamp=2018-08-02T12:14:12.193699Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:14:16 http: TLS handshake error from 10.129.0.1:49408: EOF 2018/08/02 12:14:26 http: TLS handshake error from 10.129.0.1:49420: EOF 2018/08/02 12:14:36 http: TLS handshake error from 10.129.0.1:49432: EOF level=info timestamp=2018-08-02T12:14:42.206014Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/02 12:14:46 http: TLS handshake error from 10.129.0.1:49444: EOF 
2018/08/02 12:14:56 http: TLS handshake error from 10.129.0.1:49456: EOF
Pod name: virt-controller-7d57d96b65-dx6hk
Pod phase: Running
level=info timestamp=2018-08-02T12:02:43.372882Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiv6ftr kind= uid=f5a7e2aa-964b-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:02:43.607841Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:02:43.638534Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiv6ftr\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiv6ftr"
level=info timestamp=2018-08-02T12:05:44.349113Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:05:44.349996Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi2ktrz kind= uid=61864ca6-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:08:45.372413Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:08:45.373842Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmizsrgc kind= uid=cd6e439b-964c-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:08:45.596536Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmizsrgc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmizsrgc"
level=info timestamp=2018-08-02T12:11:46.491486Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:11:46.497529Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmifrbxf kind= uid=39594b3c-964d-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:11:46.885636Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifrbxf\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifrbxf"
level=info timestamp=2018-08-02T12:14:47.300497Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 msg="Initializing VirtualMachineInstance"
level=info timestamp=2018-08-02T12:14:47.301818Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 msg="Marking VirtualMachineInstance as initialized"
level=info timestamp=2018-08-02T12:14:47.427088Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit45z9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit45z9"
level=info timestamp=2018-08-02T12:14:47.553913Z pos=vmi.go:157 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmit45z9\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmit45z9"
Pod name: virt-controller-7d57d96b65-x56j2
Pod phase: Running
level=info timestamp=2018-08-02T10:45:20.900286Z pos=application.go:177 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182
Pod name: virt-handler-blk8f
Pod phase: Running
level=info timestamp=2018-08-02T12:15:03.078118Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:15:03.081440Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:15:03.082317Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23767/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmit45z9"
level=info timestamp=2018-08-02T12:15:03.125459Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmit45z9, existing: true\n"
level=info timestamp=2018-08-02T12:15:03.125622Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:15:03.125737Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:15:03.125956Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:15:03.129064Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:15:03.129630Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmit45z9"
level=info timestamp=2018-08-02T12:15:03.210272Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmit45z9, existing: true\n"
level=info timestamp=2018-08-02T12:15:03.210411Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n"
level=info timestamp=2018-08-02T12:15:03.210501Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:15:03.210791Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 msg="Processing vmi update"
level=error timestamp=2018-08-02T12:15:03.213642Z pos=vm.go:426 component=virt-handler namespace=kubevirt-test-default name=testvmit45z9 kind= uid=a5253021-964d-11e8-bba8-525500d15501 reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="Synchronizing the VirtualMachineInstance failed."
level=info timestamp=2018-08-02T12:15:03.214920Z pos=vm.go:253 component=virt-handler reason="Failed to open current namespace: Error detecting namespace type from path: /proc/23708/task/23708/ns/mnt" msg="re-enqueuing VirtualMachineInstance kubevirt-test-default/testvmit45z9"
Pod name: virt-handler-zmfm7
Pod phase: Running
level=info timestamp=2018-08-02T11:59:41.202195Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.202283Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.202380Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.223642Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223744Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.223869Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.224008Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T11:59:41.949157Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif7nvb, existing: false\n"
level=info timestamp=2018-08-02T11:59:41.950096Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T11:59:41.951872Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T11:59:41.953116Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif7nvb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
level=info timestamp=2018-08-02T12:02:23.508423Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmimpwfh, existing: false\n"
level=info timestamp=2018-08-02T12:02:23.508932Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n"
level=info timestamp=2018-08-02T12:02:23.509499Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain."
level=info timestamp=2018-08-02T12:02:23.510832Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmimpwfh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded."
Pod name: virt-launcher-testvmit45z9-vgwwm
Pod phase: Running
level=info timestamp=2018-08-02T12:14:52.293615Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets"
level=info timestamp=2018-08-02T12:14:52.294609Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]"
level=info timestamp=2018-08-02T12:14:52.302627Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system"
level=info timestamp=2018-08-02T12:15:02.319344Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon"
level=info timestamp=2018-08-02T12:15:02.425420Z pos=virt-launcher.go:143 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmit45z9"
level=info timestamp=2018-08-02T12:15:02.428633Z pos=client.go:152 component=virt-launcher msg="Registered libvirt event notify callback"
level=info timestamp=2018-08-02T12:15:02.429531Z pos=virt-launcher.go:60 component=virt-launcher msg="Marked as ready"

panic: test timed out after 1h30m0s

goroutine 15500 [running]:
testing.(*M).startAlarm.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc
created by time.goFunc
    /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44

goroutine 1 [chan receive, 90 minutes]:
testing.(*T).Run(0xc4204c5e00, 0x139e775, 0x9, 0x1430cc8, 0x4801e6)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301
testing.runTests.func1(0xc4204c5d10)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64
testing.tRunner(0xc4204c5d10, 0xc420123df8)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
testing.runTests(0xc4204084c0, 0x1d32a50, 0x1, 0x1, 0x412009)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4
testing.(*M).Run(0xc4206dec00, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171
main.main()
    _testmain.go:44 +0x151

goroutine 5 [chan receive]:
kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1d5e280)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b
created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203

goroutine 6 [syscall, 90 minutes]:
os/signal.signal_recv(0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6
os/signal.loop()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22
created by os/signal.init.0
    /gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41

goroutine 52 [sleep]:
time.Sleep(0x7a3572b)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/time.go:102 +0x166
kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol.realClock.Sleep(0x7a3572b)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol/throttle.go:66 +0x2b
kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol.(*tokenBucketRateLimiter).Accept(0xc420694f00)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/util/flowcontrol/throttle.go:91 +0xbd
kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest.(*Request).tryThrottle(0xc4210cac00)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/request.go:478 +0x1fd
kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest.(*Request).Do(0xc4210cac00, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/request.go:733 +0x62
kubevirt.io/kubevirt/pkg/kubecli.(*vmis).Get(0xc420771740, 0xc420b265f0, 0xc, 0xc420ad0840, 0xc420771740, 0xc4202cd000, 0x8)
    /root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:369 +0x125
kubevirt.io/kubevirt/tests.waitForVMIStart.func1(0x0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1055 +0xc2
reflect.Value.call(0x118b3e0, 0xc420882030, 0x13, 0x1398830, 0x4, 0xc420e28d10, 0x0, 0x0, 0x118b3e0, 0x118b3e0, ...)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:447 +0x969
reflect.Value.Call(0x118b3e0, 0xc420882030, 0x13, 0xc420e28d10, 0x0, 0x0, 0x44b21b, 0xc420aa8ff8, 0xc420e28d48)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/reflect/value.go:308 +0xa4
kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).pollActual(0xc420a80300, 0x0, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:71 +0x9f
kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).match(0xc420a80300, 0x14c4b20, 0xc42039beb0, 0x412801, 0xc42039bef0, 0x1, 0x1, 0xc42039bef0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:141 +0x305
kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion.(*AsyncAssertion).Should(0xc420a80300, 0x14c4b20, 0xc42039beb0, 0xc42039bef0, 0x1, 0x1, 0x14bbac0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go:48 +0x62
kubevirt.io/kubevirt/tests.waitForVMIStart(0x14ba840, 0xc42012a000, 0x5a, 0x0, 0x0, 0x1d7c901)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1065 +0x703
kubevirt.io/kubevirt/tests.WaitForSuccessfulVMIStart(0x14ba840, 0xc42012a000, 0x1d7c938, 0x0)
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1088 +0x43
kubevirt.io/kubevirt/tests_test.glob..func23.1.1()
    /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:59 +0x20b
kubevirt.io/kubevirt/tests.BeforeAll.func1()
    /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1375 +0x3f
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc4205da900, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113 +0x9c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc4205da900, 0xc420a1bee0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:64 +0x13e
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*SetupNode).Run(0xc420146168, 0x14b6ce0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes.go:15 +0x7f
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc4208bcd20, 0x0, 0x14b6ce0, 0xc4200c74c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:181 +0x1f1
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc4208bcd20, 0x14b6ce0, 0xc4200c74c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:138 +0xff
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc420734280, 0xc4208bcd20, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:200 +0x10d
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc420734280, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:170 +0x329
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc420734280, 0xb)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:66 +0x11b
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc4200ecaf0, 0x7fca9adf22e0, 0xc4204c5e00, 0x13a0d58, 0xb, 0xc420408520, 0x2, 0x2, 0x14d35e0, 0xc4200c74c0, ...)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x14b7d40, 0xc4204c5e00, 0x13a0d58, 0xb, 0xc420408500, 0x2, 0x2, 0x2)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:221 +0x258
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x14b7d40, 0xc4204c5e00, 0x13a0d58, 0xb, 0xc420530a20, 0x1, 0x1, 0x1)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:209 +0xab
kubevirt.io/kubevirt/tests_test.TestTests(0xc4204c5e00)
    /root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa
testing.tRunner(0xc4204c5e00, 0x1430cc8)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
created by testing.(*T).Run
    /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0

goroutine 53 [chan receive, 90 minutes]:
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc420734280, 0xc420048840)
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:223 +0xd1
created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run
    /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:60 +0x88

goroutine 54 [select, 90 minutes, locked to thread]:
runtime.gopark(0x1432ea0, 0x0, 0x139b297, 0x6, 0x18, 0x1)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a
runtime.selectgo(0xc420489750, 0xc420048900)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50
runtime.ensureSigM.func1()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4
runtime.goexit()
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1

goroutine 32 [IO wait]:
internal/poll.runtime_pollWait(0x7fca9add0f00, 0x72, 0xc420f7d850)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57
internal/poll.(*pollDesc).wait(0xc420712398, 0x72, 0xffffffffffffff00, 0x14b8f00, 0x1c497d0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b
internal/poll.(*pollDesc).waitRead(0xc420712398, 0xc42094c000, 0x8000, 0x8000)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d
internal/poll.(*FD).Read(0xc420712380, 0xc42094c000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d
net.(*netFD).Read(0xc420712380, 0xc42094c000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f
net.(*conn).Read(0xc42000eaf8, 0xc42094c000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a
crypto/tls.(*block).readFromUntil(0xc4205930b0, 0x7fca9acec5a0, 0xc42000eaf8, 0x5, 0xc42000eaf8, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96
crypto/tls.(*Conn).readRecord(0xc42071e380, 0x1432f17, 0xc42071e4a0, 0x20)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0
crypto/tls.(*Conn).Read(0xc42071e380, 0xc4206bb000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100
bufio.(*Reader).Read(0xc4209643c0, 0xc4203d4498, 0x9, 0x9, 0xc4208a1a98, 0xc4206b6780, 0xc420f7dd10)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238
io.ReadAtLeast(0x14b5ae0, 0xc4209643c0, 0xc4203d4498, 0x9, 0x9, 0x9, 0xc420f7dce0, 0xc420f7dce0, 0x406614)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86
io.ReadFull(0x14b5ae0, 0xc4209643c0, 0xc4203d4498, 0x9, 0x9, 0xc4208a1a40, 0xc420f7dd10, 0xc400007301)
    /gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc4203d4498, 0x9, 0x9, 0x14b5ae0, 0xc4209643c0, 0x0, 0xc400000000, 0x7ef9ad, 0xc420f7dfb0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc4203d4460, 0xc4204ae210, 0x0, 0x0, 0x0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc420f7dfb0, 0x1431c20, 0xc4204837b0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc42071c4e0)
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76
created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn
    /root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651

goroutine 106 [chan send, 89 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420b14de0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 3746 [chan send, 68 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420708000)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 8013 [chan send, 44 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208d3740)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 1815 [chan send, 80 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4207a9b00)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 2850 [chan send, 74 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4206e5080)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 4262 [chan send, 65 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208e9200)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 1261 [chan send, 83 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4207ea450)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 671 [chan send, 86 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208e9980)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 6410 [chan send, 53 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4206e42a0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 2307 [chan send, 77 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42086b7a0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 4837 [chan send, 62 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208e95c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 9124 [chan send, 38 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4204af500)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 6926 [chan send, 50 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420878780)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 5344 [chan send, 59 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4206e5e60)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 15317 [chan send, 2 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208f03f0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 11210 [chan send, 26 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208f0780)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 5894 [chan send, 56 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42038ba10)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 10121 [chan send, 32 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208caf00)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 13796 [chan send, 11 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420443020)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 14812 [chan send, 5 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4206e5bf0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 8562 [chan send, 41 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208e97a0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 7456 [chan send, 47 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420a29020)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 9637 [chan send, 35 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208e8d80)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 10638 [chan send, 29 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4202cfec0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 10673 [chan send, 14 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208e8210)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 12780 [chan send, 17 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420b146c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 11722 [chan send, 23 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420708150)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 12268 [chan send, 20 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4207ea9f0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 14309 [chan send, 8 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4208d38c0)
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
    /root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

make: *** [functest] Error 2
+ make cluster-down
./cluster/down.sh