+ export WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ WORKSPACE=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release
+ [[ openshift-3.10-release =~ openshift-.* ]]
+ [[ openshift-3.10-release =~ .*-crio-.* ]]
+ export KUBEVIRT_PROVIDER=os-3.10.0
+ KUBEVIRT_PROVIDER=os-3.10.0
+ export KUBEVIRT_NUM_NODES=2
+ KUBEVIRT_NUM_NODES=2
+ export NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ NFS_WINDOWS_DIR=/home/nfs/images/windows2016
+ export NAMESPACE=kube-system
+ NAMESPACE=kube-system
+ trap '{ make cluster-down; }' EXIT SIGINT SIGTERM SIGSTOP
+ make cluster-down
./cluster/down.sh
+ make cluster-up
./cluster/up.sh
Downloading .......
Downloading .......
2018/08/10 12:31:56 Waiting for host: 192.168.66.102:22
2018/08/10 12:31:59 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/10 12:32:07 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/10 12:32:15 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/10 12:32:23 Problem with dial: dial tcp 192.168.66.102:22: getsockopt: no route to host. Sleeping 5s
2018/08/10 12:32:28 Connected to tcp://192.168.66.102:22
+ systemctl stop origin-node.service
+ rm -rf /etc/origin/ /etc/etcd/ /var/lib/origin /var/lib/etcd/
++ docker ps -q
+ containers=
+ '[' -n '' ']'
++ docker ps -q -a
+ containers='8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8'
+ '[' -n '8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8' ']'
+ docker rm -f 8d985261fee2 cdd5cb050f63 3b39c685d71c 1fe63ccc7eb5 7fb562e6d373 7357d7573809 4cd9d31e59d6 3dd2b4e034df b8bd1bfbd6fe 2b04452ad9d2 5a8d8e270d07 5bcee7ba14b9 d317f6c94b21 249a96e5b5be bf8eede72349 37a4fab28e2d 08ce4e71eea8 e4e0f326f8c6 0a856ce6e727 64a97837a605 4e67e156ec7c 6b2d2d048d59 8b938906ea49 6572244ef7bc b44328453c0f f0bde1a8d259 61984dd36d89 d0b161b029c9 6b12e033271c 7cd73847cb28 c98ef990eea8
8d985261fee2
cdd5cb050f63
3b39c685d71c
1fe63ccc7eb5
7fb562e6d373
7357d7573809
4cd9d31e59d6
3dd2b4e034df
b8bd1bfbd6fe
2b04452ad9d2
5a8d8e270d07
5bcee7ba14b9
d317f6c94b21
249a96e5b5be
bf8eede72349
37a4fab28e2d
08ce4e71eea8
e4e0f326f8c6
0a856ce6e727
64a97837a605
4e67e156ec7c
6b2d2d048d59
8b938906ea49
6572244ef7bc
b44328453c0f
f0bde1a8d259
61984dd36d89
d0b161b029c9
6b12e033271c
7cd73847cb28
c98ef990eea8
2018/08/10 12:32:31 Waiting for host: 192.168.66.101:22
2018/08/10 12:32:34 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/10 12:32:42 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/10 12:32:50 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: no route to host. Sleeping 5s
2018/08/10 12:32:55 Problem with dial: dial tcp 192.168.66.101:22: getsockopt: connection refused. Sleeping 5s
2018/08/10 12:33:00 Connected to tcp://192.168.66.101:22
+ inventory_file=/root/inventory
+ openshift_ansible=/root/openshift-ansible
+ echo '[new_nodes]'
+ sed -i '/\[OSEv3:children\]/a new_nodes' /root/inventory
+ nodes_found=false
++ seq 2 100
+ for i in '$(seq 2 100)'
++ printf node%02d 2
+ node=node02
++ printf 192.168.66.1%02d 2
+ node_ip=192.168.66.102
+ set +e
+ ping 192.168.66.102 -c 1
PING 192.168.66.102 (192.168.66.102) 56(84) bytes of data.
64 bytes from 192.168.66.102: icmp_seq=1 ttl=64 time=1.61 ms

--- 192.168.66.102 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 1.617/1.617/1.617/0.000 ms
+ '[' 0 -ne 0 ']'
+ nodes_found=true
+ set -e
+ echo '192.168.66.102 node02'
+ echo 'Found node02. Adding it to the inventory.'
Found node02. Adding it to the inventory.
+ echo 'node02 openshift_node_group_name="node-config-compute" openshift_schedulable=true openshift_ip=192.168.66.102'
+ for i in '$(seq 2 100)'
++ printf node%02d 3
+ node=node03
++ printf 192.168.66.1%02d 3
+ node_ip=192.168.66.103
+ set +e
+ ping 192.168.66.103 -c 1
PING 192.168.66.103 (192.168.66.103) 56(84) bytes of data.
From 192.168.66.101 icmp_seq=1 Destination Host Unreachable

--- 192.168.66.103 ping statistics ---
1 packets transmitted, 0 received, +1 errors, 100% packet loss, time 0ms

+ '[' 1 -ne 0 ']'
+ break
+ '[' true = true ']'
+ ansible-playbook -i /root/inventory /root/openshift-ansible/playbooks/openshift-node/scaleup.yml

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)

TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_lb_to_config] ************************************************

TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_glusterfs_to_config] *****************************************

TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)

PLAY [Ensure there are new_nodes] **********************************************

TASK [fail] ********************************************************************
skipping: [localhost]

TASK [fail] ********************************************************************
skipping: [localhost]

PLAY [Initialization Checkpoint Start] *****************************************

TASK [Set install initialization 'In Progress'] ********************************
ok: [node01]

PLAY [Populate config host groups] *********************************************

TASK [Load group name mapping variables] ***************************************
ok: [localhost]

TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] *********
skipping: [localhost]

TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] *************
skipping: [localhost]

TASK [Evaluate groups - g_lb_hosts required] ***********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts required] **********************************
skipping: [localhost]

TASK [Evaluate groups - g_nfs_hosts is single host] ****************************
skipping: [localhost]

TASK [Evaluate groups - g_glusterfs_hosts required] ****************************
skipping: [localhost]

TASK [Evaluate oo_all_hosts] ***************************************************
ok: [localhost] => (item=node01)
ok: [localhost] => (item=node02)

TASK [Evaluate oo_masters] *****************************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_master] ************************************************
ok: [localhost]

TASK [Evaluate oo_new_etcd_to_config] ******************************************

TASK [Evaluate oo_masters_to_config] *******************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_to_config] **********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_first_etcd] **************************************************
ok: [localhost]

TASK [Evaluate oo_etcd_hosts_to_upgrade] ***************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_etcd_hosts_to_backup] ****************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_nodes_to_config] *********************************************
ok: [localhost] => (item=node02)

TASK [Evaluate oo_nodes_to_bootstrap] ******************************************
ok: [localhost] => (item=node02)

TASK [Add masters to oo_nodes_to_bootstrap] ************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_lb_to_config] ************************************************

TASK [Evaluate oo_nfs_to_config] ***********************************************
ok: [localhost] => (item=node01)

TASK [Evaluate oo_glusterfs_to_config] *****************************************

TASK [Evaluate oo_etcd_to_migrate] *********************************************
ok: [localhost] => (item=node01)
 [WARNING]: Could not match supplied host pattern, ignoring: oo_lb_to_config

PLAY [Ensure that all non-node hosts are accessible] ***************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

PLAY [Initialize basic host facts] *********************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml for node01, node02

TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] ***
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : debug] ************************************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : set_stats] ********************************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Assign deprecated variables to correct counterparts] ***
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for node01, node02
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for node01, node02

TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : set_fact] *********************************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : conditional_set_fact] *********************
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : Standardize on latest variable names] *****
ok: [node01]
ok: [node02]

TASK [openshift_sanitize_inventory : Normalize openshift_release] **************
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : include_tasks] ****************************
included: /root/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml for node01, node02

TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : set_fact] *********************************
skipping: [node01] => (item=openshift_hosted_etcd_storage_kind)
skipping: [node02] => (item=openshift_hosted_etcd_storage_kind)

TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that all requires vsphere configuration variables are set] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : ensure provider configuration variables are defined] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure removed web console extension variables are not set] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : Ensure that web console port matches API server port] ***
skipping: [node01]
skipping: [node02]

TASK [openshift_sanitize_inventory : At least one master is schedulable] *******
skipping: [node01]
skipping: [node02]

TASK [Detecting Operating System from ostree_booted] ***************************
ok: [node01]
ok: [node02]

TASK [set openshift_deployment_type if unset] **********************************
skipping: [node01]
skipping: [node02]

TASK [check for node already bootstrapped] *************************************
ok: [node01]
ok: [node02]

TASK [initialize_facts set fact openshift_is_bootstrapped] *********************
ok: [node01]
ok: [node02]

TASK [initialize_facts set fact openshift_is_atomic and openshift_is_containerized] ***
ok: [node01]
ok: [node02]

TASK [Determine Atomic Host Docker Version] ************************************
skipping: [node01]
skipping: [node02]

TASK [assert atomic host docker version is 1.12 or later] **********************
skipping: [node01]
skipping: [node02]

PLAY [Retrieve existing master configs and validate] ***************************

TASK [openshift_control_plane : stat] ******************************************
ok: [node01]

TASK [openshift_control_plane : slurp] *****************************************
ok: [node01]

TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]

TASK [openshift_control_plane : Check for file paths outside of /etc/origin/master in master's config] ***
ok: [node01]

TASK [openshift_control_plane : set_fact] **************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Initialize special first-master variables] *******************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Disable web console if required] *****************************************

TASK [set_fact] ****************************************************************
skipping: [node01]

PLAY [Setup yum repositories for all hosts] ************************************

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]

TASK [rhel_subscribe : Install Red Hat Subscription manager] *******************
skipping: [node02]

TASK [rhel_subscribe : Is host already registered?] ****************************
skipping: [node02]

TASK [rhel_subscribe : Register host] ******************************************
skipping: [node02]

TASK [rhel_subscribe : fail] ***************************************************
skipping: [node02]

TASK [rhel_subscribe : Determine if OpenShift Pool Already Attached] ***********
skipping: [node02]

TASK [rhel_subscribe : Attach to OpenShift Pool] *******************************
skipping: [node02]

TASK [rhel_subscribe : Satellite preparation] **********************************
skipping: [node02]

TASK [openshift_repos : openshift_repos detect ostree] *************************
ok: [node02]

TASK [openshift_repos : Ensure libselinux-python is installed] *****************
ok: [node02]

TASK [openshift_repos : Remove openshift_additional.repo file] *****************
ok: [node02]

TASK [openshift_repos : Create any additional repos that are defined] **********

TASK [openshift_repos : include_tasks] *****************************************
skipping: [node02]

TASK [openshift_repos : include_tasks] *****************************************
included: /root/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml for node02

TASK [openshift_repos : Configure origin gpg keys] *****************************
ok: [node02]

TASK [openshift_repos : Configure correct origin release repository] ***********
ok: [node02] => (item=/root/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin.repo.j2)

TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] ***
changed: [node02] => {
    "msg": "First run of openshift_repos"
}

TASK [openshift_repos : Record that openshift_repos already ran] ***************
ok: [node02]

RUNNING HANDLER [openshift_repos : refresh cache] ******************************
changed: [node02]

PLAY [Install packages necessary for installer] ********************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [Determine if chrony is installed] ****************************************
changed: [node02]
 [WARNING]: Consider using the yum, dnf or zypper module rather than running
rpm. If you need to use command because yum, dnf or zypper is insufficient you
can add warn=False to this command task or set command_warnings=False in
ansible.cfg to get rid of this message.

TASK [Install ntp package] *****************************************************
skipping: [node02]

TASK [Start and enable ntpd/chronyd] *******************************************
changed: [node02]

TASK [Ensure openshift-ansible installer package deps are installed] ***********
ok: [node02] => (item=iproute)
ok: [node02] => (item=dbus-python)
ok: [node02] => (item=PyYAML)
ok: [node02] => (item=python-ipaddress)
ok: [node02] => (item=libsemanage-python)
ok: [node02] => (item=yum-utils)
changed: [node02] => (item=python-docker)

PLAY [Initialize cluster facts] ************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]
ok: [node01]

TASK [get openshift_current_version] *******************************************
ok: [node02]
ok: [node01]

TASK [set_fact openshift_portal_net if present on masters] *********************
ok: [node02]
ok: [node01]

TASK [Gather Cluster facts] ****************************************************
changed: [node02]
changed: [node01]

TASK [Set fact of no_proxy_internal_hostnames] *********************************
skipping: [node01]
skipping: [node02]

TASK [Initialize openshift.node.sdn_mtu] ***************************************
changed: [node02]
ok: [node01]

PLAY [Initialize etcd host variables] ******************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

TASK [set_fact] ****************************************************************
ok: [node01]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [include_role : openshift_version] ****************************************

TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
ok: [node01]

TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "msg": "openshift_pkg_version was not defined. Falling back to -3.10.0"
}

TASK [openshift_version : set_fact] ********************************************
ok: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}

TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Verify Requirements] *****************************************************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [Run variable sanity checks] **********************************************
ok: [node01]

TASK [Validate openshift_node_groups and openshift_node_group_name] ************
ok: [node01]

PLAY [Initialization Checkpoint End] *******************************************

TASK [Set install initialization 'Complete'] ***********************************
ok: [node01]

PLAY [Validate node hostnames] *************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [Query DNS for IP address of node02] **************************************
ok: [node02]

TASK [Validate openshift_hostname when defined] ********************************
skipping: [node02]

TASK [Validate openshift_ip exists on node when defined] ***********************
skipping: [node02]

PLAY [Configure os_firewall] ***************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [os_firewall : Detecting Atomic Host Operating System] ********************
ok: [node02]

TASK [os_firewall : Set fact r_os_firewall_is_atomic] **************************
ok: [node02]

TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] **********
skipping: [node02]

TASK [os_firewall : Install firewalld packages] ********************************
skipping: [node02]

TASK [os_firewall : Ensure iptables services are not enabled] ******************
skipping: [node02] => (item=iptables)
skipping: [node02] => (item=ip6tables)

TASK [os_firewall : Wait 10 seconds after disabling iptables] ******************
skipping: [node02]

TASK [os_firewall : Start and enable firewalld service] ************************
skipping: [node02]

TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] ***
skipping: [node02]

TASK [os_firewall : Restart polkitd] *******************************************
skipping: [node02]

TASK [os_firewall : Wait for polkit action to have been created] ***************
skipping: [node02]

TASK [os_firewall : Ensure firewalld service is not enabled] *******************
ok: [node02]

TASK [os_firewall : Wait 10 seconds after disabling firewalld] *****************
skipping: [node02]

TASK [os_firewall : Install iptables packages] *********************************
ok: [node02] => (item=iptables)
ok: [node02] => (item=iptables-services)

TASK [os_firewall : Start and enable iptables service] *************************
ok: [node02 -> node02] => (item=node02)

TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] ***
skipping: [node02]

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [container_runtime : Setup the docker-storage for overlay] ****************
skipping: [node02]

TASK [container_runtime : Create file system on extra volume device] ***********

TASK [container_runtime : Create mount entry for extra volume] *****************

PLAY [oo_nodes_to_config] ******************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [openshift_excluder : Install docker excluder - yum] **********************
ok: [node02]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]

TASK [openshift_excluder : set_fact] *******************************************
ok: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : Enable openshift excluder] **************************
skipping: [node02]

TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]

TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]

TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]

TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]

TASK [container_runtime : Add http_proxy to /etc/atomic.conf] ******************
skipping: [node02]

TASK [container_runtime : Add https_proxy to /etc/atomic.conf] *****************
skipping: [node02]

TASK [container_runtime : Add no_proxy to /etc/atomic.conf] ********************
skipping: [node02]

TASK [container_runtime : Get current installed Docker version] ****************
ok: [node02]

TASK [container_runtime : Error out if Docker pre-installed but too old] *******
skipping: [node02]

TASK [container_runtime : Error out if requested Docker is too old] ************
skipping: [node02]

TASK [container_runtime : Install Docker] **************************************
skipping: [node02]

TASK [container_runtime : Ensure docker.service.d directory exists] ************
ok: [node02]

TASK [container_runtime : Configure Docker service unit file] ******************
ok: [node02]

TASK [container_runtime : stat] ************************************************
ok: [node02]

TASK [container_runtime : Set registry params] *********************************
skipping: [node02] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []})
skipping: [node02] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []})

TASK [container_runtime : Place additional/blocked/insecure registries in /etc/containers/registries.conf] ***
skipping: [node02]

TASK [container_runtime : Set Proxy Settings] **********************************
skipping: [node02] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''})
skipping: [node02] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''})

TASK [container_runtime : Set various Docker options] **************************
ok: [node02]

TASK [container_runtime : stat] ************************************************
ok: [node02]

TASK [container_runtime : Configure Docker Network OPTIONS] ********************
ok: [node02]

TASK [container_runtime : Detect if docker is already started] *****************
ok: [node02]

TASK [container_runtime : Start the Docker service] ****************************
ok: [node02]

TASK [container_runtime : set_fact] ********************************************
ok: [node02]

TASK [container_runtime : Check for docker_storage_path/overlay2] **************
ok: [node02]

TASK [container_runtime : Fixup SELinux permissions for docker] ****************
changed: [node02]

TASK [container_runtime : Ensure /var/lib/containers exists] *******************
ok: [node02]

TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
ok: [node02]

TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]

TASK [container_runtime : stat the docker data dir] ****************************
ok: [node02]

TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]

TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]

TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]

TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]

TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]

TASK [container_runtime : Setup the link] **************************************
skipping: [node02]

TASK [container_runtime : start docker] ****************************************
skipping: [node02]

TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
skipping: [node02]

TASK [container_runtime : Getting current systemd-udevd exec command] **********
skipping: [node02]

TASK [container_runtime : Assure systemd-udevd.service.d directory exists] *****
skipping: [node02]

TASK [container_runtime : Create systemd-udevd override file] ******************
skipping: [node02]

TASK [container_runtime : Add enterprise registry, if necessary] ***************
skipping: [node02]

TASK [container_runtime : Check that overlay is in the kernel] *****************
skipping: [node02]

TASK [container_runtime : Add overlay to modprobe.d] ***************************
skipping: [node02]

TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
skipping: [node02]

TASK [container_runtime : Enable and start systemd-modules-load] ***************
skipping: [node02]

TASK [container_runtime : Install cri-o] ***************************************
skipping: [node02]

TASK [container_runtime : Remove CRI-O default configuration files] ************
skipping: [node02] => (item=/etc/cni/net.d/200-loopback.conf)
skipping: [node02] => (item=/etc/cni/net.d/100-crio-bridge.conf)

TASK [container_runtime : Create the CRI-O configuration] **********************
skipping: [node02]

TASK [container_runtime : Ensure CNI configuration directory exists] ***********
skipping: [node02]

TASK [container_runtime : Add iptables allow rules] ****************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})

TASK [container_runtime : Remove iptables rules] *******************************

TASK [container_runtime : Add firewalld allow rules] ***************************
skipping: [node02] => (item={u'port': u'10010/tcp', u'service': u'crio'})

TASK [container_runtime : Remove firewalld allow rules] ************************

TASK [container_runtime : Configure the CNI network] ***************************
skipping: [node02]

TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
skipping: [node02]

TASK [container_runtime : Start the CRI-O service] *****************************
skipping: [node02]

TASK [container_runtime : Ensure /var/lib/containers exists] *******************
skipping: [node02]

TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
skipping: [node02]

TASK [container_runtime : Check for credentials file for registry auth] ********
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth] *****
skipping: [node02]

TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
skipping: [node02]

TASK [container_runtime : stat the docker data dir] ****************************
skipping: [node02]

TASK [container_runtime : stop the current running docker] *********************
skipping: [node02]

TASK [container_runtime : copy "/var/lib/docker" to "/var/lib/containers/docker"] ***
skipping: [node02]

TASK [container_runtime : Set the selinux context on /var/lib/containers/docker] ***
skipping: [node02]

TASK [container_runtime : restorecon the /var/lib/containers/docker] ***********
skipping: [node02]

TASK [container_runtime : Remove the old docker location] **********************
skipping: [node02]

TASK [container_runtime : Setup the link] **************************************
skipping: [node02]

TASK [container_runtime : start docker] ****************************************
skipping: [node02]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [Gathering Facts] *********************************************************
ok: [node01]

TASK [include_role : openshift_version] ****************************************

TASK [openshift_version : Use openshift_current_version fact as version to configure if already installed] ***
skipping: [node01]

TASK [openshift_version : Set openshift_version to openshift_release if undefined] ***
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : debug] ***********************************************
skipping: [node01]

TASK [openshift_version : set_fact] ********************************************
skipping: [node01]

TASK [openshift_version : assert openshift_release in openshift_image_tag] *****
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : assert openshift_release in openshift_pkg_version] ***
ok: [node01] => {
    "changed": false,
    "msg": "All assertions passed"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_release": "3.10"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_image_tag": "v3.10.0-rc.0"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_pkg_version": "-3.10.0*"
}

TASK [openshift_version : debug] ***********************************************
ok: [node01] => {
    "openshift_version": "3.10.0"
}

TASK [set openshift_version booleans (first master)] ***************************
ok: [node01]

PLAY [Set openshift_version for etcd, node, and master hosts] ******************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [set_fact] ****************************************************************
ok: [node02]

TASK [set openshift_version booleans (masters and nodes)] **********************
ok: [node02]

PLAY [Node Preparation Checkpoint Start] ***************************************

TASK [Set Node preparation 'In Progress'] **************************************
ok: [node01]

PLAY [Only target nodes that have not yet been bootstrapped] *******************

TASK [Gathering Facts] *********************************************************
ok: [localhost]

TASK [add_host] ****************************************************************
skipping: [localhost] => (item=node02)
ok: [localhost] => (item=node01)

PLAY [Disable excluders] *******************************************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
ok: [node02]

TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_docker_excluder": true
}

TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
ok: [node02] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}

TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
skipping: [node02]

TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
skipping: [node02]

TASK [openshift_excluder : Include main action task file] **********************
included: /root/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for node02

TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]

TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]

TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]

TASK [openshift_excluder : origin-docker-excluder version detected] ************
skipping: [node02]

TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]

TASK [openshift_excluder : Check the available origin-docker-excluder version is at most of the upgrade target version] ***
skipping: [node02]

TASK [openshift_excluder : Get available excluder version] *********************
skipping: [node02]

TASK [openshift_excluder : Fail when excluder package is not found] ************
skipping: [node02]

TASK [openshift_excluder : Set fact excluder_version] **************************
skipping: [node02]

TASK [openshift_excluder : origin-excluder version detected] *******************
skipping: [node02]

TASK [openshift_excluder : Printing upgrade target version] ********************
skipping: [node02]

TASK [openshift_excluder : Check the available origin-excluder version is at most of the upgrade target version] ***
skipping: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : disable docker excluder] ****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]

TASK [openshift_excluder : Install docker excluder - yum] **********************
skipping: [node02]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
skipping: [node02]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
skipping: [node02]

TASK [openshift_excluder : set_fact] *******************************************
skipping: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : Enable docker excluder] *****************************
changed: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : Enable openshift excluder] **************************
changed: [node02]

TASK [openshift_excluder : Check for docker-excluder] **************************
ok: [node02]

TASK [openshift_excluder : disable docker excluder] ****************************
skipping: [node02]

TASK [openshift_excluder : Check for openshift excluder] ***********************
ok: [node02]

TASK [openshift_excluder : disable openshift excluder] *************************
changed: [node02]

PLAY [Configure nodes] *********************************************************

TASK [Gathering Facts] *********************************************************
ok: [node02]

TASK [openshift_cloud_provider : Set cloud provider facts] *********************
skipping: [node02]

TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
skipping: [node02]

TASK [openshift_cloud_provider : include the defined cloud provider files] *****
skipping: [node02]

TASK [openshift_node : fail] ***************************************************
skipping: [node02]

TASK [openshift_node : Check for NetworkManager service] ***********************
ok: [node02]

TASK [openshift_node : Set fact using_network_manager] *************************
ok: [node02]

TASK [openshift_node : Install dnsmasq] ****************************************
ok: [node02]

TASK [openshift_node : ensure origin/node directory exists] ********************
changed: [node02] => (item=/etc/origin)
changed: [node02] => (item=/etc/origin/node)

TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
skipping: [node02]

TASK [openshift_node : Install network manager dispatch script] ****************
skipping: [node02]

TASK [openshift_node : Install dnsmasq configuration] **************************
ok: [node02]

TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
skipping: [node02]

TASK [openshift_node : Enable dnsmasq] *****************************************
ok: [node02]

TASK [openshift_node : Install network manager dispatch script] ****************
ok: [node02]

TASK [openshift_node : Add iptables allow rules] *******************************
ok: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
ok: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
ok: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
ok: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
ok: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})

TASK [openshift_node : Remove iptables rules] **********************************

TASK [openshift_node : Add firewalld allow rules] ******************************
skipping: [node02] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
skipping: [node02] => (item={u'port': u'10256/tcp', u'service': u'Kubernetes kube-proxy health check for service load balancers'})
skipping: [node02] => (item={u'port': u'80/tcp', u'service': u'http'})
skipping: [node02] => (item={u'port': u'443/tcp', u'service': u'https'})
skipping: [node02] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [node02] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'})
skipping: [node02] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'})
skipping: [node02] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'})

TASK [openshift_node : Remove firewalld allow rules] ***************************

TASK [openshift_node : Checking for journald.conf] *****************************
ok: [node02]

TASK [openshift_node : Create journald persistence directories] ****************
ok: [node02]

TASK [openshift_node : Update journald setup] **********************************
ok: [node02] => (item={u'var': u'Storage', u'val': u'persistent'})
ok: [node02] => (item={u'var': u'Compress', u'val': True})
ok: [node02] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
ok: [node02] => (item={u'var': u'RateLimitBurst', u'val': 10000})
ok: [node02] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [node02] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
ok: [node02] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
ok: [node02] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [node02] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
ok: [node02] => (item={u'var': u'ForwardToSyslog', u'val': False})
ok: [node02] => (item={u'var': u'ForwardToWall', u'val': False})

TASK [openshift_node : Restart journald] ***************************************
skipping: [node02]

TASK [openshift_node : Disable swap] *******************************************
ok: [node02]

TASK [openshift_node : Install node, clients, and conntrack packages] **********
ok: [node02] => (item={u'name': u'origin-node-3.10.0*'})
ok: [node02] => (item={u'name': u'origin-clients-3.10.0*'})
ok: [node02] => (item={u'name': u'conntrack-tools'})

TASK [openshift_node : Restart cri-o] ******************************************
skipping: [node02]

TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
changed: [node02]

TASK [openshift_node : sysctl] *************************************************
ok: [node02]

TASK [openshift_node : Check for credentials file for registry auth] ***********
skipping: [node02]

TASK [openshift_node : Create credentials for registry auth] *******************
skipping: [node02]

TASK [openshift_node : Create credentials for registry auth (alternative)] *****
skipping: [node02]

TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
skipping: [node02]

TASK [openshift_node : Check that node image is present] ***********************
changed: [node02]

TASK [openshift_node : Pre-pull node image] ************************************
skipping: [node02]

TASK [openshift_node : Copy node script to the node] ***************************
ok: [node02]

TASK [openshift_node : Install Node service file] ******************************
ok: [node02]

TASK [openshift_node : Ensure old system path is set] **************************
skipping: [node02] => (item=/etc/origin/openvswitch)
skipping: [node02] => (item=/var/lib/kubelet)
skipping: [node02] => (item=/opt/cni/bin)

TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]

TASK [openshift_node : Copy node container image to ostree storage] ************
skipping: [node02]

TASK [openshift_node : Install or Update node system container] ****************
skipping: [node02]

TASK [openshift_node : Restart network manager to ensure networking configuration is in place] ***
skipping: [node02]

TASK [openshift_node : Configure Node settings] ********************************
ok: [node02] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS='})
ok: [node02] => (item={u'regex': u'^DEBUG_LOGLEVEL=', u'line': u'DEBUG_LOGLEVEL=2'})
ok: [node02] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.10.0-rc.0'})

TASK [openshift_node : Configure Proxy Settings] *******************************
skipping: [node02] => (item={u'regex': u'^HTTP_PROXY=', u'line': u'HTTP_PROXY='})
skipping: [node02] => (item={u'regex': u'^HTTPS_PROXY=', u'line': u'HTTPS_PROXY='})
skipping: [node02] => (item={u'regex': u'^NO_PROXY=', u'line': u'NO_PROXY=[],172.30.0.0/16,10.128.0.0/14'})

TASK [openshift_node : file] ***************************************************
skipping: [node02]

TASK [openshift_node : Create the Node config] *********************************
changed: [node02]

TASK [openshift_node : Configure Node Environment Variables] *******************

TASK [openshift_node : Ensure the node static pod directory exists] ************
changed: [node02]

TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
skipping: [node02] => (item=None)
skipping: [node02] => (item=None)
skipping: [node02]

TASK [openshift_node : Check status of node image pre-pull] ********************
skipping: [node02]

TASK [openshift_node : Install NFS storage plugin dependencies] ****************
ok: [node02]

TASK [openshift_node : Check for existence of nfs sebooleans] ******************
ok: [node02] => (item=virt_use_nfs)
ok: [node02] => (item=virt_sandbox_use_nfs)

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-10 12:41:04.607958', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.013972', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:04.593986', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-10 12:41:05.995153', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.012862', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:05.982291', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-10 12:41:04.607958', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.013972', '_ansible_item_label': u'virt_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:04.593986', '_ansible_ignore_errors': None, 'failed': False})
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-10 12:41:05.995153', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.012862', '_ansible_item_label': u'virt_sandbox_use_nfs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:05.982291', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Install GlusterFS storage plugin dependencies] **********
ok: [node02]

TASK [openshift_node : Check for existence of fusefs sebooleans] ***************
ok: [node02] => (item=virt_use_fusefs)
ok: [node02] => (item=virt_sandbox_use_fusefs)

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] ***
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-10 12:41:14.042761', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.012456', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:14.030305', '_ansible_ignore_errors': None, 'failed': False})
ok: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-10 12:41:15.504929', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.012736', '_ansible_item_label': u'virt_sandbox_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:15.492193', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] ***
skipping: [node02] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-08-10 12:41:14.042761', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> on', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.012456', '_ansible_item_label': u'virt_use_fusefs', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:14.030305', '_ansible_ignore_errors': None, 'failed': False})
u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> on'], 'failed_when_result': False, u'start': u'2018-08-10 12:41:15.492193', '_ansible_ignore_errors': None, 'failed': False}) TASK [openshift_node : Install Ceph storage plugin dependencies] *************** ok: [node02] TASK [openshift_node : Install iSCSI storage plugin dependencies] ************** ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=device-mapper-multipath) TASK [openshift_node : restart services] *************************************** ok: [node02] => (item=multipathd) ok: [node02] => (item=rpcbind) ok: [node02] => (item=iscsid) TASK [openshift_node : Template multipath configuration] *********************** changed: [node02] TASK [openshift_node : Enable and start multipath] ***************************** changed: [node02] TASK [tuned : Check for tuned package] ***************************************** ok: [node02] TASK [tuned : Set tuned OpenShift variables] *********************************** ok: [node02] TASK [tuned : Ensure directory structure exists] ******************************* ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9317749, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9317749}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) ok: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': 
u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) skipping: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) TASK [tuned : Ensure files are populated from templates] *********************** skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9317749, 'owner': 'root', 'path': u'openshift-control-plane', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9317749}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) skipping: [node02] => (item={'serole': 'object_r', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'setype': 'admin_home_t', 'state': 'directory', 'gid': 0, 'mode': '0755', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift', 'size': 24, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'ctime': 1533205381.9327748}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'recommend.conf', 'size': 290, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'size': 744, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': 's0', 'seuser': 'unconfined_u', 'serole': 'object_r', 'ctime': 1533205381.9327748, 'state': 'file', 'gid': 0, 'mode': '0644', 'mtime': 1533205381.9327748, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'size': 135, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': 'admin_home_t'}) ok: [node02] => (item={'src': u'/root/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'selevel': u's0', 'seuser': u'unconfined_u', 'serole': u'object_r', 'ctime': 1533205381.9327748, 'state': u'file', 'gid': 0, 'mode': u'0644', 'mtime': 1533205381.9327748, 'owner': u'root', 'path': u'openshift/tuned.conf', 'size': 594, 'root': u'/root/openshift-ansible/roles/tuned/templates', 'setype': u'admin_home_t'}) TASK [tuned : Make tuned use 
the recommended tuned profile on restart] ********* changed: [node02] => (item=/etc/tuned/active_profile) changed: [node02] => (item=/etc/tuned/profile_mode) TASK [tuned : Restart tuned service] ******************************************* changed: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* ok: [node02] TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** PLAY [node bootstrap config] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] TASK [openshift_node : install needed rpm(s)] ********************************** ok: [node02] => (item=origin-node) ok: [node02] => (item=origin-docker-excluder) ok: [node02] => (item=ansible) ok: [node02] => (item=bash-completion) ok: [node02] => (item=docker) ok: [node02] => (item=haproxy) ok: [node02] => (item=dnsmasq) ok: [node02] => (item=ntp) ok: [node02] => (item=logrotate) ok: [node02] => (item=httpd-tools) ok: [node02] => (item=bind-utils) ok: [node02] => (item=firewalld) ok: [node02] => (item=libselinux-python) ok: [node02] => (item=conntrack-tools) ok: [node02] => (item=openssl) ok: [node02] => (item=iproute) ok: [node02] => (item=python-dbus) ok: [node02] => (item=PyYAML) ok: [node02] => (item=yum-utils) ok: [node02] => (item=glusterfs-fuse) ok: [node02] => (item=device-mapper-multipath) ok: [node02] => (item=nfs-utils) ok: [node02] => (item=cockpit-ws) ok: [node02] => (item=cockpit-system) ok: [node02] => (item=cockpit-bridge) ok: [node02] => (item=cockpit-docker) ok: [node02] => (item=iscsi-initiator-utils) ok: [node02] => (item=ceph-common) TASK [openshift_node : create the directory for node] ************************** skipping: [node02] TASK [openshift_node : laydown systemd override] ******************************* skipping: [node02] TASK [openshift_node : update the sysconfig to have necessary variables] ******* ok: [node02] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'}) TASK [openshift_node : Configure AWS Cloud Provider Settings] ****************** skipping: [node02] => (item=None) skipping: [node02] => (item=None) skipping: [node02] TASK [openshift_node : disable origin-node service] **************************** changed: [node02] => (item=origin-node.service) TASK [openshift_node : Check for RPM generated config marker file .config_managed] *** ok: [node02] TASK [openshift_node : create directories for bootstrapping] ******************* ok: [node02] => (item=/root/openshift_bootstrap) changed: [node02] => (item=/var/lib/origin/openshift.local.config) changed: [node02] => (item=/var/lib/origin/openshift.local.config/node) ok: [node02] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000) TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] *** ok: [node02] TASK [openshift_node : Create a symlink to the node client CA for the docker registry] *** ok: [node02] TASK [openshift_node : Remove RPM generated config files if present] *********** skipping: [node02] => (item=master) skipping: [node02] => (item=.config_managed) TASK [openshift_node : find all files in /etc/origin/node so we can remove them] *** skipping: [node02] TASK [openshift_node : Remove everything except the resolv.conf required for node] *** skipping: [node02] TASK [openshift_node_group : create node config template] ********************** changed: [node02] TASK [openshift_node_group : remove existing node config] 
********************** changed: [node02] TASK [openshift_node_group : Ensure required directories are present] ********** ok: [node02] => (item=/etc/origin/node/pods) changed: [node02] => (item=/etc/origin/node/certificates) TASK [openshift_node_group : Update the sysconfig to group "node-config-compute"] *** changed: [node02] TASK [set_fact] **************************************************************** ok: [node02] PLAY [Re-enable excluder if it was previously enabled] ************************* TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* ok: [node02] TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_docker_excluder": true } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** ok: [node02] => { "r_openshift_excluder_enable_openshift_excluder": true } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** skipping: [node02] TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** skipping: [node02] TASK [openshift_excluder : Include main action task file] ********************** included: /root/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for node02 TASK [openshift_excluder : Install docker excluder - yum] ********************** skipping: [node02] TASK [openshift_excluder : Install docker excluder - dnf] ********************** skipping: [node02] TASK [openshift_excluder : Install openshift excluder - yum] ******************* skipping: [node02] TASK [openshift_excluder : Install openshift excluder - dnf] ******************* skipping: [node02] TASK [openshift_excluder : set_fact] ******************************************* skipping: [node02] TASK [openshift_excluder : Check for docker-excluder] ************************** ok: [node02] TASK [openshift_excluder : Enable docker excluder] ***************************** changed: [node02] TASK [openshift_excluder : Check for openshift excluder] *********************** ok: [node02] TASK [openshift_excluder : Enable openshift excluder] ************************** changed: [node02] PLAY [Node Preparation Checkpoint End] ***************************************** TASK [Set Node preparation 'Complete'] ***************************************** ok: [node01] PLAY [Distribute bootstrap and start nodes] ************************************ TASK [openshift_node : Gather node information] ******************************** changed: [node02] ok: [node01] TASK [openshift_node : Copy master bootstrap config locally] ******************* ok: [node02 -> node01] TASK [openshift_node : Distribute bootstrap kubeconfig if one does not exist] *** ok: [node01] changed: [node02] TASK [openshift_node : Start and enable node for bootstrapping] **************** changed: [node02] changed: [node01] TASK [openshift_node : Get node logs] ****************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : debug] ************************************************** skipping: [node02] skipping: [node01] TASK [openshift_node : fail] *************************************************** skipping: [node02] skipping: [node01] PLAY [Approve any pending CSR requests from inventory nodes] ******************* TASK [Dump all candidate bootstrap hostnames] ********************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Find all hostnames for bootstrapping] ************************************ ok: [node01] TASK [Dump 
the bootstrap hostnames] ******************************************** ok: [node01] => { "msg": [ "node02", "node01" ] } TASK [Approve bootstrap nodes] ************************************************* changed: [node01] TASK [Get CSRs] **************************************************************** skipping: [node01] TASK [Report approval errors] ************************************************** skipping: [node01] PLAY [Ensure any inventory labels are applied to the nodes] ******************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [openshift_manage_node : Wait for master API to become available before proceeding] *** skipping: [node02] TASK [openshift_manage_node : Wait for Node Registration] ********************** ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/config.yml for node02, node01 TASK [openshift_manage_node : Set node schedulability] ************************* ok: [node02 -> node01] ok: [node01 -> node01] TASK [openshift_manage_node : include_tasks] *********************************** included: /root/openshift-ansible/roles/openshift_manage_node/tasks/set_default_node_role.yml for node02, node01 TASK [openshift_manage_node : Retrieve nodes that are marked with the infra selector or the legacy infra selector] *** ok: [node02 -> node01] TASK [openshift_manage_node : Label infra or legacy infra nodes with the new role label] *** TASK [openshift_manage_node : Retrieve non-infra, non-master nodes that are not yet labeled compute] *** ok: [node02 -> node01] TASK [openshift_manage_node : label non-master non-infra nodes compute] ******** TASK [openshift_manage_node : Label all-in-one master as a compute node] ******* skipping: [node02] PLAY RECAP ********************************************************************* localhost : ok=30 changed=0 unreachable=0 failed=0 node01 : ok=71 changed=3 unreachable=0 failed=0 node02 : ok=155 changed=34 unreachable=0 failed=0 INSTALLER STATUS *************************************************************** Initialization : Complete (0:03:17) Node Preparation : Complete (0:05:13) + set +e + crio=false + grep crio /root/inventory + '[' 1 -eq 0 ']' + set -e + cat + ansible-playbook -i /root/inventory post_deployment_configuration --extra-vars=crio=false PLAY [nodes, new_nodes] ******************************************************** TASK [Gathering Facts] ********************************************************* ok: [node02] ok: [node01] TASK [replace] ***************************************************************** skipping: [node01] skipping: [node02] TASK [replace] ***************************************************************** skipping: [node01] skipping: [node02] TASK [service] ***************************************************************** skipping: [node01] skipping: [node02] PLAY RECAP ********************************************************************* node01 : ok=1 changed=0 unreachable=0 failed=0 node02 : ok=1 changed=0 unreachable=0 failed=0 + set -x + /usr/bin/oc get nodes --no-headers node01 Ready compute,infra,master 8d v1.10.0+b81c8f8 node02 Ready compute 42s v1.10.0+b81c8f8 + os_rc=0 + retry_counter=0 + [[ 0 -lt 20 ]] + [[ 0 -ne 0 ]] + /usr/bin/oc create -f /tmp/local-volume.yaml storageclass.storage.k8s.io "local" created configmap "local-storage-config" created clusterrolebinding.rbac.authorization.k8s.io 
"local-storage-provisioner-pv-binding" created clusterrole.rbac.authorization.k8s.io "local-storage-provisioner-node-clusterrole" created clusterrolebinding.rbac.authorization.k8s.io "local-storage-provisioner-node-binding" created role.rbac.authorization.k8s.io "local-storage-provisioner-jobs-role" created rolebinding.rbac.authorization.k8s.io "local-storage-provisioner-jobs-rolebinding" created serviceaccount "local-storage-admin" created daemonset.extensions "local-volume-provisioner" created Sending file modes: C0755 110489328 oc Sending file modes: C0600 5645 admin.kubeconfig Cluster "node01:8443" set. Cluster "node01:8443" set. + set +e + kubectl get nodes --no-headers + cluster/kubectl.sh get nodes --no-headers node01 Ready compute,infra,master 8d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + kubectl_rc=0 + '[' 0 -ne 0 ']' ++ kubectl get nodes --no-headers ++ cluster/kubectl.sh get nodes --no-headers ++ grep NotReady + '[' -n '' ']' + set -e + echo 'Nodes are ready:' Nodes are ready: + kubectl get nodes + cluster/kubectl.sh get nodes NAME STATUS ROLES AGE VERSION node01 Ready compute,infra,master 8d v1.10.0+b81c8f8 node02 Ready compute 1m v1.10.0+b81c8f8 + make cluster-sync ./cluster/build.sh Building ... Sending build context to Docker daemon 7.168 kB Step 1/12 : FROM fedora:28 ---> cc510acfcd70 Step 2/12 : ENV LIBVIRT_VERSION 4.2.0 ---> Using cache ---> b1088795aeb6 Step 3/12 : COPY fedora-virt-preview.repo /etc/yum.repos.d/fedora-virt-preview.repo ---> 1966e43b79cf Removing intermediate container f3049cc603f8 Step 4/12 : RUN dnf -y install libvirt-devel-${LIBVIRT_VERSION} make git mercurial sudo gcc findutils gradle rsync-daemon rsync qemu-img protobuf-compiler && dnf -y clean all ---> Running in dd34c74dbdd3  Fedora 28 - x86_64 - Updates 737 kB/s | 21 MB 00:29 Virtualization packages from Rawhide built for 187 kB/s | 57 kB 00:00 Fedora 28 - x86_64 7.2 MB/s | 60 MB 00:08 Last metadata expiration check: 0:00:00 ago on Fri Aug 10 12:46:48 2018. Dependencies resolved. 
================================================================================ Package Arch Version Repository Size ================================================================================ Installing: findutils x86_64 1:4.6.0-19.fc28 updates 526 k gcc x86_64 8.1.1-5.fc28 updates 23 M git x86_64 2.17.1-3.fc28 updates 221 k gradle noarch 4.3.1-7.fc28 updates 9.1 M libvirt-devel x86_64 4.2.0-1.fc28 fedora-virt-preview 167 k make x86_64 1:4.2.1-6.fc28 fedora 497 k mercurial x86_64 4.4.2-4.fc28 fedora 4.2 M protobuf-compiler x86_64 3.5.0-4.fc28 fedora 792 k qemu-img x86_64 2:2.12.0-0.5.rc1.fc28 fedora-virt-preview 939 k rsync x86_64 3.1.3-3.fc28 fedora 403 k rsync-daemon noarch 3.1.3-3.fc28 fedora 41 k sudo x86_64 1.8.23-1.fc28 updates 864 k Upgrading: libgcc x86_64 8.1.1-5.fc28 updates 95 k Installing dependencies: aajohan-comfortaa-fonts noarch 3.001-2.fc28 fedora 147 k alsa-lib x86_64 1.1.6-2.fc28 fedora 430 k ant-lib noarch 1.10.1-10.fc28 updates 1.9 M antlr-tool noarch 2.7.7-53.fc28 fedora 364 k aopalliance noarch 1.0-17.fc28 fedora 16 k apache-commons-cli noarch 1.4-4.fc28 fedora 73 k apache-commons-codec noarch 1.11-3.fc28 fedora 287 k apache-commons-collections noarch 3.2.2-8.fc28 updates 535 k apache-commons-compress noarch 1.16.1-1.fc28 fedora 498 k apache-commons-io noarch 1:2.6-3.fc28 fedora 222 k apache-commons-lang noarch 2.6-21.fc28 fedora 281 k apache-commons-lang3 noarch 3.7-3.fc28 fedora 482 k apache-commons-logging noarch 1.2-13.fc28 fedora 86 k apache-ivy noarch 2.4.0-10.fc28 fedora 1.1 M apr x86_64 1.6.3-5.fc28 fedora 124 k aqute-bndlib noarch 3.5.0-2.fc28 fedora 1.4 M atinject noarch 1-27.20100611svn86.fc28 fedora 19 k avahi-libs x86_64 0.7-13.fc28 updates 62 k aws-sdk-java-core noarch 1.11.3-5.fc28 fedora 532 k aws-sdk-java-kms noarch 1.11.3-5.fc28 fedora 350 k aws-sdk-java-s3 noarch 1.11.3-5.fc28 fedora 566 k base64coder noarch 20101219-20.fc28 fedora 18 k bcel noarch 6.2-2.fc28 fedora 616 k beust-jcommander noarch 1.71-3.fc28 fedora 85 k binutils x86_64 2.29.1-23.fc28 updates 6.0 M bouncycastle noarch 1.59-1.fc28 updates 4.0 M bouncycastle-pg noarch 1.59-1.fc28 updates 317 k capstone x86_64 3.0.4-16.fc28 updates 487 k cdi-api noarch 1.2-7.fc28 fedora 142 k compat-openssl10 x86_64 1:1.0.2o-1.fc28 fedora 1.1 M copy-jdk-configs noarch 3.7-1.fc28 updates 26 k cpp x86_64 8.1.1-5.fc28 updates 10 M cyrus-sasl x86_64 2.1.27-0.1rc7.fc28 fedora 94 k cyrus-sasl-gssapi x86_64 2.1.27-0.1rc7.fc28 fedora 48 k ecj noarch 1:4.7.3a-1.fc28 fedora 2.6 M emacs-filesystem noarch 1:26.1-3.fc28 updates 68 k extra166y noarch 1.7.0-9.fc28 fedora 553 k felix-osgi-core noarch 1.4.0-23.fc28 fedora 104 k fipscheck x86_64 1.5.0-4.fc28 fedora 26 k fipscheck-lib x86_64 1.5.0-4.fc28 fedora 14 k fontconfig x86_64 2.13.0-4.fc28 updates 253 k fontpackages-filesystem noarch 1.44-21.fc28 fedora 15 k freetype x86_64 2.8-10.fc28 fedora 384 k gc x86_64 7.6.4-3.fc28 fedora 108 k geronimo-annotation noarch 1.0-22.fc28 fedora 24 k giflib x86_64 5.1.4-1.fc28 fedora 50 k git-core x86_64 2.17.1-3.fc28 updates 4.0 M git-core-doc noarch 2.17.1-3.fc28 updates 2.3 M glassfish-el-api noarch 3.0.1-0.7.b08.fc28 fedora 104 k glassfish-servlet-api noarch 3.1.0-14.fc28 fedora 87 k glibc-devel x86_64 2.27-8.fc28 fedora 1.0 M glibc-headers x86_64 2.27-8.fc28 fedora 454 k google-gson noarch 2.8.2-1.fc28 fedora 236 k google-guice noarch 4.1-10.fc28 fedora 469 k gpars noarch 1.2.1-11.fc28 fedora 570 k groff-base x86_64 1.22.3-15.fc28 fedora 1.0 M groovy-lib noarch 2.4.8-6.fc28 updates 14 M guava20 noarch 20.0-6.fc28 
updates 2.1 M guile x86_64 5:2.0.14-7.fc28 fedora 3.5 M hamcrest-core noarch 1.3-23.fc28 fedora 57 k hawtjni-runtime noarch 1.15-3.fc28 fedora 41 k hicolor-icon-theme noarch 0.17-2.fc28 fedora 47 k httpcomponents-client noarch 4.5.5-4.fc28 fedora 718 k httpcomponents-core noarch 4.4.9-4.fc28 fedora 635 k isl x86_64 0.16.1-6.fc28 fedora 841 k jackson-annotations noarch 2.9.4-2.fc28 fedora 70 k jackson-core noarch 2.9.4-2.fc28 fedora 320 k jackson-databind noarch 2.9.4-3.fc28 fedora 1.2 M jackson-dataformat-cbor noarch 2.9.4-3.fc28 fedora 63 k jansi noarch 1.16-3.fc28 fedora 57 k jansi-native x86_64 1.7-5.fc28 fedora 37 k jatl noarch 0.2.2-13.fc28 fedora 37 k java-1.8.0-openjdk x86_64 1:1.8.0.162-3.b12.fc28 fedora 244 k java-1.8.0-openjdk-headless x86_64 1:1.8.0.162-3.b12.fc28 fedora 32 M javapackages-tools noarch 5.0.0-14.fc28 updates 51 k jboss-interceptors-1.2-api noarch 1.0.0-8.fc28 fedora 32 k jcifs noarch 1.3.18-7.fc28 fedora 467 k jcip-annotations noarch 1-23.20060626.fc28 fedora 14 k jcl-over-slf4j noarch 1.7.25-4.fc28 fedora 30 k jcsp noarch 1.1-0.8.rc5.fc28 fedora 599 k jetty-http noarch 9.4.11-2.v20180605.fc28 updates 212 k jetty-io noarch 9.4.11-2.v20180605.fc28 updates 151 k jetty-server noarch 9.4.11-2.v20180605.fc28 updates 573 k jetty-util noarch 9.4.11-2.v20180605.fc28 updates 492 k jline noarch 2.14.6-1.fc28 updates 157 k joda-time noarch 2.9.9-2.tzdata2017b.fc28 fedora 524 k jsch noarch 0.1.54-6.fc28 fedora 267 k jsoup noarch 1.11.2-2.fc28 fedora 381 k jsr-305 noarch 0-0.22.20130910svn.fc28 fedora 33 k jul-to-slf4j noarch 1.7.25-4.fc28 fedora 20 k junit noarch 1:4.12-8.fc28 fedora 299 k jzlib noarch 1.1.3-8.fc28 fedora 83 k kernel-headers x86_64 4.17.12-1.fc28 updates 1.1 M kryo noarch 3.0.3-5.fc28 fedora 288 k less x86_64 530-1.fc28 fedora 163 k libX11 x86_64 1.6.5-7.fc28 fedora 622 k libX11-common noarch 1.6.5-7.fc28 fedora 167 k libXau x86_64 1.0.8-11.fc28 fedora 34 k libXcomposite x86_64 0.4.4-12.fc28 fedora 27 k libXext x86_64 1.3.3-8.fc28 fedora 44 k libXi x86_64 1.7.9-6.fc28 fedora 48 k libXrender x86_64 0.9.10-5.fc28 fedora 32 k libXtst x86_64 1.2.3-5.fc28 fedora 25 k libaio x86_64 0.3.110-11.fc28 fedora 29 k libatomic_ops x86_64 7.6.2-3.fc28 fedora 37 k libedit x86_64 3.1-23.20170329cvs.fc28 fedora 101 k libfontenc x86_64 1.1.3-7.fc28 fedora 36 k libgomp x86_64 8.1.1-5.fc28 updates 207 k libjpeg-turbo x86_64 1.5.3-6.fc28 updates 154 k libmpc x86_64 1.0.2-9.fc28 fedora 58 k libnl3 x86_64 3.4.0-3.fc28 fedora 307 k libpkgconf x86_64 1.4.2-1.fc28 fedora 34 k libpng x86_64 2:1.6.34-6.fc28 updates 125 k libsecret x86_64 0.18.6-1.fc28 fedora 162 k libssh2 x86_64 1.8.0-7.fc28 fedora 97 k libstdc++ x86_64 8.1.1-5.fc28 updates 487 k libtool-ltdl x86_64 2.4.6-24.fc28 updates 57 k libvirt-libs x86_64 4.2.0-1.fc28 fedora-virt-preview 4.2 M libwsman1 x86_64 2.6.5-2.fc28 fedora 140 k libxcb x86_64 1.13-1.fc28 fedora 228 k libxcrypt-devel x86_64 4.0.0-5.fc28 fedora 15 k lksctp-tools x86_64 1.0.16-9.fc28 fedora 96 k log4j-over-slf4j noarch 1.7.25-4.fc28 fedora 36 k lua x86_64 5.3.4-10.fc28 fedora 191 k lua-posix x86_64 33.3.1-8.fc28 fedora 175 k maven-lib noarch 1:3.5.2-5.fc28 fedora 1.4 M maven-resolver-api noarch 1:1.1.0-2.fc28 fedora 136 k maven-resolver-connector-basic noarch 1:1.1.0-2.fc28 fedora 49 k maven-resolver-impl noarch 1:1.1.0-2.fc28 fedora 175 k maven-resolver-spi noarch 1:1.1.0-2.fc28 fedora 39 k maven-resolver-transport-wagon noarch 1:1.1.0-2.fc28 fedora 37 k maven-resolver-util noarch 1:1.1.0-2.fc28 fedora 146 k maven-shared-utils noarch 3.1.0-5.fc27 fedora 160 k 
maven-wagon-file noarch 3.0.0-1.fc28 fedora 24 k maven-wagon-http noarch 3.0.0-1.fc28 fedora 25 k maven-wagon-http-shared noarch 3.0.0-1.fc28 fedora 48 k maven-wagon-provider-api noarch 3.0.0-1.fc28 fedora 62 k minlog noarch 1.3.0-5.fc27 fedora 18 k multiverse noarch 0.7.0-7.fc27 fedora 228 k native-platform x86_64 0.14-12.fc28 fedora 93 k ncurses x86_64 6.1-4.20180224.fc28 fedora 377 k nekohtml noarch 1.9.22-5.fc27 fedora 159 k netty-tcnative x86_64 1.1.30-9.fc28 fedora 118 k netty3 noarch 3.10.6-3.fc27 fedora 1.2 M nmap-ncat x86_64 2:7.60-12.fc28 fedora 235 k numactl-libs x86_64 2.0.11-8.fc28 fedora 34 k objectweb-asm noarch 6.1.1-1.fc28 updates 440 k objenesis noarch 2.6-1.fc28 fedora 105 k openssh x86_64 7.7p1-5.fc28 updates 483 k openssh-clients x86_64 7.7p1-5.fc28 updates 684 k osgi-annotation noarch 6.0.0-7.fc28 fedora 17 k osgi-compendium noarch 6.0.0-5.fc28 fedora 274 k osgi-core noarch 6.0.0-6.fc28 fedora 156 k perl-Carp noarch 1.42-396.fc28 updates 29 k perl-Data-Dumper x86_64 2.167-399.fc28 fedora 57 k perl-Digest noarch 1.17-395.fc28 fedora 26 k perl-Digest-MD5 x86_64 2.55-396.fc28 fedora 36 k perl-Encode x86_64 4:2.97-3.fc28 fedora 1.5 M perl-Errno x86_64 1.28-413.fc28 updates 74 k perl-Error noarch 1:0.17025-2.fc28 fedora 45 k perl-Exporter noarch 5.72-396.fc28 fedora 33 k perl-File-Path noarch 2.15-2.fc28 fedora 37 k perl-File-Temp noarch 0.230.600-1.fc28 updates 62 k perl-Getopt-Long noarch 1:2.50-4.fc28 fedora 62 k perl-Git noarch 2.17.1-3.fc28 updates 73 k perl-HTTP-Tiny noarch 0.070-395.fc28 fedora 56 k perl-IO x86_64 1.38-413.fc28 updates 140 k perl-IO-Socket-IP noarch 0.39-5.fc28 fedora 46 k perl-MIME-Base64 x86_64 3.15-396.fc28 fedora 29 k perl-Net-SSLeay x86_64 1.85-1.fc28 fedora 356 k perl-PathTools x86_64 3.74-1.fc28 fedora 89 k perl-Pod-Escapes noarch 1:1.07-395.fc28 fedora 19 k perl-Pod-Perldoc noarch 3.28-396.fc28 fedora 87 k perl-Pod-Simple noarch 1:3.35-395.fc28 fedora 212 k perl-Pod-Usage noarch 4:1.69-395.fc28 fedora 33 k perl-Scalar-List-Utils x86_64 3:1.49-2.fc28 fedora 67 k perl-Socket x86_64 4:2.027-2.fc28 fedora 58 k perl-Storable x86_64 1:3.11-2.fc28 updates 97 k perl-Term-ANSIColor noarch 4.06-396.fc28 fedora 45 k perl-Term-Cap noarch 1.17-395.fc28 fedora 22 k perl-TermReadKey x86_64 2.37-7.fc28 fedora 39 k perl-Text-ParseWords noarch 3.30-395.fc28 fedora 17 k perl-Text-Tabs+Wrap noarch 2013.0523-395.fc28 fedora 23 k perl-Time-Local noarch 1:1.280-1.fc28 updates 32 k perl-URI noarch 1.73-2.fc28 fedora 115 k perl-Unicode-Normalize x86_64 1.25-396.fc28 fedora 81 k perl-constant noarch 1.33-396.fc28 fedora 24 k perl-interpreter x86_64 4:5.26.2-413.fc28 updates 6.3 M perl-libnet noarch 3.11-3.fc28 fedora 120 k perl-libs x86_64 4:5.26.2-413.fc28 updates 1.6 M perl-macros x86_64 4:5.26.2-413.fc28 updates 70 k perl-parent noarch 1:0.236-395.fc28 fedora 19 k perl-podlators noarch 4.11-1.fc28 updates 117 k perl-threads x86_64 1:2.21-2.fc28 fedora 60 k perl-threads-shared x86_64 1.58-2.fc28 fedora 46 k pkgconf x86_64 1.4.2-1.fc28 fedora 37 k pkgconf-m4 noarch 1.4.2-1.fc28 fedora 16 k pkgconf-pkg-config x86_64 1.4.2-1.fc28 fedora 14 k plexus-cipher noarch 1.7-14.fc28 fedora 28 k plexus-classworlds noarch 2.5.2-9.fc28 fedora 64 k plexus-containers-component-annotations noarch 1.7.1-5.fc28 fedora 17 k plexus-interpolation noarch 1.22-9.fc28 fedora 78 k plexus-sec-dispatcher noarch 1.4-24.fc28 fedora 31 k plexus-utils noarch 3.0.24-5.fc28 fedora 255 k protobuf x86_64 3.5.0-4.fc28 fedora 897 k publicsuffix-list noarch 20180514-1.fc28 updates 78 k python2 x86_64 
2.7.15-2.fc28 updates 101 k python2-libs x86_64 2.7.15-2.fc28 updates 6.2 M python2-pip noarch 9.0.3-2.fc28 updates 2.0 M python2-setuptools noarch 39.2.0-6.fc28 updates 644 k reflectasm noarch 1.11.0-6.fc28 fedora 33 k rhino noarch 1.7.7.1-4.fc28 fedora 1.1 M sisu-inject noarch 1:0.3.3-3.fc28 fedora 337 k sisu-plexus noarch 1:0.3.3-3.fc28 fedora 178 k slf4j noarch 1.7.25-4.fc28 fedora 76 k snakeyaml noarch 1.17-5.fc28 fedora 247 k tesla-polyglot-common noarch 0.2.0-2.fc28 fedora 33 k tesla-polyglot-groovy noarch 0.2.0-2.fc28 fedora 56 k testng noarch 6.14.3-2.fc28 fedora 863 k ttmkfdir x86_64 3.0.9-54.fc28 fedora 60 k tzdata-java noarch 2018e-1.fc28 updates 185 k which x86_64 2.21-8.fc28 fedora 47 k xalan-j2 noarch 2.7.1-34.fc28 fedora 1.9 M xbean noarch 4.5-9.fc28 fedora 412 k xerces-j2 noarch 2.11.0-31.fc28 fedora 1.2 M xml-commons-apis noarch 1.4.01-25.fc28 fedora 234 k xml-commons-resolver noarch 1.2-24.fc28 fedora 114 k xorg-x11-font-utils x86_64 1:7.5-38.fc28 updates 95 k xorg-x11-fonts-Type1 noarch 7.5-19.fc28 fedora 521 k xpp3 noarch 1.1.4-18.c.fc28 fedora 294 k xpp3-minimal noarch 1.1.4-18.c.fc28 fedora 37 k xstream noarch 1.4.9-7.fc28 fedora 500 k yajl x86_64 2.1.0-10.fc28 fedora 39 k Installing weak dependencies: java-1.8.0-openjdk-devel x86_64 1:1.8.0.162-3.b12.fc28 fedora 9.8 M perl-IO-Socket-SSL noarch 2.056-1.fc28 fedora 285 k perl-Mozilla-CA noarch 20160104-7.fc28 fedora 14 k Transaction Summary ================================================================================ Install 248 Packages Upgrade 1 Package Total download size: 204 M Downloading Packages: (1/249): libvirt-devel-4.2.0-1.fc28.x86_64.rpm 388 kB/s | 167 kB 00:00 (2/249): rsync-daemon-3.1.3-3.fc28.noarch.rpm 24 kB/s | 41 kB 00:01 (3/249): make-4.2.1-6.fc28.x86_64.rpm 161 kB/s | 497 kB 00:03 (4/249): rsync-3.1.3-3.fc28.x86_64.rpm 353 kB/s | 403 kB 00:01 (5/249): libvirt-libs-4.2.0-1.fc28.x86_64.rpm 4.5 MB/s | 4.2 MB 00:00 (6/249): gc-7.6.4-3.fc28.x86_64.rpm 150 kB/s | 108 kB 00:00 (7/249): libnl3-3.4.0-3.fc28.x86_64.rpm 288 kB/s | 307 kB 00:01 (8/249): mercurial-4.4.2-4.fc28.x86_64.rpm 826 kB/s | 4.2 MB 00:05 (9/249): libssh2-1.8.0-7.fc28.x86_64.rpm 143 kB/s | 97 kB 00:00 (10/249): libwsman1-2.6.5-2.fc28.x86_64.rpm 192 kB/s | 140 kB 00:00 (11/249): numactl-libs-2.0.11-8.fc28.x86_64.rpm 83 kB/s | 34 kB 00:00 (12/249): nmap-ncat-7.60-12.fc28.x86_64.rpm 225 kB/s | 235 kB 00:01 (13/249): yajl-2.1.0-10.fc28.x86_64.rpm 76 kB/s | 39 kB 00:00 (14/249): libatomic_ops-7.6.2-3.fc28.x86_64.rpm 85 kB/s | 37 kB 00:00 (15/249): git-2.17.1-3.fc28.x86_64.rpm 533 kB/s | 221 kB 00:00 (16/249): git-core-doc-2.17.1-3.fc28.noarch.rpm 2.6 MB/s | 2.3 MB 00:00 (17/249): perl-Git-2.17.1-3.fc28.noarch.rpm 1.2 MB/s | 73 kB 00:00 (18/249): git-core-2.17.1-3.fc28.x86_64.rpm 2.5 MB/s | 4.0 MB 00:01 (19/249): guile-2.0.14-7.fc28.x86_64.rpm 722 kB/s | 3.5 MB 00:04 (20/249): libsecret-0.18.6-1.fc28.x86_64.rpm 201 kB/s | 162 kB 00:00 (21/249): perl-Getopt-Long-2.50-4.fc28.noarch.r 99 kB/s | 62 kB 00:00 (22/249): perl-TermReadKey-2.37-7.fc28.x86_64.r 92 kB/s | 39 kB 00:00 (23/249): perl-PathTools-3.74-1.fc28.x86_64.rpm 125 kB/s | 89 kB 00:00 (24/249): perl-Error-0.17025-2.fc28.noarch.rpm 107 kB/s | 45 kB 00:00 (25/249): perl-Exporter-5.72-396.fc28.noarch.rp 78 kB/s | 33 kB 00:00 (26/249): less-530-1.fc28.x86_64.rpm 196 kB/s | 163 kB 00:00 (27/249): perl-Pod-Usage-1.69-395.fc28.noarch.r 66 kB/s | 33 kB 00:00 (28/249): perl-Text-ParseWords-3.30-395.fc28.no 54 kB/s | 17 kB 00:00 (29/249): perl-constant-1.33-396.fc28.noarch.rp 75 kB/s | 24 kB 
00:00 (30/249): perl-Scalar-List-Utils-1.49-2.fc28.x8 123 kB/s | 67 kB 00:00 (31/249): perl-Pod-Perldoc-3.28-396.fc28.noarch 136 kB/s | 87 kB 00:00 (32/249): perl-HTTP-Tiny-0.070-395.fc28.noarch. 94 kB/s | 56 kB 00:00 (33/249): perl-parent-0.236-395.fc28.noarch.rpm 57 kB/s | 19 kB 00:00 (34/249): perl-Pod-Simple-3.35-395.fc28.noarch. 212 kB/s | 212 kB 00:01 (35/249): perl-MIME-Base64-3.15-396.fc28.x86_64 63 kB/s | 29 kB 00:00 (36/249): perl-Socket-2.027-2.fc28.x86_64.rpm 97 kB/s | 58 kB 00:00 (37/249): groff-base-1.22.3-15.fc28.x86_64.rpm 504 kB/s | 1.0 MB 00:02 (38/249): perl-Pod-Escapes-1.07-395.fc28.noarch 58 kB/s | 19 kB 00:00 (39/249): perl-Text-Tabs+Wrap-2013.0523-395.fc2 74 kB/s | 23 kB 00:00 (40/249): sudo-1.8.23-1.fc28.x86_64.rpm 1.8 MB/s | 864 kB 00:00 (41/249): isl-0.16.1-6.fc28.x86_64.rpm 537 kB/s | 841 kB 00:01 (42/249): libmpc-1.0.2-9.fc28.x86_64.rpm 109 kB/s | 58 kB 00:00 (43/249): findutils-4.6.0-19.fc28.x86_64.rpm 1.6 MB/s | 526 kB 00:00 (44/249): cpp-8.1.1-5.fc28.x86_64.rpm 2.4 MB/s | 10 MB 00:04 (45/249): apache-commons-cli-1.4-4.fc28.noarch. 135 kB/s | 73 kB 00:00 (46/249): apache-commons-codec-1.11-3.fc28.noar 254 kB/s | 287 kB 00:01 (47/249): gradle-4.3.1-7.fc28.noarch.rpm 2.5 MB/s | 9.1 MB 00:03 (48/249): apache-commons-io-2.6-3.fc28.noarch.r 235 kB/s | 222 kB 00:00 (49/249): apache-commons-compress-1.16.1-1.fc28 350 kB/s | 498 kB 00:01 (50/249): gcc-8.1.1-5.fc28.x86_64.rpm 2.6 MB/s | 23 MB 00:08 (51/249): apache-commons-lang-2.6-21.fc28.noarc 233 kB/s | 281 kB 00:01 (52/249): apache-commons-lang3-3.7-3.fc28.noarc 365 kB/s | 482 kB 00:01 (53/249): atinject-1-27.20100611svn86.fc28.noar 48 kB/s | 19 kB 00:00 (54/249): apache-ivy-2.4.0-10.fc28.noarch.rpm 650 kB/s | 1.1 MB 00:01 (55/249): aqute-bndlib-3.5.0-2.fc28.noarch.rpm 701 kB/s | 1.4 MB 00:02 (56/249): aws-sdk-java-core-1.11.3-5.fc28.noarc 355 kB/s | 532 kB 00:01 (57/249): base64coder-20101219-20.fc28.noarch.r 47 kB/s | 18 kB 00:00 (58/249): aws-sdk-java-kms-1.11.3-5.fc28.noarch 302 kB/s | 350 kB 00:01 (59/249): beust-jcommander-1.71-3.fc28.noarch.r 123 kB/s | 85 kB 00:00 (60/249): aws-sdk-java-s3-1.11.3-5.fc28.noarch. 370 kB/s | 566 kB 00:01 (61/249): glassfish-servlet-api-3.1.0-14.fc28.n 137 kB/s | 87 kB 00:00 (62/249): google-gson-2.8.2-1.fc28.noarch.rpm 243 kB/s | 236 kB 00:00 (63/249): hawtjni-runtime-1.15-3.fc28.noarch.rp 93 kB/s | 41 kB 00:00 (64/249): google-guice-4.1-10.fc28.noarch.rpm 367 kB/s | 469 kB 00:01 (65/249): ecj-4.7.3a-1.fc28.noarch.rpm 973 kB/s | 2.6 MB 00:02 (66/249): hicolor-icon-theme-0.17-2.fc28.noarch 81 kB/s | 47 kB 00:00 (67/249): jackson-annotations-2.9.4-2.fc28.noar 116 kB/s | 70 kB 00:00 (68/249): httpcomponents-client-4.5.5-4.fc28.no 431 kB/s | 718 kB 00:01 (69/249): httpcomponents-core-4.4.9-4.fc28.noar 441 kB/s | 635 kB 00:01 (70/249): jackson-core-2.9.4-2.fc28.noarch.rpm 303 kB/s | 320 kB 00:01 (71/249): jansi-1.16-3.fc28.noarch.rpm 106 kB/s | 57 kB 00:00 (72/249): jansi-native-1.7-5.fc28.x86_64.rpm 89 kB/s | 37 kB 00:00 (73/249): jatl-0.2.2-13.fc28.noarch.rpm 86 kB/s | 37 kB 00:00 (74/249): jcip-annotations-1-23.20060626.fc28.n 54 kB/s | 14 kB 00:00 (75/249): jcl-over-slf4j-1.7.25-4.fc28.noarch.r 74 kB/s | 30 kB 00:00 (76/249): jackson-databind-2.9.4-3.fc28.noarch. 
606 kB/s | 1.2 MB 00:02 (77/249): jcifs-1.3.18-7.fc28.noarch.rpm 349 kB/s | 467 kB 00:01 (78/249): jsr-305-0-0.22.20130910svn.fc28.noarc 67 kB/s | 33 kB 00:00 (79/249): jul-to-slf4j-1.7.25-4.fc28.noarch.rpm 64 kB/s | 20 kB 00:00 (80/249): jsch-0.1.54-6.fc28.noarch.rpm 239 kB/s | 267 kB 00:01 (81/249): joda-time-2.9.9-2.tzdata2017b.fc28.no 358 kB/s | 524 kB 00:01 (82/249): log4j-over-slf4j-1.7.25-4.fc28.noarch 70 kB/s | 36 kB 00:00 (83/249): junit-4.12-8.fc28.noarch.rpm 291 kB/s | 299 kB 00:01 (84/249): kryo-3.0.3-5.fc28.noarch.rpm 278 kB/s | 288 kB 00:01 (85/249): maven-resolver-connector-basic-1.1.0- 118 kB/s | 49 kB 00:00 (86/249): maven-resolver-api-1.1.0-2.fc28.noarc 183 kB/s | 136 kB 00:00 (87/249): maven-resolver-spi-1.1.0-2.fc28.noarc 80 kB/s | 39 kB 00:00 (88/249): maven-resolver-impl-1.1.0-2.fc28.noar 173 kB/s | 175 kB 00:01 (89/249): maven-resolver-transport-wagon-1.1.0- 91 kB/s | 37 kB 00:00 (90/249): maven-lib-3.5.2-5.fc28.noarch.rpm 668 kB/s | 1.4 MB 00:02 (91/249): maven-wagon-file-3.0.0-1.fc28.noarch. 73 kB/s | 24 kB 00:00 (92/249): maven-wagon-http-3.0.0-1.fc28.noarch. 68 kB/s | 25 kB 00:00 (93/249): maven-resolver-util-1.1.0-2.fc28.noar 194 kB/s | 146 kB 00:00 (94/249): maven-wagon-http-shared-3.0.0-1.fc28. 114 kB/s | 48 kB 00:00 (95/249): minlog-1.3.0-5.fc27.noarch.rpm 55 kB/s | 18 kB 00:00 (96/249): maven-wagon-provider-api-3.0.0-1.fc28 106 kB/s | 62 kB 00:00 (97/249): native-platform-0.14-12.fc28.x86_64.r 146 kB/s | 93 kB 00:00 (98/249): plexus-cipher-1.7-14.fc28.noarch.rpm 88 kB/s | 28 kB 00:00 (99/249): nekohtml-1.9.22-5.fc27.noarch.rpm 193 kB/s | 159 kB 00:00 (100/249): objenesis-2.6-1.fc28.noarch.rpm 144 kB/s | 105 kB 00:00 (101/249): plexus-containers-component-annotati 42 kB/s | 17 kB 00:00 (102/249): plexus-classworlds-2.5.2-9.fc28.noar 103 kB/s | 64 kB 00:00 (103/249): plexus-interpolation-1.22-9.fc28.noa 126 kB/s | 78 kB 00:00 (104/249): plexus-sec-dispatcher-1.4-24.fc28.no 73 kB/s | 31 kB 00:00 (105/249): reflectasm-1.11.0-6.fc28.noarch.rpm 75 kB/s | 33 kB 00:00 (106/249): plexus-utils-3.0.24-5.fc28.noarch.rp 244 kB/s | 255 kB 00:01 (107/249): sisu-inject-0.3.3-3.fc28.noarch.rpm 258 kB/s | 337 kB 00:01 (108/249): sisu-plexus-0.3.3-3.fc28.noarch.rpm 179 kB/s | 178 kB 00:00 (109/249): rhino-1.7.7.1-4.fc28.noarch.rpm 583 kB/s | 1.1 MB 00:01 (110/249): slf4j-1.7.25-4.fc28.noarch.rpm 138 kB/s | 76 kB 00:00 (111/249): tesla-polyglot-common-0.2.0-2.fc28.n 78 kB/s | 33 kB 00:00 (112/249): snakeyaml-1.17-5.fc28.noarch.rpm 252 kB/s | 247 kB 00:00 (113/249): tesla-polyglot-groovy-0.2.0-2.fc28.n 106 kB/s | 56 kB 00:00 (114/249): xbean-4.5-9.fc28.noarch.rpm 322 kB/s | 412 kB 00:01 (115/249): testng-6.14.3-2.fc28.noarch.rpm 495 kB/s | 863 kB 00:01 (116/249): osgi-annotation-6.0.0-7.fc28.noarch. 52 kB/s | 17 kB 00:00 (117/249): xerces-j2-2.11.0-31.fc28.noarch.rpm 594 kB/s | 1.2 MB 00:01 (118/249): xml-commons-apis-1.4.01-25.fc28.noar 231 kB/s | 234 kB 00:01 (119/249): osgi-compendium-6.0.0-5.fc28.noarch. 
291 kB/s | 274 kB 00:00 (120/249): osgi-core-6.0.0-6.fc28.noarch.rpm 171 kB/s | 156 kB 00:00 (121/249): apache-commons-logging-1.2-13.fc28.n 138 kB/s | 86 kB 00:00 (122/249): jackson-dataformat-cbor-2.9.4-3.fc28 120 kB/s | 63 kB 00:00 (123/249): aopalliance-1.0-17.fc28.noarch.rpm 37 kB/s | 16 kB 00:00 (124/249): jzlib-1.1.3-8.fc28.noarch.rpm 132 kB/s | 83 kB 00:00 (125/249): geronimo-annotation-1.0-22.fc28.noar 59 kB/s | 24 kB 00:00 (126/249): hamcrest-core-1.3-23.fc28.noarch.rpm 111 kB/s | 57 kB 00:00 (127/249): maven-shared-utils-3.1.0-5.fc27.noar 224 kB/s | 160 kB 00:00 (128/249): jsoup-1.11.2-2.fc28.noarch.rpm 369 kB/s | 381 kB 00:01 (129/249): cdi-api-1.2-7.fc28.noarch.rpm 176 kB/s | 142 kB 00:00 (130/249): bcel-6.2-2.fc28.noarch.rpm 400 kB/s | 616 kB 00:01 (131/249): xml-commons-resolver-1.2-24.fc28.noa 157 kB/s | 114 kB 00:00 (132/249): glassfish-el-api-3.0.1-0.7.b08.fc28. 151 kB/s | 104 kB 00:00 (133/249): jboss-interceptors-1.2-api-1.0.0-8.f 71 kB/s | 32 kB 00:00 (134/249): libaio-0.3.110-11.fc28.x86_64.rpm 69 kB/s | 29 kB 00:00 (135/249): qemu-img-2.12.0-0.5.rc1.fc28.x86_64. 1.1 MB/s | 939 kB 00:00 (136/249): xalan-j2-2.7.1-34.fc28.noarch.rpm 750 kB/s | 1.9 MB 00:02 (137/249): javapackages-tools-5.0.0-14.fc28.noa 322 kB/s | 51 kB 00:00 (138/249): protobuf-compiler-3.5.0-4.fc28.x86_6 477 kB/s | 792 kB 00:01 (139/249): protobuf-3.5.0-4.fc28.x86_64.rpm 495 kB/s | 897 kB 00:01 (140/249): which-2.21-8.fc28.x86_64.rpm 113 kB/s | 47 kB 00:00 (141/249): lksctp-tools-1.0.16-9.fc28.x86_64.rp 156 kB/s | 96 kB 00:00 (142/249): freetype-2.8-10.fc28.x86_64.rpm 312 kB/s | 384 kB 00:01 (143/249): antlr-tool-2.7.7-53.fc28.noarch.rpm 391 kB/s | 364 kB 00:00 (144/249): gpars-1.2.1-11.fc28.noarch.rpm 461 kB/s | 570 kB 00:01 (145/249): groovy-lib-2.4.8-6.fc28.noarch.rpm 3.5 MB/s | 14 MB 00:03 (146/249): xstream-1.4.9-7.fc28.noarch.rpm 387 kB/s | 500 kB 00:01 (147/249): extra166y-1.7.0-9.fc28.noarch.rpm 445 kB/s | 553 kB 00:01 (148/249): jcsp-1.1-0.8.rc5.fc28.noarch.rpm 408 kB/s | 599 kB 00:01 (149/249): java-1.8.0-openjdk-headless-1.8.0.16 3.9 MB/s | 32 MB 00:08 (150/249): multiverse-0.7.0-7.fc27.noarch.rpm 198 kB/s | 228 kB 00:01 (151/249): xpp3-minimal-1.1.4-18.c.fc28.noarch. 82 kB/s | 37 kB 00:00 (152/249): netty3-3.10.6-3.fc27.noarch.rpm 773 kB/s | 1.2 MB 00:01 (153/249): xpp3-1.1.4-18.c.fc28.noarch.rpm 297 kB/s | 294 kB 00:00 (154/249): felix-osgi-core-1.4.0-23.fc28.noarch 164 kB/s | 104 kB 00:00 (155/249): netty-tcnative-1.1.30-9.fc28.x86_64. 
150 kB/s | 118 kB 00:00 (156/249): apr-1.6.3-5.fc28.x86_64.rpm 169 kB/s | 124 kB 00:00 (157/249): objectweb-asm-6.1.1-1.fc28.noarch.rp 1.8 MB/s | 440 kB 00:00 (158/249): jline-2.14.6-1.fc28.noarch.rpm 917 kB/s | 157 kB 00:00 (159/249): libstdc++-8.1.1-5.fc28.x86_64.rpm 2.7 MB/s | 487 kB 00:00 (160/249): perl-libs-5.26.2-413.fc28.x86_64.rpm 3.4 MB/s | 1.6 MB 00:00 (161/249): perl-File-Path-2.15-2.fc28.noarch.rp 90 kB/s | 37 kB 00:00 (162/249): compat-openssl10-1.0.2o-1.fc28.x86_6 596 kB/s | 1.1 MB 00:01 (163/249): perl-Unicode-Normalize-1.25-396.fc28 145 kB/s | 81 kB 00:00 (164/249): perl-threads-2.21-2.fc28.x86_64.rpm 114 kB/s | 60 kB 00:00 (165/249): perl-Errno-1.28-413.fc28.x86_64.rpm 609 kB/s | 74 kB 00:00 (166/249): perl-interpreter-5.26.2-413.fc28.x86 3.2 MB/s | 6.3 MB 00:01 (167/249): perl-Carp-1.42-396.fc28.noarch.rpm 152 kB/s | 29 kB 00:00 (168/249): perl-threads-shared-1.58-2.fc28.x86_ 107 kB/s | 46 kB 00:00 (169/249): perl-podlators-4.11-1.fc28.noarch.rp 973 kB/s | 117 kB 00:00 (170/249): perl-Term-Cap-1.17-395.fc28.noarch.r 56 kB/s | 22 kB 00:00 (171/249): perl-Term-ANSIColor-4.06-396.fc28.no 87 kB/s | 45 kB 00:00 (172/249): perl-File-Temp-0.230.600-1.fc28.noar 626 kB/s | 62 kB 00:00 (173/249): perl-IO-1.38-413.fc28.x86_64.rpm 1.4 MB/s | 140 kB 00:00 (174/249): perl-Time-Local-1.280-1.fc28.noarch. 338 kB/s | 32 kB 00:00 (175/249): perl-Storable-3.11-2.fc28.x86_64.rpm 971 kB/s | 97 kB 00:00 (176/249): emacs-filesystem-26.1-3.fc28.noarch. 699 kB/s | 68 kB 00:00 (177/249): python2-2.7.15-2.fc28.x86_64.rpm 1.0 MB/s | 101 kB 00:00 (178/249): ncurses-6.1-4.20180224.fc28.x86_64.r 308 kB/s | 377 kB 00:01 (179/249): guava20-20.0-6.fc28.noarch.rpm 2.2 MB/s | 2.1 MB 00:00 (180/249): copy-jdk-configs-3.7-1.fc28.noarch.r 593 kB/s | 26 kB 00:00 (181/249): perl-Encode-2.97-3.fc28.x86_64.rpm 734 kB/s | 1.5 MB 00:02 (182/249): libjpeg-turbo-1.5.3-6.fc28.x86_64.rp 1.8 MB/s | 154 kB 00:00 (183/249): python2-libs-2.7.15-2.fc28.x86_64.rp 3.8 MB/s | 6.2 MB 00:01 (184/249): tzdata-java-2018e-1.fc28.noarch.rpm 838 kB/s | 185 kB 00:00 (185/249): publicsuffix-list-20180514-1.fc28.no 1.4 MB/s | 78 kB 00:00 (186/249): libtool-ltdl-2.4.6-24.fc28.x86_64.rp 1.1 MB/s | 57 kB 00:00 (187/249): libpng-1.6.34-6.fc28.x86_64.rpm 1.4 MB/s | 125 kB 00:00 (188/249): avahi-libs-0.7-13.fc28.x86_64.rpm 857 kB/s | 62 kB 00:00 (189/249): capstone-3.0.4-16.fc28.x86_64.rpm 2.0 MB/s | 487 kB 00:00 (190/249): lua-posix-33.3.1-8.fc28.x86_64.rpm 185 kB/s | 175 kB 00:00 (191/249): cyrus-sasl-gssapi-2.1.27-0.1rc7.fc28 113 kB/s | 48 kB 00:00 (192/249): pkgconf-pkg-config-1.4.2-1.fc28.x86_ 66 kB/s | 14 kB 00:00 (193/249): cyrus-sasl-2.1.27-0.1rc7.fc28.x86_64 147 kB/s | 94 kB 00:00 (194/249): pkgconf-m4-1.4.2-1.fc28.noarch.rpm 52 kB/s | 16 kB 00:00 (195/249): pkgconf-1.4.2-1.fc28.x86_64.rpm 90 kB/s | 37 kB 00:00 (196/249): libpkgconf-1.4.2-1.fc28.x86_64.rpm 79 kB/s | 34 kB 00:00 (197/249): perl-macros-5.26.2-413.fc28.x86_64.r 594 kB/s | 70 kB 00:00 (198/249): python2-setuptools-39.2.0-6.fc28.noa 1.7 MB/s | 644 kB 00:00 (199/249): python2-pip-9.0.3-2.fc28.noarch.rpm 3.2 MB/s | 2.0 MB 00:00 (200/249): apache-commons-collections-3.2.2-8.f 2.1 MB/s | 535 kB 00:00 (201/249): bouncycastle-pg-1.59-1.fc28.noarch.r 2.4 MB/s | 317 kB 00:00 (202/249): jetty-server-9.4.11-2.v20180605.fc28 2.7 MB/s | 573 kB 00:00 (203/249): jetty-http-9.4.11-2.v20180605.fc28.n 2.3 MB/s | 212 kB 00:00 (204/249): jetty-io-9.4.11-2.v20180605.fc28.noa 1.7 MB/s | 151 kB 00:00 (205/249): jetty-util-9.4.11-2.v20180605.fc28.n 2.8 MB/s | 492 kB 00:00 (206/249): 
ant-lib-1.10.1-10.fc28.noarch.rpm 1.7 MB/s | 1.9 MB 00:01 (207/249): openssh-clients-7.7p1-5.fc28.x86_64. 2.6 MB/s | 684 kB 00:00 (208/249): openssh-7.7p1-5.fc28.x86_64.rpm 1.8 MB/s | 483 kB 00:00 (209/249): bouncycastle-1.59-1.fc28.noarch.rpm 3.2 MB/s | 4.0 MB 00:01 (210/249): fipscheck-lib-1.5.0-4.fc28.x86_64.rp 37 kB/s | 14 kB 00:00 (211/249): fipscheck-1.5.0-4.fc28.x86_64.rpm 86 kB/s | 26 kB 00:00 (212/249): libedit-3.1-23.20170329cvs.fc28.x86_ 155 kB/s | 101 kB 00:00 (213/249): libxcrypt-devel-4.0.0-5.fc28.x86_64. 48 kB/s | 15 kB 00:00 (214/249): binutils-2.29.1-23.fc28.x86_64.rpm 3.8 MB/s | 6.0 MB 00:01 (215/249): libgomp-8.1.1-5.fc28.x86_64.rpm 1.9 MB/s | 207 kB 00:00 (216/249): glibc-headers-2.27-8.fc28.x86_64.rpm 351 kB/s | 454 kB 00:01 (217/249): glibc-devel-2.27-8.fc28.x86_64.rpm 535 kB/s | 1.0 MB 00:01 (218/249): perl-Mozilla-CA-20160104-7.fc28.noar 66 kB/s | 14 kB 00:00 (219/249): kernel-headers-4.17.12-1.fc28.x86_64 2.7 MB/s | 1.1 MB 00:00 (220/249): lua-5.3.4-10.fc28.x86_64.rpm 227 kB/s | 191 kB 00:00 (221/249): perl-IO-Socket-IP-0.39-5.fc28.noarch 107 kB/s | 46 kB 00:00 (222/249): perl-IO-Socket-SSL-2.056-1.fc28.noar 262 kB/s | 285 kB 00:01 (223/249): perl-URI-1.73-2.fc28.noarch.rpm 158 kB/s | 115 kB 00:00 (224/249): perl-Net-SSLeay-1.85-1.fc28.x86_64.r 308 kB/s | 356 kB 00:01 (225/249): perl-Data-Dumper-2.167-399.fc28.x86_ 108 kB/s | 57 kB 00:00 (226/249): perl-Digest-1.17-395.fc28.noarch.rpm 79 kB/s | 26 kB 00:00 (227/249): perl-Digest-MD5-2.55-396.fc28.x86_64 82 kB/s | 36 kB 00:00 (228/249): perl-libnet-3.11-3.fc28.noarch.rpm 161 kB/s | 120 kB 00:00 (229/249): java-1.8.0-openjdk-1.8.0.162-3.b12.f 260 kB/s | 244 kB 00:00 (230/249): libX11-1.6.5-7.fc28.x86_64.rpm 401 kB/s | 622 kB 00:01 (231/249): giflib-5.1.4-1.fc28.x86_64.rpm 102 kB/s | 50 kB 00:00 (232/249): alsa-lib-1.1.6-2.fc28.x86_64.rpm 304 kB/s | 430 kB 00:01 (233/249): libXcomposite-0.4.4-12.fc28.x86_64.r 85 kB/s | 27 kB 00:00 (234/249): libXext-1.3.3-8.fc28.x86_64.rpm 89 kB/s | 44 kB 00:00 (235/249): libXi-1.7.9-6.fc28.x86_64.rpm 117 kB/s | 48 kB 00:00 (236/249): libXtst-1.2.3-5.fc28.x86_64.rpm 83 kB/s | 25 kB 00:00 (237/249): libXrender-0.9.10-5.fc28.x86_64.rpm 75 kB/s | 32 kB 00:00 (238/249): libX11-common-1.6.5-7.fc28.noarch.rp 200 kB/s | 167 kB 00:00 (239/249): java-1.8.0-openjdk-devel-1.8.0.162-3 2.1 MB/s | 9.8 MB 00:04 (240/249): xorg-x11-fonts-Type1-7.5-19.fc28.noa 333 kB/s | 521 kB 00:01 (241/249): libxcb-1.13-1.fc28.x86_64.rpm 239 kB/s | 228 kB 00:00 (242/249): libXau-1.0.8-11.fc28.x86_64.rpm 81 kB/s | 34 kB 00:00 (243/249): ttmkfdir-3.0.9-54.fc28.x86_64.rpm 111 kB/s | 60 kB 00:00 (244/249): fontconfig-2.13.0-4.fc28.x86_64.rpm 1.1 MB/s | 253 kB 00:00 (245/249): xorg-x11-font-utils-7.5-38.fc28.x86_ 478 kB/s | 95 kB 00:00 (246/249): fontpackages-filesystem-1.44-21.fc28 37 kB/s | 15 kB 00:00 (247/249): libgcc-8.1.1-5.fc28.x86_64.rpm 837 kB/s | 95 kB 00:00 (248/249): libfontenc-1.1.3-7.fc28.x86_64.rpm 70 kB/s | 36 kB 00:00 (249/249): aajohan-comfortaa-fonts-3.001-2.fc28 180 kB/s | 147 kB 00:00 -------------------------------------------------------------------------------- Total 2.7 MB/s | 204 MB 01:16 Running transaction check Transaction check succeeded. Running transaction test Transaction test succeeded. 
Running transaction Running scriptlet: copy-jdk-configs-3.7-1.fc28.noarch 1/1 Running scriptlet: java-1.8.0-openjdk-headless-1:1.8.0.162-3.b12.fc28.x 1/1 Preparing : 1/1 Installing : perl-libs-4:5.26.2-413.fc28.x86_64 1/250 Installing : perl-Carp-1.42-396.fc28.noarch 2/250 Installing : perl-Exporter-5.72-396.fc28.noarch 3/250 Upgrading : libgcc-8.1.1-5.fc28.x86_64 4/250 Running scriptlet: libgcc-8.1.1-5.fc28.x86_64 4/250 Installing : libstdc++-8.1.1-5.fc28.x86_64 5/250 Running scriptlet: libstdc++-8.1.1-5.fc28.x86_64 5/250 Installing : perl-Scalar-List-Utils-3:1.49-2.fc28.x86_64 6/250 Installing : python2-libs-2.7.15-2.fc28.x86_64 7/250 Installing : python2-pip-9.0.3-2.fc28.noarch 8/250 Installing : python2-setuptools-39.2.0-6.fc28.noarch 9/250 Installing : python2-2.7.15-2.fc28.x86_64 10/250 Installing : fipscheck-1.5.0-4.fc28.x86_64 11/250 Installing : fipscheck-lib-1.5.0-4.fc28.x86_64 12/250 Running scriptlet: fipscheck-lib-1.5.0-4.fc28.x86_64 12/250 Installing : perl-Text-ParseWords-3.30-395.fc28.noarch 13/250 Installing : fontpackages-filesystem-1.44-21.fc28.noarch 14/250 Installing : libpng-2:1.6.34-6.fc28.x86_64 15/250 Installing : freetype-2.8-10.fc28.x86_64 16/250 Installing : libjpeg-turbo-1.5.3-6.fc28.x86_64 17/250 Installing : emacs-filesystem-1:26.1-3.fc28.noarch 18/250 Installing : libmpc-1.0.2-9.fc28.x86_64 19/250 Running scriptlet: libmpc-1.0.2-9.fc28.x86_64 19/250 Installing : cpp-8.1.1-5.fc28.x86_64 20/250 Running scriptlet: cpp-8.1.1-5.fc28.x86_64 20/250 Installing : ttmkfdir-3.0.9-54.fc28.x86_64 21/250 Installing : aajohan-comfortaa-fonts-3.001-2.fc28.noarch 22/250 Installing : fontconfig-2.13.0-4.fc28.x86_64 23/250 Running scriptlet: fontconfig-2.13.0-4.fc28.x86_64 23/250 Running scriptlet: openssh-7.7p1-5.fc28.x86_64 24/250 Installing : openssh-7.7p1-5.fc28.x86_64 24/250 Installing : groff-base-1.22.3-15.fc28.x86_64 25/250 Installing : protobuf-3.5.0-4.fc28.x86_64 26/250 Installing : perl-Term-ANSIColor-4.06-396.fc28.noarch 27/250 Installing : perl-macros-4:5.26.2-413.fc28.x86_64 28/250 Installing : perl-constant-1.33-396.fc28.noarch 29/250 Installing : perl-parent-1:0.236-395.fc28.noarch 30/250 Installing : perl-Socket-4:2.027-2.fc28.x86_64 31/250 Installing : perl-Text-Tabs+Wrap-2013.0523-395.fc28.noarch 32/250 Installing : perl-File-Path-2.15-2.fc28.noarch 33/250 Installing : perl-Unicode-Normalize-1.25-396.fc28.x86_64 34/250 Installing : perl-threads-shared-1.58-2.fc28.x86_64 35/250 Installing : perl-threads-1:2.21-2.fc28.x86_64 36/250 Installing : perl-Errno-1.28-413.fc28.x86_64 37/250 Installing : perl-PathTools-3.74-1.fc28.x86_64 38/250 Installing : perl-interpreter-4:5.26.2-413.fc28.x86_64 39/250 Installing : perl-IO-1.38-413.fc28.x86_64 40/250 Installing : perl-MIME-Base64-3.15-396.fc28.x86_64 41/250 Installing : perl-Time-Local-1:1.280-1.fc28.noarch 42/250 Installing : perl-HTTP-Tiny-0.070-395.fc28.noarch 43/250 Installing : perl-File-Temp-0.230.600-1.fc28.noarch 44/250 Installing : perl-IO-Socket-IP-0.39-5.fc28.noarch 45/250 Installing : perl-Net-SSLeay-1.85-1.fc28.x86_64 46/250 Installing : perl-Digest-1.17-395.fc28.noarch 47/250 Installing : perl-Digest-MD5-2.55-396.fc28.x86_64 48/250 Installing : perl-libnet-3.11-3.fc28.noarch 49/250 Installing : perl-Storable-1:3.11-2.fc28.x86_64 50/250 Installing : perl-TermReadKey-2.37-7.fc28.x86_64 51/250 Installing : perl-Error-1:0.17025-2.fc28.noarch 52/250 Installing : perl-Pod-Escapes-1:1.07-395.fc28.noarch 53/250 Installing : perl-Data-Dumper-2.167-399.fc28.x86_64 54/250 Installing : 
libfontenc-1.1.3-7.fc28.x86_64 55/250 Running scriptlet: libfontenc-1.1.3-7.fc28.x86_64 55/250 Installing : libXau-1.0.8-11.fc28.x86_64 56/250 Running scriptlet: libXau-1.0.8-11.fc28.x86_64 56/250 Installing : libxcb-1.13-1.fc28.x86_64 57/250 Running scriptlet: libxcb-1.13-1.fc28.x86_64 57/250 Installing : libX11-common-1.6.5-7.fc28.noarch 58/250 Installing : libX11-1.6.5-7.fc28.x86_64 59/250 Running scriptlet: libX11-1.6.5-7.fc28.x86_64 59/250 Installing : libXext-1.3.3-8.fc28.x86_64 60/250 Running scriptlet: libXext-1.3.3-8.fc28.x86_64 60/250 Installing : libXi-1.7.9-6.fc28.x86_64 61/250 Running scriptlet: libXi-1.7.9-6.fc28.x86_64 61/250 Installing : libXtst-1.2.3-5.fc28.x86_64 62/250 Running scriptlet: libXtst-1.2.3-5.fc28.x86_64 62/250 Installing : libXcomposite-0.4.4-12.fc28.x86_64 63/250 Running scriptlet: libXcomposite-0.4.4-12.fc28.x86_64 63/250 Installing : libXrender-0.9.10-5.fc28.x86_64 64/250 Running scriptlet: libXrender-0.9.10-5.fc28.x86_64 64/250 Installing : giflib-5.1.4-1.fc28.x86_64 65/250 Installing : alsa-lib-1.1.6-2.fc28.x86_64 66/250 Running scriptlet: alsa-lib-1.1.6-2.fc28.x86_64 66/250 Installing : kernel-headers-4.17.12-1.fc28.x86_64 67/250 Running scriptlet: glibc-headers-2.27-8.fc28.x86_64 68/250 Installing : glibc-headers-2.27-8.fc28.x86_64 68/250 Installing : lua-5.3.4-10.fc28.x86_64 69/250 Installing : libgomp-8.1.1-5.fc28.x86_64 70/250 Running scriptlet: libgomp-8.1.1-5.fc28.x86_64 70/250 Installing : binutils-2.29.1-23.fc28.x86_64 71/250 Running scriptlet: binutils-2.29.1-23.fc28.x86_64 71/250 install-info: No such file or directory for /usr/share/info/as.info.gz install-info: No such file or directory for /usr/share/info/binutils.info.gz install-info: No such file or directory for /usr/share/info/gprof.info.gz install-info: No such file or directory for /usr/share/info/ld.info.gz Installing : libedit-3.1-23.20170329cvs.fc28.x86_64 72/250 Installing : openssh-clients-7.7p1-5.fc28.x86_64 73/250 Installing : libpkgconf-1.4.2-1.fc28.x86_64 74/250 Installing : pkgconf-1.4.2-1.fc28.x86_64 75/250 Installing : pkgconf-m4-1.4.2-1.fc28.noarch 76/250 Installing : pkgconf-pkg-config-1.4.2-1.fc28.x86_64 77/250 Installing : glibc-devel-2.27-8.fc28.x86_64 78/250 Running scriptlet: glibc-devel-2.27-8.fc28.x86_64 78/250 Installing : libxcrypt-devel-4.0.0-5.fc28.x86_64 79/250 Installing : xorg-x11-font-utils-1:7.5-38.fc28.x86_64 80/250 Installing : xorg-x11-fonts-Type1-7.5-19.fc28.noarch 81/250 Running scriptlet: xorg-x11-fonts-Type1-7.5-19.fc28.noarch 81/250 Installing : cyrus-sasl-gssapi-2.1.27-0.1rc7.fc28.x86_64 82/250 Running scriptlet: cyrus-sasl-2.1.27-0.1rc7.fc28.x86_64 83/250 Installing : cyrus-sasl-2.1.27-0.1rc7.fc28.x86_64 83/250 Running scriptlet: cyrus-sasl-2.1.27-0.1rc7.fc28.x86_64 83/250 Installing : avahi-libs-0.7-13.fc28.x86_64 84/250 Installing : capstone-3.0.4-16.fc28.x86_64 85/250 Running scriptlet: capstone-3.0.4-16.fc28.x86_64 85/250 Installing : libtool-ltdl-2.4.6-24.fc28.x86_64 86/250 Running scriptlet: libtool-ltdl-2.4.6-24.fc28.x86_64 86/250 Installing : publicsuffix-list-20180514-1.fc28.noarch 87/250 Installing : tzdata-java-2018e-1.fc28.noarch 88/250 Installing : lua-posix-33.3.1-8.fc28.x86_64 89/250 Installing : copy-jdk-configs-3.7-1.fc28.noarch 90/250 Installing : ncurses-6.1-4.20180224.fc28.x86_64 91/250 Installing : perl-Term-Cap-1.17-395.fc28.noarch 92/250 Installing : perl-Pod-Simple-1:3.35-395.fc28.noarch 93/250 Installing : perl-Pod-Usage-4:1.69-395.fc28.noarch 94/250 Installing : perl-Getopt-Long-1:2.50-4.fc28.noarch 95/250 Installing : 
perl-Encode-4:2.97-3.fc28.x86_64 96/250 Installing : perl-podlators-4.11-1.fc28.noarch 97/250 Installing : perl-Pod-Perldoc-3.28-396.fc28.noarch 98/250 Installing : perl-URI-1.73-2.fc28.noarch 99/250 Installing : apr-1.6.3-5.fc28.x86_64 100/250 Running scriptlet: apr-1.6.3-5.fc28.x86_64 100/250 Installing : lksctp-tools-1.0.16-9.fc28.x86_64 101/250 Running scriptlet: lksctp-tools-1.0.16-9.fc28.x86_64 101/250 Installing : which-2.21-8.fc28.x86_64 102/250 Running scriptlet: which-2.21-8.fc28.x86_64 102/250 install-info: No such file or directory for /usr/share/info/which.info.gz Installing : libaio-0.3.110-11.fc28.x86_64 103/250 Installing : hicolor-icon-theme-0.17-2.fc28.noarch 104/250 Installing : findutils-1:4.6.0-19.fc28.x86_64 105/250 Running scriptlet: findutils-1:4.6.0-19.fc28.x86_64 105/250 Installing : java-1.8.0-openjdk-headless-1:1.8.0.162-3.b12.fc 106/250 Running scriptlet: java-1.8.0-openjdk-headless-1:1.8.0.162-3.b12.fc 106/250 Installing : javapackages-tools-5.0.0-14.fc28.noarch 107/250 Installing : slf4j-1.7.25-4.fc28.noarch 108/250 Installing : maven-resolver-api-1:1.1.0-2.fc28.noarch 109/250 Installing : plexus-utils-3.0.24-5.fc28.noarch 110/250 Installing : maven-wagon-provider-api-3.0.0-1.fc28.noarch 111/250 Installing : maven-resolver-spi-1:1.1.0-2.fc28.noarch 112/250 Installing : maven-resolver-util-1:1.1.0-2.fc28.noarch 113/250 Installing : atinject-1-27.20100611svn86.fc28.noarch 114/250 Installing : httpcomponents-core-4.4.9-4.fc28.noarch 115/250 Installing : xml-commons-apis-1.4.01-25.fc28.noarch 116/250 Installing : objectweb-asm-6.1.1-1.fc28.noarch 117/250 Installing : apache-commons-cli-1.4-4.fc28.noarch 118/250 Installing : apache-commons-codec-1.11-3.fc28.noarch 119/250 Installing : apache-commons-io-1:2.6-3.fc28.noarch 120/250 Installing : glassfish-servlet-api-3.1.0-14.fc28.noarch 121/250 Installing : hawtjni-runtime-1.15-3.fc28.noarch 122/250 Installing : jackson-core-2.9.4-2.fc28.noarch 123/250 Installing : objenesis-2.6-1.fc28.noarch 124/250 Installing : plexus-cipher-1.7-14.fc28.noarch 125/250 Installing : plexus-classworlds-2.5.2-9.fc28.noarch 126/250 Installing : plexus-containers-component-annotations-1.7.1-5. 
127/250 Installing : apache-commons-logging-1.2-13.fc28.noarch 128/250 Installing : httpcomponents-client-4.5.5-4.fc28.noarch 129/250 Installing : guava20-20.0-6.fc28.noarch 130/250 Installing : jetty-util-9.4.11-2.v20180605.fc28.noarch 131/250 Installing : jetty-io-9.4.11-2.v20180605.fc28.noarch 132/250 Installing : plexus-sec-dispatcher-1.4-24.fc28.noarch 133/250 Installing : jansi-native-1.7-5.fc28.x86_64 134/250 Installing : jansi-1.16-3.fc28.noarch 135/250 Installing : reflectasm-1.11.0-6.fc28.noarch 136/250 Installing : maven-resolver-impl-1:1.1.0-2.fc28.noarch 137/250 Installing : jcl-over-slf4j-1.7.25-4.fc28.noarch 138/250 Installing : apache-commons-lang-2.6-21.fc28.noarch 139/250 Installing : apache-commons-lang3-3.7-3.fc28.noarch 140/250 Installing : apache-ivy-2.4.0-10.fc28.noarch 141/250 Installing : base64coder-20101219-20.fc28.noarch 142/250 Installing : snakeyaml-1.17-5.fc28.noarch 143/250 Installing : beust-jcommander-1.71-3.fc28.noarch 144/250 Installing : jackson-annotations-2.9.4-2.fc28.noarch 145/250 Installing : jackson-databind-2.9.4-3.fc28.noarch 146/250 Installing : joda-time-2.9.9-2.tzdata2017b.fc28.noarch 147/250 Installing : minlog-1.3.0-5.fc27.noarch 148/250 Installing : plexus-interpolation-1.22-9.fc28.noarch 149/250 Installing : tesla-polyglot-common-0.2.0-2.fc28.noarch 150/250 Installing : jzlib-1.1.3-8.fc28.noarch 151/250 Installing : ant-lib-1.10.1-10.fc28.noarch 152/250 Installing : bouncycastle-1.59-1.fc28.noarch 153/250 Running scriptlet: bouncycastle-1.59-1.fc28.noarch 153/250 Installing : bouncycastle-pg-1.59-1.fc28.noarch 154/250 Installing : jsch-0.1.54-6.fc28.noarch 155/250 Installing : kryo-3.0.3-5.fc28.noarch 156/250 Installing : testng-6.14.3-2.fc28.noarch 157/250 Installing : jatl-0.2.2-13.fc28.noarch 158/250 Installing : jline-2.14.6-1.fc28.noarch 159/250 Installing : rhino-1.7.7.1-4.fc28.noarch 160/250 Installing : jetty-http-9.4.11-2.v20180605.fc28.noarch 161/250 Installing : jetty-server-9.4.11-2.v20180605.fc28.noarch 162/250 Installing : apache-commons-compress-1.16.1-1.fc28.noarch 163/250 Installing : jackson-dataformat-cbor-2.9.4-3.fc28.noarch 164/250 Installing : aws-sdk-java-core-1.11.3-5.fc28.noarch 165/250 Installing : aws-sdk-java-kms-1.11.3-5.fc28.noarch 166/250 Installing : aws-sdk-java-s3-1.11.3-5.fc28.noarch 167/250 Installing : jcifs-1.3.18-7.fc28.noarch 168/250 Installing : maven-shared-utils-3.1.0-5.fc27.noarch 169/250 Installing : maven-resolver-connector-basic-1:1.1.0-2.fc28.no 170/250 Installing : maven-resolver-transport-wagon-1:1.1.0-2.fc28.no 171/250 Installing : maven-wagon-file-3.0.0-1.fc28.noarch 172/250 Installing : jul-to-slf4j-1.7.25-4.fc28.noarch 173/250 Installing : log4j-over-slf4j-1.7.25-4.fc28.noarch 174/250 Installing : ecj-1:4.7.3a-1.fc28.noarch 175/250 Installing : google-gson-2.8.2-1.fc28.noarch 176/250 Installing : jcip-annotations-1-23.20060626.fc28.noarch 177/250 Installing : jsr-305-0-0.22.20130910svn.fc28.noarch 178/250 Installing : native-platform-0.14-12.fc28.x86_64 179/250 Installing : osgi-annotation-6.0.0-7.fc28.noarch 180/250 Installing : osgi-compendium-6.0.0-5.fc28.noarch 181/250 Installing : osgi-core-6.0.0-6.fc28.noarch 182/250 Installing : aqute-bndlib-3.5.0-2.fc28.noarch 183/250 Installing : aopalliance-1.0-17.fc28.noarch 184/250 Installing : google-guice-4.1-10.fc28.noarch 185/250 Installing : hamcrest-core-1.3-23.fc28.noarch 186/250 Installing : junit-1:4.12-8.fc28.noarch 187/250 Installing : geronimo-annotation-1.0-22.fc28.noarch 188/250 Installing : jsoup-1.11.2-2.fc28.noarch 189/250 
Installing : maven-wagon-http-shared-3.0.0-1.fc28.noarch 190/250 Installing : maven-wagon-http-3.0.0-1.fc28.noarch 191/250 Installing : bcel-6.2-2.fc28.noarch 192/250 Installing : xml-commons-resolver-1.2-24.fc28.noarch 193/250 Installing : xalan-j2-2.7.1-34.fc28.noarch 194/250 Running scriptlet: xalan-j2-2.7.1-34.fc28.noarch 194/250 Installing : xerces-j2-2.11.0-31.fc28.noarch 195/250 Running scriptlet: xerces-j2-2.11.0-31.fc28.noarch 195/250 Installing : nekohtml-1.9.22-5.fc27.noarch 196/250 Installing : glassfish-el-api-3.0.1-0.7.b08.fc28.noarch 197/250 Installing : jboss-interceptors-1.2-api-1.0.0-8.fc28.noarch 198/250 Installing : cdi-api-1.2-7.fc28.noarch 199/250 Installing : sisu-inject-1:0.3.3-3.fc28.noarch 200/250 Installing : sisu-plexus-1:0.3.3-3.fc28.noarch 201/250 Installing : maven-lib-1:3.5.2-5.fc28.noarch 202/250 Installing : antlr-tool-2.7.7-53.fc28.noarch 203/250 Installing : extra166y-1.7.0-9.fc28.noarch 204/250 Installing : multiverse-0.7.0-7.fc27.noarch 205/250 Installing : xpp3-1.1.4-18.c.fc28.noarch 206/250 Installing : xpp3-minimal-1.1.4-18.c.fc28.noarch 207/250 Installing : xstream-1.4.9-7.fc28.noarch 208/250 Installing : felix-osgi-core-1.4.0-23.fc28.noarch 209/250 Installing : jcsp-1.1-0.8.rc5.fc28.noarch 210/250 Installing : apache-commons-collections-3.2.2-8.fc28.noarch 211/250 Installing : java-1.8.0-openjdk-1:1.8.0.162-3.b12.fc28.x86_64 212/250 Running scriptlet: java-1.8.0-openjdk-1:1.8.0.162-3.b12.fc28.x86_64 212/250 Installing : isl-0.16.1-6.fc28.x86_64 213/250 Running scriptlet: isl-0.16.1-6.fc28.x86_64 213/250 Installing : less-530-1.fc28.x86_64 214/250 Installing : git-core-2.17.1-3.fc28.x86_64 215/250 Installing : git-core-doc-2.17.1-3.fc28.noarch 216/250 Installing : libsecret-0.18.6-1.fc28.x86_64 217/250 Installing : perl-Git-2.17.1-3.fc28.noarch 218/250 Installing : git-2.17.1-3.fc28.x86_64 219/250 Installing : libatomic_ops-7.6.2-3.fc28.x86_64 220/250 Installing : gc-7.6.4-3.fc28.x86_64 221/250 Installing : guile-5:2.0.14-7.fc28.x86_64 222/250 Running scriptlet: guile-5:2.0.14-7.fc28.x86_64 222/250 Installing : make-1:4.2.1-6.fc28.x86_64 223/250 Running scriptlet: make-1:4.2.1-6.fc28.x86_64 223/250 Installing : compat-openssl10-1:1.0.2o-1.fc28.x86_64 224/250 Running scriptlet: compat-openssl10-1:1.0.2o-1.fc28.x86_64 224/250 Installing : netty-tcnative-1.1.30-9.fc28.x86_64 225/250 Installing : netty3-3.10.6-3.fc27.noarch 226/250 Installing : groovy-lib-2.4.8-6.fc28.noarch 227/250 Installing : gpars-1.2.1-11.fc28.noarch 228/250 Installing : tesla-polyglot-groovy-0.2.0-2.fc28.noarch 229/250 Installing : xbean-4.5-9.fc28.noarch 230/250 Installing : yajl-2.1.0-10.fc28.x86_64 231/250 Installing : numactl-libs-2.0.11-8.fc28.x86_64 232/250 Running scriptlet: numactl-libs-2.0.11-8.fc28.x86_64 232/250 Installing : nmap-ncat-2:7.60-12.fc28.x86_64 233/250 Installing : libwsman1-2.6.5-2.fc28.x86_64 234/250 Running scriptlet: libwsman1-2.6.5-2.fc28.x86_64 234/250 Installing : libssh2-1.8.0-7.fc28.x86_64 235/250 Installing : libnl3-3.4.0-3.fc28.x86_64 236/250 Running scriptlet: libnl3-3.4.0-3.fc28.x86_64 236/250 Installing : libvirt-libs-4.2.0-1.fc28.x86_64 237/250 Installing : rsync-3.1.3-3.fc28.x86_64 238/250 Installing : rsync-daemon-3.1.3-3.fc28.noarch 239/250 Running scriptlet: rsync-daemon-3.1.3-3.fc28.noarch 239/250 Installing : libvirt-devel-4.2.0-1.fc28.x86_64 240/250 Installing : gradle-4.3.1-7.fc28.noarch 241/250 Installing : gcc-8.1.1-5.fc28.x86_64 242/250 Running scriptlet: gcc-8.1.1-5.fc28.x86_64 242/250 Installing : 
java-1.8.0-openjdk-devel-1:1.8.0.162-3.b12.fc28. 243/250 Running scriptlet: java-1.8.0-openjdk-devel-1:1.8.0.162-3.b12.fc28. 243/250 Installing : qemu-img-2:2.12.0-0.5.rc1.fc28.x86_64 244/250 Installing : perl-IO-Socket-SSL-2.056-1.fc28.noarch 245/250 Installing : perl-Mozilla-CA-20160104-7.fc28.noarch 246/250 Installing : protobuf-compiler-3.5.0-4.fc28.x86_64 247/250 Installing : mercurial-4.4.2-4.fc28.x86_64 248/250 Installing : sudo-1.8.23-1.fc28.x86_64 249/250 Running scriptlet: sudo-1.8.23-1.fc28.x86_64 249/250 Cleanup : libgcc-8.0.1-0.20.fc28.x86_64 250/250 Running scriptlet: libgcc-8.0.1-0.20.fc28.x86_64 250/250 Running scriptlet: copy-jdk-configs-3.7-1.fc28.noarch 250/250 Running scriptlet: java-1.8.0-openjdk-1:1.8.0.162-3.b12.fc28.x86_64 250/250 Running scriptlet: guile-5:2.0.14-7.fc28.x86_64 250/250 Running scriptlet: java-1.8.0-openjdk-devel-1:1.8.0.162-3.b12.fc28. 250/250 Running scriptlet: libgcc-8.0.1-0.20.fc28.x86_64 250/250 Running scriptlet: fontconfig-2.13.0-4.fc28.x86_64 250/250 Running scriptlet: hicolor-icon-theme-0.17-2.fc28.noarch 250/250 Verifying : libvirt-devel-4.2.0-1.fc28.x86_64 1/250 Verifying : make-1:4.2.1-6.fc28.x86_64 2/250 Verifying : mercurial-4.4.2-4.fc28.x86_64 3/250 Verifying : rsync-daemon-3.1.3-3.fc28.noarch 4/250 Verifying : rsync-3.1.3-3.fc28.x86_64 5/250 Verifying : libvirt-libs-4.2.0-1.fc28.x86_64 6/250 Verifying : gc-7.6.4-3.fc28.x86_64 7/250 Verifying : guile-5:2.0.14-7.fc28.x86_64 8/250 Verifying : libnl3-3.4.0-3.fc28.x86_64 9/250 Verifying : libssh2-1.8.0-7.fc28.x86_64 10/250 Verifying : libwsman1-2.6.5-2.fc28.x86_64 11/250 Verifying : nmap-ncat-2:7.60-12.fc28.x86_64 12/250 Verifying : numactl-libs-2.0.11-8.fc28.x86_64 13/250 Verifying : yajl-2.1.0-10.fc28.x86_64 14/250 Verifying : libatomic_ops-7.6.2-3.fc28.x86_64 15/250 Verifying : git-2.17.1-3.fc28.x86_64 16/250 Verifying : git-core-2.17.1-3.fc28.x86_64 17/250 Verifying : git-core-doc-2.17.1-3.fc28.noarch 18/250 Verifying : perl-Git-2.17.1-3.fc28.noarch 19/250 Verifying : libsecret-0.18.6-1.fc28.x86_64 20/250 Verifying : perl-Getopt-Long-1:2.50-4.fc28.noarch 21/250 Verifying : perl-PathTools-3.74-1.fc28.x86_64 22/250 Verifying : perl-TermReadKey-2.37-7.fc28.x86_64 23/250 Verifying : less-530-1.fc28.x86_64 24/250 Verifying : perl-Error-1:0.17025-2.fc28.noarch 25/250 Verifying : perl-Exporter-5.72-396.fc28.noarch 26/250 Verifying : perl-Pod-Usage-4:1.69-395.fc28.noarch 27/250 Verifying : perl-Text-ParseWords-3.30-395.fc28.noarch 28/250 Verifying : perl-constant-1.33-396.fc28.noarch 29/250 Verifying : perl-Scalar-List-Utils-3:1.49-2.fc28.x86_64 30/250 Verifying : perl-Pod-Perldoc-3.28-396.fc28.noarch 31/250 Verifying : groff-base-1.22.3-15.fc28.x86_64 32/250 Verifying : perl-HTTP-Tiny-0.070-395.fc28.noarch 33/250 Verifying : perl-Pod-Simple-1:3.35-395.fc28.noarch 34/250 Verifying : perl-parent-1:0.236-395.fc28.noarch 35/250 Verifying : perl-MIME-Base64-3.15-396.fc28.x86_64 36/250 Verifying : perl-Socket-4:2.027-2.fc28.x86_64 37/250 Verifying : perl-Pod-Escapes-1:1.07-395.fc28.noarch 38/250 Verifying : perl-Text-Tabs+Wrap-2013.0523-395.fc28.noarch 39/250 Verifying : sudo-1.8.23-1.fc28.x86_64 40/250 Verifying : gcc-8.1.1-5.fc28.x86_64 41/250 Verifying : cpp-8.1.1-5.fc28.x86_64 42/250 Verifying : isl-0.16.1-6.fc28.x86_64 43/250 Verifying : libmpc-1.0.2-9.fc28.x86_64 44/250 Verifying : findutils-1:4.6.0-19.fc28.x86_64 45/250 Verifying : gradle-4.3.1-7.fc28.noarch 46/250 Verifying : apache-commons-cli-1.4-4.fc28.noarch 47/250 Verifying : apache-commons-codec-1.11-3.fc28.noarch 48/250 Verifying : 
apache-commons-compress-1.16.1-1.fc28.noarch 49/250 Verifying : apache-commons-io-1:2.6-3.fc28.noarch 50/250 Verifying : apache-commons-lang-2.6-21.fc28.noarch 51/250 Verifying : apache-commons-lang3-3.7-3.fc28.noarch 52/250 Verifying : apache-ivy-2.4.0-10.fc28.noarch 53/250 Verifying : aqute-bndlib-3.5.0-2.fc28.noarch 54/250 Verifying : atinject-1-27.20100611svn86.fc28.noarch 55/250 Verifying : aws-sdk-java-core-1.11.3-5.fc28.noarch 56/250 Verifying : aws-sdk-java-kms-1.11.3-5.fc28.noarch 57/250 Verifying : aws-sdk-java-s3-1.11.3-5.fc28.noarch 58/250 Verifying : base64coder-20101219-20.fc28.noarch 59/250 Verifying : beust-jcommander-1.71-3.fc28.noarch 60/250 Verifying : ecj-1:4.7.3a-1.fc28.noarch 61/250 Verifying : glassfish-servlet-api-3.1.0-14.fc28.noarch 62/250 Verifying : google-gson-2.8.2-1.fc28.noarch 63/250 Verifying : google-guice-4.1-10.fc28.noarch 64/250 Verifying : hawtjni-runtime-1.15-3.fc28.noarch 65/250 Verifying : hicolor-icon-theme-0.17-2.fc28.noarch 66/250 Verifying : httpcomponents-client-4.5.5-4.fc28.noarch 67/250 Verifying : httpcomponents-core-4.4.9-4.fc28.noarch 68/250 Verifying : jackson-annotations-2.9.4-2.fc28.noarch 69/250 Verifying : jackson-core-2.9.4-2.fc28.noarch 70/250 Verifying : jackson-databind-2.9.4-3.fc28.noarch 71/250 Verifying : jansi-1.16-3.fc28.noarch 72/250 Verifying : jansi-native-1.7-5.fc28.x86_64 73/250 Verifying : jatl-0.2.2-13.fc28.noarch 74/250 Verifying : jcifs-1.3.18-7.fc28.noarch 75/250 Verifying : jcip-annotations-1-23.20060626.fc28.noarch 76/250 Verifying : jcl-over-slf4j-1.7.25-4.fc28.noarch 77/250 Verifying : joda-time-2.9.9-2.tzdata2017b.fc28.noarch 78/250 Verifying : jsch-0.1.54-6.fc28.noarch 79/250 Verifying : jsr-305-0-0.22.20130910svn.fc28.noarch 80/250 Verifying : jul-to-slf4j-1.7.25-4.fc28.noarch 81/250 Verifying : junit-1:4.12-8.fc28.noarch 82/250 Verifying : kryo-3.0.3-5.fc28.noarch 83/250 Verifying : log4j-over-slf4j-1.7.25-4.fc28.noarch 84/250 Verifying : maven-lib-1:3.5.2-5.fc28.noarch 85/250 Verifying : maven-resolver-api-1:1.1.0-2.fc28.noarch 86/250 Verifying : maven-resolver-connector-basic-1:1.1.0-2.fc28.no 87/250 Verifying : maven-resolver-impl-1:1.1.0-2.fc28.noarch 88/250 Verifying : maven-resolver-spi-1:1.1.0-2.fc28.noarch 89/250 Verifying : maven-resolver-transport-wagon-1:1.1.0-2.fc28.no 90/250 Verifying : maven-resolver-util-1:1.1.0-2.fc28.noarch 91/250 Verifying : maven-wagon-file-3.0.0-1.fc28.noarch 92/250 Verifying : maven-wagon-http-3.0.0-1.fc28.noarch 93/250 Verifying : maven-wagon-http-shared-3.0.0-1.fc28.noarch 94/250 Verifying : maven-wagon-provider-api-3.0.0-1.fc28.noarch 95/250 Verifying : minlog-1.3.0-5.fc27.noarch 96/250 Verifying : native-platform-0.14-12.fc28.x86_64 97/250 Verifying : nekohtml-1.9.22-5.fc27.noarch 98/250 Verifying : objenesis-2.6-1.fc28.noarch 99/250 Verifying : plexus-cipher-1.7-14.fc28.noarch 100/250 Verifying : plexus-classworlds-2.5.2-9.fc28.noarch 101/250 Verifying : plexus-containers-component-annotations-1.7.1-5. 
102/250 Verifying : plexus-interpolation-1.22-9.fc28.noarch 103/250 Verifying : plexus-sec-dispatcher-1.4-24.fc28.noarch 104/250 Verifying : plexus-utils-3.0.24-5.fc28.noarch 105/250 Verifying : reflectasm-1.11.0-6.fc28.noarch 106/250 Verifying : rhino-1.7.7.1-4.fc28.noarch 107/250 Verifying : sisu-inject-1:0.3.3-3.fc28.noarch 108/250 Verifying : sisu-plexus-1:0.3.3-3.fc28.noarch 109/250 Verifying : slf4j-1.7.25-4.fc28.noarch 110/250 Verifying : snakeyaml-1.17-5.fc28.noarch 111/250 Verifying : tesla-polyglot-common-0.2.0-2.fc28.noarch 112/250 Verifying : tesla-polyglot-groovy-0.2.0-2.fc28.noarch 113/250 Verifying : testng-6.14.3-2.fc28.noarch 114/250 Verifying : xbean-4.5-9.fc28.noarch 115/250 Verifying : xerces-j2-2.11.0-31.fc28.noarch 116/250 Verifying : xml-commons-apis-1.4.01-25.fc28.noarch 117/250 Verifying : osgi-annotation-6.0.0-7.fc28.noarch 118/250 Verifying : osgi-compendium-6.0.0-5.fc28.noarch 119/250 Verifying : osgi-core-6.0.0-6.fc28.noarch 120/250 Verifying : apache-commons-logging-1.2-13.fc28.noarch 121/250 Verifying : jackson-dataformat-cbor-2.9.4-3.fc28.noarch 122/250 Verifying : aopalliance-1.0-17.fc28.noarch 123/250 Verifying : jzlib-1.1.3-8.fc28.noarch 124/250 Verifying : hamcrest-core-1.3-23.fc28.noarch 125/250 Verifying : geronimo-annotation-1.0-22.fc28.noarch 126/250 Verifying : maven-shared-utils-3.1.0-5.fc27.noarch 127/250 Verifying : jsoup-1.11.2-2.fc28.noarch 128/250 Verifying : bcel-6.2-2.fc28.noarch 129/250 Verifying : cdi-api-1.2-7.fc28.noarch 130/250 Verifying : xalan-j2-2.7.1-34.fc28.noarch 131/250 Verifying : xml-commons-resolver-1.2-24.fc28.noarch 132/250 Verifying : glassfish-el-api-3.0.1-0.7.b08.fc28.noarch 133/250 Verifying : jboss-interceptors-1.2-api-1.0.0-8.fc28.noarch 134/250 Verifying : qemu-img-2:2.12.0-0.5.rc1.fc28.x86_64 135/250 Verifying : libaio-0.3.110-11.fc28.x86_64 136/250 Verifying : protobuf-compiler-3.5.0-4.fc28.x86_64 137/250 Verifying : protobuf-3.5.0-4.fc28.x86_64 138/250 Verifying : javapackages-tools-5.0.0-14.fc28.noarch 139/250 Verifying : java-1.8.0-openjdk-headless-1:1.8.0.162-3.b12.fc 140/250 Verifying : which-2.21-8.fc28.x86_64 141/250 Verifying : freetype-2.8-10.fc28.x86_64 142/250 Verifying : lksctp-tools-1.0.16-9.fc28.x86_64 143/250 Verifying : groovy-lib-2.4.8-6.fc28.noarch 144/250 Verifying : antlr-tool-2.7.7-53.fc28.noarch 145/250 Verifying : gpars-1.2.1-11.fc28.noarch 146/250 Verifying : xstream-1.4.9-7.fc28.noarch 147/250 Verifying : extra166y-1.7.0-9.fc28.noarch 148/250 Verifying : jcsp-1.1-0.8.rc5.fc28.noarch 149/250 Verifying : multiverse-0.7.0-7.fc27.noarch 150/250 Verifying : netty3-3.10.6-3.fc27.noarch 151/250 Verifying : xpp3-1.1.4-18.c.fc28.noarch 152/250 Verifying : xpp3-minimal-1.1.4-18.c.fc28.noarch 153/250 Verifying : felix-osgi-core-1.4.0-23.fc28.noarch 154/250 Verifying : netty-tcnative-1.1.30-9.fc28.x86_64 155/250 Verifying : apr-1.6.3-5.fc28.x86_64 156/250 Verifying : compat-openssl10-1:1.0.2o-1.fc28.x86_64 157/250 Verifying : objectweb-asm-6.1.1-1.fc28.noarch 158/250 Verifying : jline-2.14.6-1.fc28.noarch 159/250 Verifying : libstdc++-8.1.1-5.fc28.x86_64 160/250 Verifying : perl-interpreter-4:5.26.2-413.fc28.x86_64 161/250 Verifying : perl-libs-4:5.26.2-413.fc28.x86_64 162/250 Verifying : perl-File-Path-2.15-2.fc28.noarch 163/250 Verifying : perl-Unicode-Normalize-1.25-396.fc28.x86_64 164/250 Verifying : perl-threads-1:2.21-2.fc28.x86_64 165/250 Verifying : perl-threads-shared-1.58-2.fc28.x86_64 166/250 Verifying : perl-Errno-1.28-413.fc28.x86_64 167/250 Verifying : perl-Carp-1.42-396.fc28.noarch 
168/250 Verifying : perl-podlators-4.11-1.fc28.noarch 169/250 Verifying : perl-Term-ANSIColor-4.06-396.fc28.noarch 170/250 Verifying : perl-Term-Cap-1.17-395.fc28.noarch 171/250 Verifying : ncurses-6.1-4.20180224.fc28.x86_64 172/250 Verifying : perl-Encode-4:2.97-3.fc28.x86_64 173/250 Verifying : perl-File-Temp-0.230.600-1.fc28.noarch 174/250 Verifying : perl-IO-1.38-413.fc28.x86_64 175/250 Verifying : perl-Time-Local-1:1.280-1.fc28.noarch 176/250 Verifying : perl-Storable-1:3.11-2.fc28.x86_64 177/250 Verifying : emacs-filesystem-1:26.1-3.fc28.noarch 178/250 Verifying : python2-2.7.15-2.fc28.x86_64 179/250 Verifying : python2-libs-2.7.15-2.fc28.x86_64 180/250 Verifying : guava20-20.0-6.fc28.noarch 181/250 Verifying : copy-jdk-configs-3.7-1.fc28.noarch 182/250 Verifying : lua-posix-33.3.1-8.fc28.x86_64 183/250 Verifying : libjpeg-turbo-1.5.3-6.fc28.x86_64 184/250 Verifying : tzdata-java-2018e-1.fc28.noarch 185/250 Verifying : publicsuffix-list-20180514-1.fc28.noarch 186/250 Verifying : libtool-ltdl-2.4.6-24.fc28.x86_64 187/250 Verifying : libpng-2:1.6.34-6.fc28.x86_64 188/250 Verifying : capstone-3.0.4-16.fc28.x86_64 189/250 Verifying : avahi-libs-0.7-13.fc28.x86_64 190/250 Verifying : cyrus-sasl-2.1.27-0.1rc7.fc28.x86_64 191/250 Verifying : cyrus-sasl-gssapi-2.1.27-0.1rc7.fc28.x86_64 192/250 Verifying : pkgconf-pkg-config-1.4.2-1.fc28.x86_64 193/250 Verifying : pkgconf-1.4.2-1.fc28.x86_64 194/250 Verifying : pkgconf-m4-1.4.2-1.fc28.noarch 195/250 Verifying : libpkgconf-1.4.2-1.fc28.x86_64 196/250 Verifying : python2-pip-9.0.3-2.fc28.noarch 197/250 Verifying : python2-setuptools-39.2.0-6.fc28.noarch 198/250 Verifying : perl-macros-4:5.26.2-413.fc28.x86_64 199/250 Verifying : ant-lib-1.10.1-10.fc28.noarch 200/250 Verifying : apache-commons-collections-3.2.2-8.fc28.noarch 201/250 Verifying : bouncycastle-1.59-1.fc28.noarch 202/250 Verifying : bouncycastle-pg-1.59-1.fc28.noarch 203/250 Verifying : jetty-server-9.4.11-2.v20180605.fc28.noarch 204/250 Verifying : jetty-http-9.4.11-2.v20180605.fc28.noarch 205/250 Verifying : jetty-io-9.4.11-2.v20180605.fc28.noarch 206/250 Verifying : jetty-util-9.4.11-2.v20180605.fc28.noarch 207/250 Verifying : openssh-clients-7.7p1-5.fc28.x86_64 208/250 Verifying : openssh-7.7p1-5.fc28.x86_64 209/250 Verifying : fipscheck-lib-1.5.0-4.fc28.x86_64 210/250 Verifying : libedit-3.1-23.20170329cvs.fc28.x86_64 211/250 Verifying : fipscheck-1.5.0-4.fc28.x86_64 212/250 Verifying : binutils-2.29.1-23.fc28.x86_64 213/250 Verifying : glibc-devel-2.27-8.fc28.x86_64 214/250 Verifying : libxcrypt-devel-4.0.0-5.fc28.x86_64 215/250 Verifying : glibc-headers-2.27-8.fc28.x86_64 216/250 Verifying : libgomp-8.1.1-5.fc28.x86_64 217/250 Verifying : lua-5.3.4-10.fc28.x86_64 218/250 Verifying : kernel-headers-4.17.12-1.fc28.x86_64 219/250 Verifying : perl-Mozilla-CA-20160104-7.fc28.noarch 220/250 Verifying : perl-IO-Socket-SSL-2.056-1.fc28.noarch 221/250 Verifying : perl-IO-Socket-IP-0.39-5.fc28.noarch 222/250 Verifying : perl-Net-SSLeay-1.85-1.fc28.x86_64 223/250 Verifying : perl-URI-1.73-2.fc28.noarch 224/250 Verifying : perl-Data-Dumper-2.167-399.fc28.x86_64 225/250 Verifying : perl-libnet-3.11-3.fc28.noarch 226/250 Verifying : perl-Digest-MD5-2.55-396.fc28.x86_64 227/250 Verifying : perl-Digest-1.17-395.fc28.noarch 228/250 Verifying : java-1.8.0-openjdk-devel-1:1.8.0.162-3.b12.fc28. 
229/250 Verifying : java-1.8.0-openjdk-1:1.8.0.162-3.b12.fc28.x86_64 230/250 Verifying : libX11-1.6.5-7.fc28.x86_64 231/250 Verifying : alsa-lib-1.1.6-2.fc28.x86_64 232/250 Verifying : giflib-5.1.4-1.fc28.x86_64 233/250 Verifying : libXcomposite-0.4.4-12.fc28.x86_64 234/250 Verifying : libXext-1.3.3-8.fc28.x86_64 235/250 Verifying : libXi-1.7.9-6.fc28.x86_64 236/250 Verifying : libXrender-0.9.10-5.fc28.x86_64 237/250 Verifying : libXtst-1.2.3-5.fc28.x86_64 238/250 Verifying : xorg-x11-fonts-Type1-7.5-19.fc28.noarch 239/250 Verifying : libX11-common-1.6.5-7.fc28.noarch 240/250 Verifying : libxcb-1.13-1.fc28.x86_64 241/250 Verifying : ttmkfdir-3.0.9-54.fc28.x86_64 242/250 Verifying : libXau-1.0.8-11.fc28.x86_64 243/250 Verifying : fontconfig-2.13.0-4.fc28.x86_64 244/250 Verifying : fontpackages-filesystem-1.44-21.fc28.noarch 245/250 Verifying : xorg-x11-font-utils-1:7.5-38.fc28.x86_64 246/250 Verifying : libfontenc-1.1.3-7.fc28.x86_64 247/250 Verifying : aajohan-comfortaa-fonts-3.001-2.fc28.noarch 248/250 Verifying : libgcc-8.1.1-5.fc28.x86_64 249/250 Verifying : libgcc-8.0.1-0.20.fc28.x86_64 250/250 Installed: findutils.x86_64 1:4.6.0-19.fc28 gcc.x86_64 8.1.1-5.fc28 git.x86_64 2.17.1-3.fc28 gradle.noarch 4.3.1-7.fc28 libvirt-devel.x86_64 4.2.0-1.fc28 make.x86_64 1:4.2.1-6.fc28 mercurial.x86_64 4.4.2-4.fc28 protobuf-compiler.x86_64 3.5.0-4.fc28 qemu-img.x86_64 2:2.12.0-0.5.rc1.fc28 rsync.x86_64 3.1.3-3.fc28 rsync-daemon.noarch 3.1.3-3.fc28 sudo.x86_64 1.8.23-1.fc28 java-1.8.0-openjdk-devel.x86_64 1:1.8.0.162-3.b12.fc28 perl-IO-Socket-SSL.noarch 2.056-1.fc28 perl-Mozilla-CA.noarch 20160104-7.fc28 aajohan-comfortaa-fonts.noarch 3.001-2.fc28 alsa-lib.x86_64 1.1.6-2.fc28 ant-lib.noarch 1.10.1-10.fc28 antlr-tool.noarch 2.7.7-53.fc28 aopalliance.noarch 1.0-17.fc28 apache-commons-cli.noarch 1.4-4.fc28 apache-commons-codec.noarch 1.11-3.fc28 apache-commons-collections.noarch 3.2.2-8.fc28 apache-commons-compress.noarch 1.16.1-1.fc28 apache-commons-io.noarch 1:2.6-3.fc28 apache-commons-lang.noarch 2.6-21.fc28 apache-commons-lang3.noarch 3.7-3.fc28 apache-commons-logging.noarch 1.2-13.fc28 apache-ivy.noarch 2.4.0-10.fc28 apr.x86_64 1.6.3-5.fc28 aqute-bndlib.noarch 3.5.0-2.fc28 atinject.noarch 1-27.20100611svn86.fc28 avahi-libs.x86_64 0.7-13.fc28 aws-sdk-java-core.noarch 1.11.3-5.fc28 aws-sdk-java-kms.noarch 1.11.3-5.fc28 aws-sdk-java-s3.noarch 1.11.3-5.fc28 base64coder.noarch 20101219-20.fc28 bcel.noarch 6.2-2.fc28 beust-jcommander.noarch 1.71-3.fc28 binutils.x86_64 2.29.1-23.fc28 bouncycastle.noarch 1.59-1.fc28 bouncycastle-pg.noarch 1.59-1.fc28 capstone.x86_64 3.0.4-16.fc28 cdi-api.noarch 1.2-7.fc28 compat-openssl10.x86_64 1:1.0.2o-1.fc28 copy-jdk-configs.noarch 3.7-1.fc28 cpp.x86_64 8.1.1-5.fc28 cyrus-sasl.x86_64 2.1.27-0.1rc7.fc28 cyrus-sasl-gssapi.x86_64 2.1.27-0.1rc7.fc28 ecj.noarch 1:4.7.3a-1.fc28 emacs-filesystem.noarch 1:26.1-3.fc28 extra166y.noarch 1.7.0-9.fc28 felix-osgi-core.noarch 1.4.0-23.fc28 fipscheck.x86_64 1.5.0-4.fc28 fipscheck-lib.x86_64 1.5.0-4.fc28 fontconfig.x86_64 2.13.0-4.fc28 fontpackages-filesystem.noarch 1.44-21.fc28 freetype.x86_64 2.8-10.fc28 gc.x86_64 7.6.4-3.fc28 geronimo-annotation.noarch 1.0-22.fc28 giflib.x86_64 5.1.4-1.fc28 git-core.x86_64 2.17.1-3.fc28 git-core-doc.noarch 2.17.1-3.fc28 glassfish-el-api.noarch 3.0.1-0.7.b08.fc28 glassfish-servlet-api.noarch 3.1.0-14.fc28 glibc-devel.x86_64 2.27-8.fc28 glibc-headers.x86_64 2.27-8.fc28 google-gson.noarch 2.8.2-1.fc28 google-guice.noarch 4.1-10.fc28 gpars.noarch 1.2.1-11.fc28 groff-base.x86_64 1.22.3-15.fc28 
groovy-lib.noarch 2.4.8-6.fc28 guava20.noarch 20.0-6.fc28 guile.x86_64 5:2.0.14-7.fc28 hamcrest-core.noarch 1.3-23.fc28 hawtjni-runtime.noarch 1.15-3.fc28 hicolor-icon-theme.noarch 0.17-2.fc28 httpcomponents-client.noarch 4.5.5-4.fc28 httpcomponents-core.noarch 4.4.9-4.fc28 isl.x86_64 0.16.1-6.fc28 jackson-annotations.noarch 2.9.4-2.fc28 jackson-core.noarch 2.9.4-2.fc28 jackson-databind.noarch 2.9.4-3.fc28 jackson-dataformat-cbor.noarch 2.9.4-3.fc28 jansi.noarch 1.16-3.fc28 jansi-native.x86_64 1.7-5.fc28 jatl.noarch 0.2.2-13.fc28 java-1.8.0-openjdk.x86_64 1:1.8.0.162-3.b12.fc28 java-1.8.0-openjdk-headless.x86_64 1:1.8.0.162-3.b12.fc28 javapackages-tools.noarch 5.0.0-14.fc28 jboss-interceptors-1.2-api.noarch 1.0.0-8.fc28 jcifs.noarch 1.3.18-7.fc28 jcip-annotations.noarch 1-23.20060626.fc28 jcl-over-slf4j.noarch 1.7.25-4.fc28 jcsp.noarch 1.1-0.8.rc5.fc28 jetty-http.noarch 9.4.11-2.v20180605.fc28 jetty-io.noarch 9.4.11-2.v20180605.fc28 jetty-server.noarch 9.4.11-2.v20180605.fc28 jetty-util.noarch 9.4.11-2.v20180605.fc28 jline.noarch 2.14.6-1.fc28 joda-time.noarch 2.9.9-2.tzdata2017b.fc28 jsch.noarch 0.1.54-6.fc28 jsoup.noarch 1.11.2-2.fc28 jsr-305.noarch 0-0.22.20130910svn.fc28 jul-to-slf4j.noarch 1.7.25-4.fc28 junit.noarch 1:4.12-8.fc28 jzlib.noarch 1.1.3-8.fc28 kernel-headers.x86_64 4.17.12-1.fc28 kryo.noarch 3.0.3-5.fc28 less.x86_64 530-1.fc28 libX11.x86_64 1.6.5-7.fc28 libX11-common.noarch 1.6.5-7.fc28 libXau.x86_64 1.0.8-11.fc28 libXcomposite.x86_64 0.4.4-12.fc28 libXext.x86_64 1.3.3-8.fc28 libXi.x86_64 1.7.9-6.fc28 libXrender.x86_64 0.9.10-5.fc28 libXtst.x86_64 1.2.3-5.fc28 libaio.x86_64 0.3.110-11.fc28 libatomic_ops.x86_64 7.6.2-3.fc28 libedit.x86_64 3.1-23.20170329cvs.fc28 libfontenc.x86_64 1.1.3-7.fc28 libgomp.x86_64 8.1.1-5.fc28 libjpeg-turbo.x86_64 1.5.3-6.fc28 libmpc.x86_64 1.0.2-9.fc28 libnl3.x86_64 3.4.0-3.fc28 libpkgconf.x86_64 1.4.2-1.fc28 libpng.x86_64 2:1.6.34-6.fc28 libsecret.x86_64 0.18.6-1.fc28 libssh2.x86_64 1.8.0-7.fc28 libstdc++.x86_64 8.1.1-5.fc28 libtool-ltdl.x86_64 2.4.6-24.fc28 libvirt-libs.x86_64 4.2.0-1.fc28 libwsman1.x86_64 2.6.5-2.fc28 libxcb.x86_64 1.13-1.fc28 libxcrypt-devel.x86_64 4.0.0-5.fc28 lksctp-tools.x86_64 1.0.16-9.fc28 log4j-over-slf4j.noarch 1.7.25-4.fc28 lua.x86_64 5.3.4-10.fc28 lua-posix.x86_64 33.3.1-8.fc28 maven-lib.noarch 1:3.5.2-5.fc28 maven-resolver-api.noarch 1:1.1.0-2.fc28 maven-resolver-connector-basic.noarch 1:1.1.0-2.fc28 maven-resolver-impl.noarch 1:1.1.0-2.fc28 maven-resolver-spi.noarch 1:1.1.0-2.fc28 maven-resolver-transport-wagon.noarch 1:1.1.0-2.fc28 maven-resolver-util.noarch 1:1.1.0-2.fc28 maven-shared-utils.noarch 3.1.0-5.fc27 maven-wagon-file.noarch 3.0.0-1.fc28 maven-wagon-http.noarch 3.0.0-1.fc28 maven-wagon-http-shared.noarch 3.0.0-1.fc28 maven-wagon-provider-api.noarch 3.0.0-1.fc28 minlog.noarch 1.3.0-5.fc27 multiverse.noarch 0.7.0-7.fc27 native-platform.x86_64 0.14-12.fc28 ncurses.x86_64 6.1-4.20180224.fc28 nekohtml.noarch 1.9.22-5.fc27 netty-tcnative.x86_64 1.1.30-9.fc28 netty3.noarch 3.10.6-3.fc27 nmap-ncat.x86_64 2:7.60-12.fc28 numactl-libs.x86_64 2.0.11-8.fc28 objectweb-asm.noarch 6.1.1-1.fc28 objenesis.noarch 2.6-1.fc28 openssh.x86_64 7.7p1-5.fc28 openssh-clients.x86_64 7.7p1-5.fc28 osgi-annotation.noarch 6.0.0-7.fc28 osgi-compendium.noarch 6.0.0-5.fc28 osgi-core.noarch 6.0.0-6.fc28 perl-Carp.noarch 1.42-396.fc28 perl-Data-Dumper.x86_64 2.167-399.fc28 perl-Digest.noarch 1.17-395.fc28 perl-Digest-MD5.x86_64 2.55-396.fc28 perl-Encode.x86_64 4:2.97-3.fc28 perl-Errno.x86_64 1.28-413.fc28 perl-Error.noarch 1:0.17025-2.fc28 
perl-Exporter.noarch 5.72-396.fc28 perl-File-Path.noarch 2.15-2.fc28 perl-File-Temp.noarch 0.230.600-1.fc28 perl-Getopt-Long.noarch 1:2.50-4.fc28 perl-Git.noarch 2.17.1-3.fc28 perl-HTTP-Tiny.noarch 0.070-395.fc28 perl-IO.x86_64 1.38-413.fc28 perl-IO-Socket-IP.noarch 0.39-5.fc28 perl-MIME-Base64.x86_64 3.15-396.fc28 perl-Net-SSLeay.x86_64 1.85-1.fc28 perl-PathTools.x86_64 3.74-1.fc28 perl-Pod-Escapes.noarch 1:1.07-395.fc28 perl-Pod-Perldoc.noarch 3.28-396.fc28 perl-Pod-Simple.noarch 1:3.35-395.fc28 perl-Pod-Usage.noarch 4:1.69-395.fc28 perl-Scalar-List-Utils.x86_64 3:1.49-2.fc28 perl-Socket.x86_64 4:2.027-2.fc28 perl-Storable.x86_64 1:3.11-2.fc28 perl-Term-ANSIColor.noarch 4.06-396.fc28 perl-Term-Cap.noarch 1.17-395.fc28 perl-TermReadKey.x86_64 2.37-7.fc28 perl-Text-ParseWords.noarch 3.30-395.fc28 perl-Text-Tabs+Wrap.noarch 2013.0523-395.fc28 perl-Time-Local.noarch 1:1.280-1.fc28 perl-URI.noarch 1.73-2.fc28 perl-Unicode-Normalize.x86_64 1.25-396.fc28 perl-constant.noarch 1.33-396.fc28 perl-interpreter.x86_64 4:5.26.2-413.fc28 perl-libnet.noarch 3.11-3.fc28 perl-libs.x86_64 4:5.26.2-413.fc28 perl-macros.x86_64 4:5.26.2-413.fc28 perl-parent.noarch 1:0.236-395.fc28 perl-podlators.noarch 4.11-1.fc28 perl-threads.x86_64 1:2.21-2.fc28 perl-threads-shared.x86_64 1.58-2.fc28 pkgconf.x86_64 1.4.2-1.fc28 pkgconf-m4.noarch 1.4.2-1.fc28 pkgconf-pkg-config.x86_64 1.4.2-1.fc28 plexus-cipher.noarch 1.7-14.fc28 plexus-classworlds.noarch 2.5.2-9.fc28 plexus-containers-component-annotations.noarch 1.7.1-5.fc28 plexus-interpolation.noarch 1.22-9.fc28 plexus-sec-dispatcher.noarch 1.4-24.fc28 plexus-utils.noarch 3.0.24-5.fc28 protobuf.x86_64 3.5.0-4.fc28 publicsuffix-list.noarch 20180514-1.fc28 python2.x86_64 2.7.15-2.fc28 python2-libs.x86_64 2.7.15-2.fc28 python2-pip.noarch 9.0.3-2.fc28 python2-setuptools.noarch 39.2.0-6.fc28 reflectasm.noarch 1.11.0-6.fc28 rhino.noarch 1.7.7.1-4.fc28 sisu-inject.noarch 1:0.3.3-3.fc28 sisu-plexus.noarch 1:0.3.3-3.fc28 slf4j.noarch 1.7.25-4.fc28 snakeyaml.noarch 1.17-5.fc28 tesla-polyglot-common.noarch 0.2.0-2.fc28 tesla-polyglot-groovy.noarch 0.2.0-2.fc28 testng.noarch 6.14.3-2.fc28 ttmkfdir.x86_64 3.0.9-54.fc28 tzdata-java.noarch 2018e-1.fc28 which.x86_64 2.21-8.fc28 xalan-j2.noarch 2.7.1-34.fc28 xbean.noarch 4.5-9.fc28 xerces-j2.noarch 2.11.0-31.fc28 xml-commons-apis.noarch 1.4.01-25.fc28 xml-commons-resolver.noarch 1.2-24.fc28 xorg-x11-font-utils.x86_64 1:7.5-38.fc28 xorg-x11-fonts-Type1.noarch 7.5-19.fc28 xpp3.noarch 1.1.4-18.c.fc28 xpp3-minimal.noarch 1.1.4-18.c.fc28 xstream.noarch 1.4.9-7.fc28 yajl.x86_64 2.1.0-10.fc28 Upgraded: libgcc.x86_64 8.1.1-5.fc28 Complete! 
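[Editor's note] The transaction above supplies the build dependencies for the KubeVirt builder container; the same command appears verbatim as Step 4/12 of the Docker build replayed below. As a standalone sketch (LIBVIRT_VERSION is pinned to 4.2.0 by an ENV instruction earlier in that Dockerfile):

  # Builder-image dependency step, reproduced as a plain shell command.
  LIBVIRT_VERSION=4.2.0
  dnf -y install libvirt-devel-${LIBVIRT_VERSION} make git mercurial sudo gcc \
      findutils gradle rsync-daemon rsync qemu-img protobuf-compiler \
    && dnf -y clean all  # clear the dnf cache so the image layer stays small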
23 files removed
 ---> eb371a3c3dd7
Removing intermediate container dd34c74dbdd3
Step 5/12 : ENV GIMME_GO_VERSION 1.10
 ---> Running in 8faf708ee963
 ---> 00bd41e23b72
Removing intermediate container 8faf708ee963
Step 6/12 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh
 ---> Running in d3b4a8b6101f
 ---> 2eff0f9cf187
Removing intermediate container d3b4a8b6101f
Step 7/12 : ENV GOPATH "/go" GOBIN "/usr/bin"
 ---> Running in d1395297b05d
 ---> 2c077e0348e1
Removing intermediate container d1395297b05d
Step 8/12 : ADD rsyncd.conf /etc/rsyncd.conf
 ---> 0a4b9ff54405
Removing intermediate container 0a96343541c9
Step 9/12 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/mattn/goveralls && go get -u github.com/Masterminds/glide && go get golang.org/x/tools/cmd/goimports && git clone https://github.com/mvdan/sh.git $GOPATH/src/mvdan.cc/sh && cd /go/src/mvdan.cc/sh/cmd/shfmt && git checkout v2.5.0 && go get mvdan.cc/sh/cmd/shfmt && go install && go get -u github.com/golang/mock/gomock && go get -u github.com/rmohr/mock/mockgen && go get -u github.com/rmohr/go-swagger-utils/swagger-doc && go get -u github.com/onsi/ginkgo/ginkgo && go get -u -d k8s.io/code-generator/cmd/deepcopy-gen && go get -u -d k8s.io/code-generator/cmd/defaulter-gen && go get -u -d k8s.io/code-generator/cmd/openapi-gen && cd /go/src/k8s.io/code-generator/cmd/deepcopy-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/defaulter-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/openapi-gen && git checkout release-1.9 && go install && go get -u -d github.com/golang/protobuf/protoc-gen-go && cd /go/src/github.com/golang/protobuf/protoc-gen-go && git checkout 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 && go install
 ---> Running in 03784251263b
go version go1.10 linux/amd64
Cloning into '/go/src/mvdan.cc/sh'...
Note: checking out 'v2.5.0'.
You are in 'detached HEAD' state. You can look around, make experimental changes and commit them, and you can discard any commits you make in this state without impacting any branches by performing another checkout.
If you want to create a new branch to retain commits you create, you may do so (now or later) by using -b with the checkout command again. Example:
  git checkout -b <new-branch-name>
HEAD is now at 5f66499 all: bump to 2.5.0
Switched to a new branch 'release-1.9'
Branch 'release-1.9' set up to track remote branch 'release-1.9' from 'origin'.
Already on 'release-1.9'
Your branch is up to date with 'origin/release-1.9'.
Already on 'release-1.9'
Your branch is up to date with 'origin/release-1.9'.
Note: checking out '1643683e1b54a9e88ad26d98f81400c8c9d9f4f9'.
You are in 'detached HEAD' state. You can look around, make experimental changes and commit them, and you can discard any commits you make in this state without impacting any branches by performing another checkout.
If you want to create a new branch to retain commits you create, you may do so (now or later) by using -b with the checkout command again. Example:
  git checkout -b <new-branch-name>
HEAD is now at 1643683 Add godoc badge (#444)
 ---> af8b0399fb8c
Removing intermediate container 03784251263b
Step 10/12 : RUN pip install j2cli
 ---> Running in 20498022ce54
WARNING: Running pip install with root privileges is generally not a good idea. Try `pip install --user` instead.
Collecting j2cli Downloading https://files.pythonhosted.org/packages/6a/fb/c67a5da25bc7f5fd840727ea742748df981ee425350cc33d57ed7e2cc78d/j2cli-0.3.1_0-py2-none-any.whl Collecting jinja2>=2.7.2 (from j2cli) Downloading https://files.pythonhosted.org/packages/7f/ff/ae64bacdfc95f27a016a7bed8e8686763ba4d277a78ca76f32659220a731/Jinja2-2.10-py2.py3-none-any.whl (126kB) Collecting MarkupSafe>=0.23 (from jinja2>=2.7.2->j2cli) Downloading https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz Installing collected packages: MarkupSafe, jinja2, j2cli Running setup.py install for MarkupSafe: started Running setup.py install for MarkupSafe: finished with status 'done' Successfully installed MarkupSafe-1.0 j2cli-0.3.1-0 jinja2-2.10 ---> 7f5aa591f4d3 Removing intermediate container 20498022ce54 Step 11/12 : ADD entrypoint.sh /entrypoint.sh ---> 884caaba5a50 Removing intermediate container e8ce675fe5ab Step 12/12 : ENTRYPOINT /entrypoint.sh ---> Running in b0b045cf7b56 ---> 14dca350ccfd Removing intermediate container b0b045cf7b56 Successfully built 14dca350ccfd go version go1.10 linux/amd64 go version go1.10 linux/amd64 make[1]: Entering directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' hack/dockerized "./hack/check.sh && KUBEVIRT_VERSION= ./hack/build-go.sh install " && ./hack/build-copy-artifacts.sh Sending build context to Docker daemon 7.168 kB Step 1/12 : FROM fedora:28 ---> cc510acfcd70 Step 2/12 : ENV LIBVIRT_VERSION 4.2.0 ---> Using cache ---> b1088795aeb6 Step 3/12 : COPY fedora-virt-preview.repo /etc/yum.repos.d/fedora-virt-preview.repo ---> Using cache ---> 1966e43b79cf Step 4/12 : RUN dnf -y install libvirt-devel-${LIBVIRT_VERSION} make git mercurial sudo gcc findutils gradle rsync-daemon rsync qemu-img protobuf-compiler && dnf -y clean all ---> Using cache ---> eb371a3c3dd7 Step 5/12 : ENV GIMME_GO_VERSION 1.10 ---> Using cache ---> 00bd41e23b72 Step 6/12 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 2eff0f9cf187 Step 7/12 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 2c077e0348e1 Step 8/12 : ADD rsyncd.conf /etc/rsyncd.conf ---> Using cache ---> 0a4b9ff54405 Step 9/12 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/mattn/goveralls && go get -u github.com/Masterminds/glide && go get golang.org/x/tools/cmd/goimports && git clone https://github.com/mvdan/sh.git $GOPATH/src/mvdan.cc/sh && cd /go/src/mvdan.cc/sh/cmd/shfmt && git checkout v2.5.0 && go get mvdan.cc/sh/cmd/shfmt && go install && go get -u github.com/golang/mock/gomock && go get -u github.com/rmohr/mock/mockgen && go get -u github.com/rmohr/go-swagger-utils/swagger-doc && go get -u github.com/onsi/ginkgo/ginkgo && go get -u -d k8s.io/code-generator/cmd/deepcopy-gen && go get -u -d k8s.io/code-generator/cmd/defaulter-gen && go get -u -d k8s.io/code-generator/cmd/openapi-gen && cd /go/src/k8s.io/code-generator/cmd/deepcopy-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/defaulter-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/openapi-gen && git checkout release-1.9 && go install && go get -u -d github.com/golang/protobuf/protoc-gen-go && cd /go/src/github.com/golang/protobuf/protoc-gen-go && git checkout 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 && go install ---> Using 
cache ---> af8b0399fb8c Step 10/12 : RUN pip install j2cli ---> Using cache ---> 7f5aa591f4d3 Step 11/12 : ADD entrypoint.sh /entrypoint.sh ---> Using cache ---> 884caaba5a50 Step 12/12 : ENTRYPOINT /entrypoint.sh ---> Using cache ---> 14dca350ccfd Successfully built 14dca350ccfd go version go1.10 linux/amd64 go version go1.10 linux/amd64 find: '/root/go/src/kubevirt.io/kubevirt/_out/cmd': No such file or directory Compiling tests... compiled tests.test hack/build-docker.sh build Sending build context to Docker daemon 40.4 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-controller ---> Using cache ---> b00c84523b53 Step 4/8 : WORKDIR /home/virt-controller ---> Using cache ---> b76b8bd8cd39 Step 5/8 : USER 1001 ---> Using cache ---> b6d9ad9ed232 Step 6/8 : COPY virt-controller /usr/bin/virt-controller ---> a7866f39119c Removing intermediate container 1d5b81c2fce4 Step 7/8 : ENTRYPOINT /usr/bin/virt-controller ---> Running in 70c83b5af7c8 ---> 486ea13c01e7 Removing intermediate container 70c83b5af7c8 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-controller" '' ---> Running in 65b64766a092 ---> ae58c5cd6e43 Removing intermediate container 65b64766a092 Successfully built ae58c5cd6e43 Sending build context to Docker daemon 43.34 MB Step 1/10 : FROM kubevirt/libvirt:4.2.0 ---> 5f0bfe81a3e0 Step 2/10 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> 945996802736 Step 3/10 : RUN dnf -y install socat genisoimage util-linux libcgroup-tools ethtool net-tools sudo && dnf -y clean all && test $(id -u qemu) = 107 # make sure that the qemu user really is 107 ---> Using cache ---> 672f9ab56316 Step 4/10 : COPY virt-launcher /usr/bin/virt-launcher ---> 154250ab6e4b Removing intermediate container 3d75b5af93d1 Step 5/10 : COPY kubevirt-sudo /etc/sudoers.d/kubevirt ---> b4e3e4056194 Removing intermediate container bed769bc8da9 Step 6/10 : RUN setcap CAP_NET_BIND_SERVICE=+eip /usr/bin/qemu-system-x86_64 ---> Running in 8cf8bdd73eb4  ---> 0ab577a2b51c Removing intermediate container 8cf8bdd73eb4 Step 7/10 : RUN mkdir -p /usr/share/kubevirt/virt-launcher ---> Running in 4599d7183438  ---> 11ab97abd5f9 Removing intermediate container 4599d7183438 Step 8/10 : COPY entrypoint.sh libvirtd.sh sock-connector /usr/share/kubevirt/virt-launcher/ ---> c5071973935e Removing intermediate container c5a0469f630d Step 9/10 : ENTRYPOINT /usr/share/kubevirt/virt-launcher/entrypoint.sh ---> Running in 42782087ff83 ---> e52a80695701 Removing intermediate container 42782087ff83 Step 10/10 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-launcher" '' ---> Running in fe82ac289742 ---> ee09f3573cfc Removing intermediate container fe82ac289742 Successfully built ee09f3573cfc Sending build context to Docker daemon 39.88 MB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : COPY virt-handler /usr/bin/virt-handler ---> a31fcdd97f8b Removing intermediate container b2254c4dc82c Step 4/5 : ENTRYPOINT /usr/bin/virt-handler ---> Running in 10e07cdf7518 ---> 38e8908ab6fb Removing intermediate container 10e07cdf7518 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-handler" '' ---> Running in 4432ce5f8ef0 ---> c5c181c8da32 Removing intermediate container 4432ce5f8ef0 Successfully built c5c181c8da32 Sending build context to Docker daemon 
38.84 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virt-api ---> Using cache ---> ed1ebf600ee1 Step 4/8 : WORKDIR /home/virt-api ---> Using cache ---> 0769dad023e5 Step 5/8 : USER 1001 ---> Using cache ---> 0cb65afb0c2b Step 6/8 : COPY virt-api /usr/bin/virt-api ---> 3f90973ddd85 Removing intermediate container ef6b4a219789 Step 7/8 : ENTRYPOINT /usr/bin/virt-api ---> Running in f4ccde2492ba ---> 76b5eb1b7d99 Removing intermediate container f4ccde2492ba Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "virt-api" '' ---> Running in fe880057ec3f ---> 6c8e3b108543 Removing intermediate container fe880057ec3f Successfully built 6c8e3b108543 Sending build context to Docker daemon 4.096 kB Step 1/7 : FROM fedora:28 ---> cc510acfcd70 Step 2/7 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/7 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/7 : RUN mkdir -p /images/custom /images/alpine && truncate -s 64M /images/custom/disk.img && curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /images/alpine/disk.img ---> Using cache ---> 02134835a6aa Step 5/7 : ADD entrypoint.sh / ---> Using cache ---> ec0843818da7 Step 6/7 : CMD /entrypoint.sh ---> Using cache ---> 754029bb4bd2 Step 7/7 : LABEL "disks-images-provider" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 62865f4ecc63 Successfully built 62865f4ecc63 Sending build context to Docker daemon 2.56 kB Step 1/5 : FROM fedora:28 ---> cc510acfcd70 Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/5 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/5 : RUN dnf -y install procps-ng nmap-ncat && dnf -y clean all ---> Using cache ---> 207487abe7b2 Step 5/5 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "vm-killer" '' ---> Using cache ---> 5010ae5d394d Successfully built 5010ae5d394d Sending build context to Docker daemon 5.12 kB Step 1/7 : FROM debian:sid ---> 68f33cf86aab Step 2/7 : MAINTAINER "David Vossel" \ ---> Using cache ---> 5734d749eb5c Step 3/7 : ENV container docker ---> Using cache ---> f8775a77966f Step 4/7 : RUN apt-get update && apt-get install -y bash curl bzip2 qemu-utils && mkdir -p /disk && rm -rf /var/lib/apt/lists/* ---> Using cache ---> 1a40cf222a61 Step 5/7 : ADD entry-point.sh / ---> Using cache ---> 77b545d92fe7 Step 6/7 : CMD /entry-point.sh ---> Using cache ---> dfe20d463305 Step 7/7 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "registry-disk-v1alpha" '' ---> Using cache ---> 0b5943290e54 Successfully built 0b5943290e54 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33801/kubevirt/registry-disk-v1alpha:devel ---> 0b5943290e54 Step 2/4 : MAINTAINER "David Vossel" \ ---> Using cache ---> e9efaa3526f9 Step 3/4 : RUN curl https://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img > /disk/cirros.img ---> Using cache ---> d6391c453bab Step 4/4 : LABEL "cirros-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 80e77e1b44a6 Successfully built 80e77e1b44a6 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33801/kubevirt/registry-disk-v1alpha:devel ---> 0b5943290e54 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> ac03f50b9da5 Step 3/4 : RUN curl -g -L 
https://download.fedoraproject.org/pub/fedora/linux/releases/27/CloudImages/x86_64/images/Fedora-Cloud-Base-27-1.6.x86_64.qcow2 > /disk/fedora.qcow2 ---> Using cache ---> 1a3318492264 Step 4/4 : LABEL "fedora-cloud-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> 80de376602a4 Successfully built 80de376602a4 Sending build context to Docker daemon 2.56 kB Step 1/4 : FROM localhost:33801/kubevirt/registry-disk-v1alpha:devel ---> 0b5943290e54 Step 2/4 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> ac03f50b9da5 Step 3/4 : RUN curl http://dl-cdn.alpinelinux.org/alpine/v3.7/releases/x86_64/alpine-virt-3.7.0-x86_64.iso > /disk/alpine.iso ---> Using cache ---> 079e1a5c8014 Step 4/4 : LABEL "alpine-registry-disk-demo" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Using cache ---> f50d303718d1 Successfully built f50d303718d1 Sending build context to Docker daemon 35.59 MB Step 1/8 : FROM fedora:28 ---> cc510acfcd70 Step 2/8 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/8 : RUN useradd -u 1001 --create-home -s /bin/bash virtctl ---> Using cache ---> 985fe391c056 Step 4/8 : WORKDIR /home/virtctl ---> Using cache ---> 3b2cae8ac543 Step 5/8 : USER 1001 ---> Using cache ---> 0c06e5b4a900 Step 6/8 : COPY subresource-access-test /subresource-access-test ---> f70276db7e56 Removing intermediate container a7e80650a578 Step 7/8 : ENTRYPOINT /subresource-access-test ---> Running in 689ee09e47e8 ---> 9ab45b0babe4 Removing intermediate container 689ee09e47e8 Step 8/8 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "subresource-access-test" '' ---> Running in 7e35497ccdfe ---> 6e744ee9b1f9 Removing intermediate container 7e35497ccdfe Successfully built 6e744ee9b1f9 Sending build context to Docker daemon 3.072 kB Step 1/9 : FROM fedora:28 ---> cc510acfcd70 Step 2/9 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> bfe77d5699ed Step 3/9 : ENV container docker ---> Using cache ---> 62847a2a1fa8 Step 4/9 : RUN dnf -y install make git gcc && dnf -y clean all ---> Using cache ---> d3456b1644b1 Step 5/9 : ENV GIMME_GO_VERSION 1.9.2 ---> Using cache ---> 0ba81fddbba1 Step 6/9 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh ---> Using cache ---> 5d33abe3f819 Step 7/9 : ENV GOPATH "/go" GOBIN "/usr/bin" ---> Using cache ---> 783826523be1 Step 8/9 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get github.com/masterzen/winrm-cli ---> Using cache ---> 711bc8d15952 Step 9/9 : LABEL "kubevirt-functional-tests-openshift-3.10-release0" '' "winrmcli" '' ---> Using cache ---> 0126e68c086f Successfully built 0126e68c086f Sending build context to Docker daemon 36.8 MB Step 1/5 : FROM fedora:27 ---> 9110ae7f579f Step 2/5 : MAINTAINER "The KubeVirt Project" ---> Using cache ---> e3238544ad97 Step 3/5 : COPY example-hook-sidecar /example-hook-sidecar ---> 400bf92dbece Removing intermediate container 7289c14aaf87 Step 4/5 : ENTRYPOINT /example-hook-sidecar ---> Running in 00d715307e14 ---> cfed7521643a Removing intermediate container 00d715307e14 Step 5/5 : LABEL "example-hook-sidecar" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in c251f66090a1 ---> 647d9a5d50f1 Removing intermediate container c251f66090a1 Successfully built 647d9a5d50f1 Sending build context to Docker daemon 2.048 kB Step 1/4 : FROM fedora:27 ---> 9110ae7f579f Step 2/4 : RUN dnf install -y iproute ---> 
Running in e56ad4d4fd31
Fedora 27 - x86_64 - Updates                    3.9 MB/s |  25 MB     00:06
Fedora 27 - x86_64                              2.6 MB/s |  58 MB     00:22
Last metadata expiration check: 0:00:18 ago on Fri Aug 10 12:59:03 2018.
Dependencies resolved.
================================================================================
 Package               Arch        Version              Repository        Size
================================================================================
Installing:
 iproute               x86_64      4.15.0-1.fc27        updates          534 k
Installing dependencies:
 libmnl                x86_64      1.0.4-4.fc27         fedora            28 k
 linux-atm-libs        x86_64      2.5.1-19.fc27        fedora            40 k
Installing weak dependencies:
 iproute-tc            x86_64      4.15.0-1.fc27        updates          389 k

Transaction Summary
================================================================================
Install  4 Packages

Total download size: 992 k
Installed size: 2.1 M
Downloading Packages:
(1/4): libmnl-1.0.4-4.fc27.x86_64.rpm            90 kB/s |  28 kB     00:00
(2/4): iproute-4.15.0-1.fc27.x86_64.rpm         1.2 MB/s | 534 kB     00:00
(3/4): iproute-tc-4.15.0-1.fc27.x86_64.rpm      891 kB/s | 389 kB     00:00
(4/4): linux-atm-libs-2.5.1-19.fc27.x86_64.rpm  250 kB/s |  40 kB     00:00
--------------------------------------------------------------------------------
Total                                           713 kB/s | 992 kB     00:01
Running transaction check
Transaction check succeeded.
Running transaction test
Transaction test succeeded.
Running transaction
  Preparing        :                                                        1/1
  Installing       : libmnl-1.0.4-4.fc27.x86_64                             1/4
  Running scriptlet: libmnl-1.0.4-4.fc27.x86_64                             1/4
  Installing       : iproute-4.15.0-1.fc27.x86_64                           2/4
  Installing       : linux-atm-libs-2.5.1-19.fc27.x86_64                    3/4
  Running scriptlet: linux-atm-libs-2.5.1-19.fc27.x86_64                    3/4
  Installing       : iproute-tc-4.15.0-1.fc27.x86_64                        4/4
  Verifying        : iproute-4.15.0-1.fc27.x86_64                           1/4
  Verifying        : libmnl-1.0.4-4.fc27.x86_64                             2/4
  Verifying        : iproute-tc-4.15.0-1.fc27.x86_64                        3/4
  Verifying        : linux-atm-libs-2.5.1-19.fc27.x86_64                    4/4

Installed:
  iproute.x86_64 4.15.0-1.fc27           iproute-tc.x86_64 4.15.0-1.fc27
  libmnl.x86_64 1.0.4-4.fc27             linux-atm-libs.x86_64 2.5.1-19.fc27

Complete!
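[Editor's note] The four-package transaction above belongs to the iproute helper image built by Steps 1/4 through 4/4 surrounding it. A minimal sketch of the same build, fed to docker via stdin; the LABEL step that the harness appends with the job prefix is left out, and the tag name here is illustrative only:

  # Build the tiny Fedora 27 + iproute test image used by the functional tests.
  docker build -t kubevirt/iproute - <<'EOF'
  FROM fedora:27
  RUN dnf install -y iproute
  ENTRYPOINT ip link
  EOF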
---> 44a843668d44 Removing intermediate container e56ad4d4fd31 Step 3/4 : ENTRYPOINT ip link ---> Running in a59a03183cac ---> 221ee0c7da70 Removing intermediate container a59a03183cac Step 4/4 : LABEL "iproute" '' "kubevirt-functional-tests-openshift-3.10-release0" '' ---> Running in 2a954d5d338d ---> 3cd87235e4bc Removing intermediate container 2a954d5d338d Successfully built 3cd87235e4bc hack/build-docker.sh push The push refers to a repository [localhost:33801/kubevirt/virt-controller] 56638acf3823: Preparing aa89340cf7a8: Preparing 891e1e4ef82a: Preparing aa89340cf7a8: Pushed 56638acf3823: Pushed 891e1e4ef82a: Pushed devel: digest: sha256:45ec64a7906ea892bd6dc8775aaa21b6a510266186109cd413e10fdf3e95768d size: 949 The push refers to a repository [localhost:33801/kubevirt/virt-launcher] 791ebddf07ed: Preparing d41f4099a391: Preparing da7aab95343d: Preparing 101636a3b888: Preparing 1d7ddb750c7a: Preparing 633427c64a24: Preparing da38cf808aa5: Preparing b83399358a92: Preparing 186d8b3e4fd8: Preparing b83399358a92: Waiting fa6154170bf5: Preparing 5eefb9960a36: Preparing fa6154170bf5: Waiting 633427c64a24: Waiting 891e1e4ef82a: Preparing 5eefb9960a36: Waiting 891e1e4ef82a: Waiting 101636a3b888: Pushed 791ebddf07ed: Pushed d41f4099a391: Pushed da38cf808aa5: Pushed b83399358a92: Pushed fa6154170bf5: Pushed 186d8b3e4fd8: Pushed 891e1e4ef82a: Mounted from kubevirt/virt-controller da7aab95343d: Pushed 633427c64a24: Pushed 1d7ddb750c7a: Pushed 5eefb9960a36: Pushed devel: digest: sha256:5d7845c583c27c73b0b412149d621b4ecb4c215d893837a07efc03c23f43977c size: 2828 The push refers to a repository [localhost:33801/kubevirt/virt-handler] a4a45e4abcf4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-launcher a4a45e4abcf4: Pushed devel: digest: sha256:efda95665fa61e64a9379f256fa29fa7d2d4daaa9ff77a388d7fee059055695a size: 741 The push refers to a repository [localhost:33801/kubevirt/virt-api] 14e27e01a68b: Preparing 82fc744c99b4: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-handler 82fc744c99b4: Pushed 14e27e01a68b: Pushed devel: digest: sha256:8b822162ca81e4b927f37f63af42618417c316442de0d7b8f6c37d312ea52527 size: 948 The push refers to a repository [localhost:33801/kubevirt/disks-images-provider] 71ad31feb2c5: Preparing 21d4b721776e: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/virt-api 71ad31feb2c5: Pushed 21d4b721776e: Pushed devel: digest: sha256:45c1398f0e0e0dfa1d0ffd9a5820c35b0265a0704c97ad0042f3aabc2c90a8d0 size: 948 The push refers to a repository [localhost:33801/kubevirt/vm-killer] c4cfadeeaf5f: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/disks-images-provider c4cfadeeaf5f: Pushed devel: digest: sha256:dcef0057ad57b5de85bda952040ef5e0b443583eddb4c5f19cffe52687b36933 size: 740 The push refers to a repository [localhost:33801/kubevirt/registry-disk-v1alpha] 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 661cce8d8e52: Pushed 41e0baba3077: Pushed 25edbec0eaea: Pushed devel: digest: sha256:2aa3a343034ec8c6c91473d67cb4ed114ecef63d7bf47b9d16a9b730c01c09a4 size: 948 The push refers to a repository [localhost:33801/kubevirt/cirros-registry-disk-demo] 67be3c17199c: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/registry-disk-v1alpha 25edbec0eaea: Mounted from kubevirt/registry-disk-v1alpha 661cce8d8e52: Mounted from kubevirt/registry-disk-v1alpha 67be3c17199c: Pushed devel: digest: 
sha256:e961f556a7512377f9f2ead1091f53f043fcd96f9ed5fdd1b094bb05765fd07f size: 1160 The push refers to a repository [localhost:33801/kubevirt/fedora-cloud-registry-disk-demo] 7b3286b1c2c3: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 41e0baba3077: Mounted from kubevirt/cirros-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/cirros-registry-disk-demo 25edbec0eaea: Mounted from kubevirt/cirros-registry-disk-demo 7b3286b1c2c3: Pushed devel: digest: sha256:f0897a5762bf1421a548da17563c4aefaf8a2cf940e983f833292f5cbee2cb68 size: 1161 The push refers to a repository [localhost:33801/kubevirt/alpine-registry-disk-demo] a76b2bda8de9: Preparing 661cce8d8e52: Preparing 41e0baba3077: Preparing 25edbec0eaea: Preparing 25edbec0eaea: Mounted from kubevirt/fedora-cloud-registry-disk-demo 661cce8d8e52: Mounted from kubevirt/fedora-cloud-registry-disk-demo 41e0baba3077: Mounted from kubevirt/fedora-cloud-registry-disk-demo a76b2bda8de9: Pushed devel: digest: sha256:8fdf5fd97dcaff2cd5d450a438b6715157f1912badfa2440add61ff2cd26baa5 size: 1160 The push refers to a repository [localhost:33801/kubevirt/subresource-access-test] e9b1601213c0: Preparing 25cb73590a9d: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/vm-killer 25cb73590a9d: Pushed e9b1601213c0: Pushed devel: digest: sha256:5e6dce38329bdbaa7ba3622787271851991f61f66865ea6c016874442674fa6f size: 948 The push refers to a repository [localhost:33801/kubevirt/winrmcli] f8083e002d0b: Preparing 53c709abc882: Preparing 9ca98a0f492b: Preparing 891e1e4ef82a: Preparing 891e1e4ef82a: Mounted from kubevirt/subresource-access-test f8083e002d0b: Pushed 9ca98a0f492b: Pushed 53c709abc882: Pushed devel: digest: sha256:fce668ff8b26f3e1da97c445bfc5c8046721ddcf125aa9d4a3b94fcba9692c13 size: 1165 The push refers to a repository [localhost:33801/kubevirt/example-hook-sidecar] b599ffa08672: Preparing 39bae602f753: Preparing b599ffa08672: Pushed 39bae602f753: Pushed devel: digest: sha256:bc17952ff133250490812f3f3696ac09d66bf80b3b0822fa68bf80f72bdc3e57 size: 740 The push refers to a repository [localhost:33801/kubevirt/iproute] 241f938eb21e: Preparing 39bae602f753: Preparing 39bae602f753: Mounted from kubevirt/example-hook-sidecar 241f938eb21e: Pushed devel: digest: sha256:db4a0f1d0093ec79c91264b0afd437d0b62e0e4386276a8f7d3031e62c74aab3 size: 742 make[1]: Leaving directory `/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt' Done ./cluster/clean.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ 
MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.8.0-alpha.0-15-g5670416 ++ KUBEVIRT_VERSION=v0.8.0-alpha.0-15-g5670416 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:14ffc4a28e24a2510c9b455b56f35f6193a00b71c9150705f6afec41b003fc76 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace image_pull_policy ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar images/iproute' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system +++ image_pull_policy=IfNotPresent ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33801/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace image_pull_policy + echo 'Cleaning up ...' Cleaning up ... 
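The clean step that follows tears KubeVirt down by label rather than by name: every object the release manifests create carries a kubevirt.io label, so the script first clears any VMIs stuck on the foregroundDeleteVirtualMachine finalizer, force-deletes the libvirt and virt-handler pods, and then sweeps each resource kind in the default and kube-system namespaces with a label selector. Condensed to its core pattern (a sketch only; the real clean.sh expands every delete inline, as the trace below shows):

    namespaces=(default kube-system)
    for ns in "${namespaces[@]}"; do
        for kind in apiservices deployment rs services validatingwebhookconfiguration \
                    secrets pv pvc ds customresourcedefinitions pods \
                    clusterrolebinding rolebinding roles clusterroles serviceaccounts; do
            # _kubectl wraps cluster/os-3.10.0/.kubectl with KUBECONFIG pointed at the provider kubeconfig
            _kubectl -n "$ns" delete "$kind" -l kubevirt.io
        done
    done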
+ cluster/kubectl.sh get vmis --all-namespaces -o=custom-columns=NAME:.metadata.name,NAMESPACE:.metadata.namespace,FINALIZERS:.metadata.finalizers --no-headers + grep foregroundDeleteVirtualMachine + read p error: the server doesn't have a resource type "vmis" + _kubectl delete ds -l kubevirt.io -n kube-system --cascade=false --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=libvirt --force --grace-period 0 No resources found + _kubectl delete pods -n kube-system -l=kubevirt.io=virt-handler --force --grace-period 0 No resources found + namespaces=(default ${namespace}) + for i in '${namespaces[@]}' + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete deployment -l kubevirt.io No resources found + _kubectl -n default delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rs -l kubevirt.io No resources found + _kubectl -n default delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete services -l kubevirt.io No resources found + _kubectl -n default delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete apiservices -l kubevirt.io No resources found + _kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n default delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete secrets -l kubevirt.io No resources found + _kubectl -n default delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pv -l kubevirt.io No resources found + _kubectl -n default delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pvc -l kubevirt.io No resources found + _kubectl -n default delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete ds -l kubevirt.io No resources found + _kubectl -n default delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n default delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete pods -l kubevirt.io No resources found + _kubectl -n default delete 
clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n default delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete rolebinding -l kubevirt.io No resources found + _kubectl -n default delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete roles -l kubevirt.io No resources found + _kubectl -n default delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete clusterroles -l kubevirt.io No resources found + _kubectl -n default delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n default delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n default get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n default get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + for i in '${namespaces[@]}' + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete deployment -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete deployment -l kubevirt.io No resources found + _kubectl -n kube-system delete rs -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rs -l kubevirt.io No resources found + _kubectl -n kube-system delete services -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete services -l kubevirt.io No resources found + _kubectl -n kube-system delete apiservices -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete apiservices -l kubevirt.io No resources found + _kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete validatingwebhookconfiguration -l kubevirt.io No resources found + _kubectl -n kube-system delete secrets -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete secrets -l kubevirt.io No resources found + _kubectl -n kube-system delete pv -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n 
kube-system delete pv -l kubevirt.io No resources found + _kubectl -n kube-system delete pvc -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pvc -l kubevirt.io No resources found + _kubectl -n kube-system delete ds -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete ds -l kubevirt.io No resources found + _kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete customresourcedefinitions -l kubevirt.io No resources found + _kubectl -n kube-system delete pods -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete pods -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterrolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterrolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete rolebinding -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete rolebinding -l kubevirt.io No resources found + _kubectl -n kube-system delete roles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete roles -l kubevirt.io No resources found + _kubectl -n kube-system delete clusterroles -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete clusterroles -l kubevirt.io No resources found + _kubectl -n kube-system delete serviceaccounts -l kubevirt.io + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl -n kube-system delete serviceaccounts -l kubevirt.io No resources found ++ _kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io ++ wc -l ++ export KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ KUBECONFIG=cluster/os-3.10.0/.kubeconfig ++ cluster/os-3.10.0/.kubectl -n kube-system get crd offlinevirtualmachines.kubevirt.io Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "offlinevirtualmachines.kubevirt.io" not found + '[' 0 -gt 0 ']' + sleep 2 + echo Done Done ./cluster/deploy.sh + source hack/common.sh ++++ dirname 'hack/common.sh[0]' +++ cd hack/../ +++ pwd ++ KUBEVIRT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt ++ OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out ++ VENDOR_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/vendor ++ CMD_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/cmd ++ TESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/tests ++ 
APIDOCS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/apidocs ++ MANIFESTS_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests ++ MANIFEST_TEMPLATES_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/templates/manifests ++ PYTHON_CLIENT_OUT_DIR=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/client-python ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ KUBEVIRT_NUM_NODES=2 ++ '[' -z kubevirt-functional-tests-openshift-3.10-release ']' ++ provider_prefix=kubevirt-functional-tests-openshift-3.10-release0 ++ job_prefix=kubevirt-functional-tests-openshift-3.10-release0 +++ kubevirt_version +++ '[' -n '' ']' +++ '[' -d /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/.git ']' ++++ git describe --always --tags +++ echo v0.8.0-alpha.0-15-g5670416 ++ KUBEVIRT_VERSION=v0.8.0-alpha.0-15-g5670416 + source cluster/os-3.10.0/provider.sh ++ set -e ++ image=os-3.10.0@sha256:14ffc4a28e24a2510c9b455b56f35f6193a00b71c9150705f6afec41b003fc76 ++ source cluster/ephemeral-provider-common.sh +++ set -e +++ _cli='docker run --privileged --net=host --rm -v /var/run/docker.sock:/var/run/docker.sock kubevirtci/gocli@sha256:aa7f295a7908fa333ab5e98ef3af0bfafbabfd3cee2b83f9af47f722e3000f6a' + source hack/config.sh ++ unset binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig manifest_docker_prefix namespace image_pull_policy ++ KUBEVIRT_PROVIDER=os-3.10.0 ++ source hack/config-default.sh source hack/config-os-3.10.0.sh +++ binaries='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virtctl cmd/fake-qemu-process cmd/virt-api cmd/subresource-access-test cmd/example-hook-sidecar' +++ docker_images='cmd/virt-controller cmd/virt-launcher cmd/virt-handler cmd/virt-api images/disks-images-provider images/vm-killer cmd/registry-disk-v1alpha images/cirros-registry-disk-demo images/fedora-cloud-registry-disk-demo images/alpine-registry-disk-demo cmd/subresource-access-test images/winrmcli cmd/example-hook-sidecar images/iproute' +++ docker_prefix=kubevirt +++ docker_tag=latest +++ master_ip=192.168.200.2 +++ network_provider=flannel +++ namespace=kube-system +++ image_pull_policy=IfNotPresent ++ test -f hack/config-provider-os-3.10.0.sh ++ source hack/config-provider-os-3.10.0.sh +++ master_ip=127.0.0.1 +++ docker_tag=devel +++ kubeconfig=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubeconfig +++ kubectl=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/cluster/os-3.10.0/.kubectl +++ docker_prefix=localhost:33801/kubevirt +++ manifest_docker_prefix=registry:5000/kubevirt ++ test -f hack/config-local.sh ++ export binaries docker_images docker_prefix docker_tag manifest_templates master_ip network_provider kubeconfig namespace image_pull_policy + echo 'Deploying ...' Deploying ... 
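The deploy step applies every manifest under ${MANIFESTS_OUT_DIR}/release except demo content, recursively applies the testing manifests, and, because the os-3.10.0 provider is OpenShift, grants the privileged SCC to the KubeVirt service accounts and the admin user. The shape of the step, reconstructed from the trace below (a sketch, not the verbatim deploy.sh):

    for manifest in ${MANIFESTS_OUT_DIR}/release/*; do
        [[ $manifest =~ .*demo.* ]] && continue   # skips demo-content.yaml
        _kubectl create -f "$manifest"
    done
    _kubectl create -f ${MANIFESTS_OUT_DIR}/testing -R
    if [[ $KUBEVIRT_PROVIDER =~ os-* ]]; then
        for sa in kubevirt-controller kubevirt-testing kubevirt-privileged kubevirt-apiserver; do
            _kubectl adm policy add-scc-to-user privileged -z "$sa" -n kube-system
        done
        _kubectl adm policy add-scc-to-user privileged admin
    fi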
+ [[ -z openshift-3.10-release ]] + [[ openshift-3.10-release =~ .*-dev ]] + [[ openshift-3.10-release =~ .*-release ]] + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/demo-content.yaml =~ .*demo.* ]] + continue + for manifest in '${MANIFESTS_OUT_DIR}/release/*' + [[ /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml =~ .*demo.* ]] + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/release/kubevirt.yaml clusterrole.rbac.authorization.k8s.io "kubevirt.io:admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:edit" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:view" created serviceaccount "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-apiserver-auth-delegator" created rolebinding.rbac.authorization.k8s.io "kubevirt-apiserver" created role.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-apiserver" created clusterrole.rbac.authorization.k8s.io "kubevirt-controller" created serviceaccount "kubevirt-controller" created serviceaccount "kubevirt-privileged" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-controller" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-privileged-cluster-admin" created clusterrole.rbac.authorization.k8s.io "kubevirt.io:default" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt.io:default" created service "virt-api" created deployment.extensions "virt-api" created deployment.extensions "virt-controller" created daemonset.extensions "virt-handler" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstances.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancereplicasets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachineinstancepresets.kubevirt.io" created customresourcedefinition.apiextensions.k8s.io "virtualmachines.kubevirt.io" created + _kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl create -f /var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/go/src/kubevirt.io/kubevirt/_out/manifests/testing -R persistentvolumeclaim "disk-alpine" created persistentvolume "host-path-disk-alpine" created persistentvolumeclaim "disk-custom" created persistentvolume "host-path-disk-custom" created daemonset.extensions "disks-images-provider" created serviceaccount "kubevirt-testing" created clusterrolebinding.rbac.authorization.k8s.io "kubevirt-testing-cluster-admin" created + [[ os-3.10.0 =~ os-* ]] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + 
KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-controller -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-controller"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-testing -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-testing"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-privileged -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-privileged"] + _kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged -z kubevirt-apiserver -n kube-system scc "privileged" added to: ["system:serviceaccount:kube-system:kubevirt-apiserver"] + _kubectl adm policy add-scc-to-user privileged admin + export KUBECONFIG=cluster/os-3.10.0/.kubeconfig + KUBECONFIG=cluster/os-3.10.0/.kubeconfig + cluster/os-3.10.0/.kubectl adm policy add-scc-to-user privileged admin scc "privileged" added to: ["admin"] + echo Done Done + namespaces=(kube-system default) + [[ kube-system != \k\u\b\e\-\s\y\s\t\e\m ]] + timeout=300 + sample=30 + for i in '${namespaces[@]}' + current_time=0 ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n 'disks-images-provider-9dvv5 0/1 ContainerCreating 0 2s disks-images-provider-whnc4 0/1 ContainerCreating 0 2s virt-api-7586947775-m8zhx 0/1 ContainerCreating 0 4s virt-api-7586947775-wlwq8 0/1 ContainerCreating 0 4s virt-controller-7d57d96b65-gcsvg 0/1 ContainerCreating 0 4s virt-controller-7d57d96b65-jv2nk 0/1 ContainerCreating 0 4s virt-handler-642wc 0/1 ContainerCreating 0 4s virt-handler-fkj9p 0/1 ContainerCreating 0 4s' ']' + echo 'Waiting for kubevirt pods to enter the Running state ...' Waiting for kubevirt pods to enter the Running state ... + kubectl get pods -n kube-system --no-headers + cluster/kubectl.sh get pods -n kube-system --no-headers + grep -v Running disks-images-provider-9dvv5 0/1 ContainerCreating 0 3s disks-images-provider-whnc4 0/1 ContainerCreating 0 3s virt-api-7586947775-m8zhx 0/1 ContainerCreating 0 5s virt-api-7586947775-wlwq8 0/1 ContainerCreating 0 5s virt-controller-7d57d96b65-gcsvg 0/1 ContainerCreating 0 5s virt-controller-7d57d96b65-jv2nk 0/1 ContainerCreating 0 5s virt-handler-642wc 0/1 ContainerCreating 0 5s virt-handler-fkj9p 0/1 ContainerCreating 0 5s + sleep 30 + current_time=30 + '[' 30 -gt 300 ']' ++ kubectl get pods -n kube-system --no-headers ++ cluster/kubectl.sh get pods -n kube-system --no-headers ++ grep -v Running + '[' -n '' ']' + current_time=0 ++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers ++ grep false ++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers + '[' -n false ']' + echo 'Waiting for KubeVirt containers to become ready ...' 
Waiting for KubeVirt containers to become ready ...
+ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
+ grep false
+ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
false
+ sleep 30
+ current_time=30
+ '[' 30 -gt 300 ']'
++ kubectl get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ cluster/kubectl.sh get pods -n kube-system '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
+ '[' -n '' ']'
+ kubectl get pods -n kube-system
+ cluster/kubectl.sh get pods -n kube-system
NAME                               READY     STATUS    RESTARTS   AGE
disks-images-provider-9dvv5        1/1       Running   0          1m
disks-images-provider-whnc4        1/1       Running   0          1m
master-api-node01                  1/1       Running   1          8d
master-controllers-node01         1/1       Running   1          8d
master-etcd-node01                 1/1       Running   1          8d
virt-api-7586947775-m8zhx          1/1       Running   0          1m
virt-api-7586947775-wlwq8          1/1       Running   0          1m
virt-controller-7d57d96b65-gcsvg   1/1       Running   0          1m
virt-controller-7d57d96b65-jv2nk   1/1       Running   0          1m
virt-handler-642wc                 1/1       Running   0          1m
virt-handler-fkj9p                 1/1       Running   0          1m
+ for i in '${namespaces[@]}'
+ current_time=0
++ kubectl get pods -n default --no-headers
++ cluster/kubectl.sh get pods -n default --no-headers
++ grep -v Running
+ '[' -n '' ']'
+ current_time=0
++ kubectl get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ cluster/kubectl.sh get pods -n default '-ocustom-columns=status:status.containerStatuses[*].ready' --no-headers
++ grep false
+ '[' -n '' ']'
+ kubectl get pods -n default
+ cluster/kubectl.sh get pods -n default
NAME                             READY     STATUS    RESTARTS   AGE
docker-registry-1-92ssr          1/1       Running   1          8d
local-volume-provisioner-bhs9g   1/1       Running   0          25m
local-volume-provisioner-l9xff   1/1       Running   0          25m
registry-console-1-k87ct         1/1       Running   1          8d
router-1-49jwl                   1/1       Running   1          8d
+ kubectl version
+ cluster/kubectl.sh version
oc v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
features: Basic-Auth GSSAPI Kerberos SPNEGO

Server https://127.0.0.1:33798
openshift v3.10.0-rc.0+c20e215
kubernetes v1.10.0+b81c8f8
+ ginko_params='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ [[ openshift-3.10-release =~ windows.* ]]
+ FUNC_TEST_ARGS='--ginkgo.noColor --junit-output=/var/lib/swarm/workspace/kubevirt-functional-tests-openshift-3.10-release/junit.xml'
+ make functest
hack/dockerized "hack/build-func-tests.sh"
Sending build context to Docker daemon 7.168 kB
Step 1/12 : FROM fedora:28
 ---> cc510acfcd70
Step 2/12 : ENV LIBVIRT_VERSION 4.2.0
 ---> Using cache
 ---> b1088795aeb6
Step 3/12 : COPY fedora-virt-preview.repo /etc/yum.repos.d/fedora-virt-preview.repo
 ---> Using cache
 ---> 1966e43b79cf
Step 4/12 : RUN dnf -y install libvirt-devel-${LIBVIRT_VERSION} make git mercurial sudo gcc findutils gradle rsync-daemon rsync qemu-img protobuf-compiler && dnf -y clean all
 ---> Using cache
 ---> eb371a3c3dd7
Step 5/12 : ENV GIMME_GO_VERSION 1.10
 ---> Using cache
 ---> 00bd41e23b72
Step 6/12 : RUN mkdir -p /gimme && curl -sL https://raw.githubusercontent.com/travis-ci/gimme/master/gimme | HOME=/gimme bash >> /etc/profile.d/gimme.sh
 ---> Using cache
 ---> 2eff0f9cf187
Step 7/12 : ENV GOPATH "/go" GOBIN "/usr/bin"
 ---> Using cache
 ---> 2c077e0348e1
Step 8/12 : ADD rsyncd.conf /etc/rsyncd.conf
 ---> Using cache
 ---> 0a4b9ff54405
Step 9/12 : RUN mkdir -p /go && source /etc/profile.d/gimme.sh && go get
github.com/mattn/goveralls && go get -u github.com/Masterminds/glide && go get golang.org/x/tools/cmd/goimports && git clone https://github.com/mvdan/sh.git $GOPATH/src/mvdan.cc/sh && cd /go/src/mvdan.cc/sh/cmd/shfmt && git checkout v2.5.0 && go get mvdan.cc/sh/cmd/shfmt && go install && go get -u github.com/golang/mock/gomock && go get -u github.com/rmohr/mock/mockgen && go get -u github.com/rmohr/go-swagger-utils/swagger-doc && go get -u github.com/onsi/ginkgo/ginkgo && go get -u -d k8s.io/code-generator/cmd/deepcopy-gen && go get -u -d k8s.io/code-generator/cmd/defaulter-gen && go get -u -d k8s.io/code-generator/cmd/openapi-gen && cd /go/src/k8s.io/code-generator/cmd/deepcopy-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/defaulter-gen && git checkout release-1.9 && go install && cd /go/src/k8s.io/code-generator/cmd/openapi-gen && git checkout release-1.9 && go install && go get -u -d github.com/golang/protobuf/protoc-gen-go && cd /go/src/github.com/golang/protobuf/protoc-gen-go && git checkout 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 && go install ---> Using cache ---> af8b0399fb8c Step 10/12 : RUN pip install j2cli ---> Using cache ---> 7f5aa591f4d3 Step 11/12 : ADD entrypoint.sh /entrypoint.sh ---> Using cache ---> 884caaba5a50 Step 12/12 : ENTRYPOINT /entrypoint.sh ---> Using cache ---> 14dca350ccfd Successfully built 14dca350ccfd go version go1.10 linux/amd64 go version go1.10 linux/amd64 Compiling tests... compiled tests.test hack/functests.sh Running Suite: Tests Suite ========================== Random Seed: 1533906630 Will run 163 of 163 specs • [SLOW TEST:18.672 seconds] VNC /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:46 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:54 with VNC connection /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:62 should allow accessing the VNC device /root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:64 ------------------------------ •• ------------------------------ • [SLOW TEST:39.597 seconds] LeaderElection /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:43 Start a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:53 when the controller pod is not running /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:54 should success /root/go/src/kubevirt.io/kubevirt/tests/controller_leader_election_test.go:55 ------------------------------ • [SLOW TEST:51.968 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:48 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:83 should have cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:84 ------------------------------ • [SLOW TEST:117.990 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:48 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 with cloudInitNoCloud userDataBase64 source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:83 with injected ssh-key /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:94 should have ssh-key under authorized keys /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:95 ------------------------------ • [SLOW TEST:59.046 seconds] CloudInit UserData 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:48 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 with cloudInitNoCloud userData source /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:120 should process provided cloud-init data /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:121 ------------------------------ • [SLOW TEST:50.034 seconds] CloudInit UserData /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:48 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:82 should take user-data from k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_userdata_test.go:164 ------------------------------ • [SLOW TEST:36.799 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:34.948 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:116.257 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with Disk PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:124.539 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 with Alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:71 should be successfully started and stopped multiple times /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 with CDRom PVC /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:49.117 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk defined /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:113 should create a writeable emptyDisk with the right capacity /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:115 ------------------------------ • [SLOW TEST:52.976 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With an emptyDisk 
defined and a specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:163 should create a writeable emptyDisk with the specified serial number /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:165 ------------------------------ • [SLOW TEST:35.715 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should be successfully started /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:207 ------------------------------ • [SLOW TEST:78.538 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With ephemeral alpine PVC /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:205 should not persist data /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:218 ------------------------------ Pod name: disks-images-provider-9dvv5 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-whnc4 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7586947775-m8zhx Pod phase: Running level=info timestamp=2018-08-10T13:26:54.818821Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:26:57.557178Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/openapi/v2 proto=HTTP/2.0 statusCode=404 contentLength=19 level=info timestamp=2018-08-10T13:26:57.562334Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/swagger.json proto=HTTP/2.0 statusCode=404 contentLength=19 2018/08/10 13:26:57 http: TLS handshake error from 10.129.0.1:54958: EOF level=info timestamp=2018-08-10T13:26:59.073195Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:26:59.300857Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/10 13:27:07 http: TLS handshake error from 10.129.0.1:54964: EOF level=info timestamp=2018-08-10T13:27:09.489290Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/10 13:27:17 http: TLS handshake error from 10.129.0.1:54970: EOF level=info timestamp=2018-08-10T13:27:19.677912Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:27:23.000969Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:27:24.974748Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 
contentLength=136 2018/08/10 13:27:27 http: TLS handshake error from 10.129.0.1:54976: EOF level=info timestamp=2018-08-10T13:27:29.258929Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:27:29.871497Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7586947775-wlwq8 Pod phase: Running 2018/08/10 13:25:20 http: TLS handshake error from 10.128.0.1:40120: EOF 2018/08/10 13:25:30 http: TLS handshake error from 10.128.0.1:40162: EOF 2018/08/10 13:25:40 http: TLS handshake error from 10.128.0.1:40210: EOF 2018/08/10 13:25:50 http: TLS handshake error from 10.128.0.1:40252: EOF 2018/08/10 13:26:00 http: TLS handshake error from 10.128.0.1:40294: EOF 2018/08/10 13:26:10 http: TLS handshake error from 10.128.0.1:40340: EOF 2018/08/10 13:26:20 http: TLS handshake error from 10.128.0.1:40382: EOF 2018/08/10 13:26:30 http: TLS handshake error from 10.128.0.1:40426: EOF 2018/08/10 13:26:40 http: TLS handshake error from 10.128.0.1:40472: EOF 2018/08/10 13:26:50 http: TLS handshake error from 10.128.0.1:40514: EOF 2018/08/10 13:27:00 http: TLS handshake error from 10.128.0.1:40556: EOF level=info timestamp=2018-08-10T13:27:07.485906Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/10 13:27:10 http: TLS handshake error from 10.128.0.1:40602: EOF 2018/08/10 13:27:20 http: TLS handshake error from 10.128.0.1:40644: EOF 2018/08/10 13:27:30 http: TLS handshake error from 10.128.0.1:40686: EOF Pod name: virt-controller-7d57d96b65-6vnn4 Pod phase: Running level=info timestamp=2018-08-10T13:10:52.902960Z pos=application.go:176 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-gcsvg Pod phase: Running level=info timestamp=2018-08-10T13:23:03.644677Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmifbmc9qz8dkw9n26nwx9w5b2mzvqtxzcb7l4hx82wkfqpmjxrzt6bmnjbl4dn4w6\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmifbmc9qz8dkw9n26nwx9w5b2mzvqtxzcb7l4hx82wkfqpmjxrzt6bmnjbl4dn4w6" level=info timestamp=2018-08-10T13:23:39.068653Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s kind= uid=97d06a07-9ca0-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:23:39.068960Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s kind= uid=97d06a07-9ca0-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:23:39.189413Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s\": the object has been modified; please apply your changes to the latest version and try 
again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s" level=info timestamp=2018-08-10T13:24:21.301269Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s kind= uid=b0fa69ec-9ca0-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:24:21.302499Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s kind= uid=b0fa69ec-9ca0-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:24:21.438645Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s" level=info timestamp=2018-08-10T13:24:21.471085Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s" level=info timestamp=2018-08-10T13:24:57.874697Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind= uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:24:57.875543Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind= uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:24:57.995170Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh" level=info timestamp=2018-08-10T13:25:31.012343Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind= uid=da88a46d-9ca0-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:25:31.013097Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind= uid=da88a46d-9ca0-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:25:31.167990Z pos=vmi.go:158 component=virt-controller service=http 
reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh" level=info timestamp=2018-08-10T13:25:31.221204Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh" Pod name: virt-handler-642wc Pod phase: Running level=info timestamp=2018-08-10T13:23:02.897670Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiv4nzmd2kw6xsrjmrh2dnrf4vxwzhqltprfnz9895pdvzvknfmpz4f7b6sj798kh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-10T13:23:03.116681Z pos=vm.go:783 component=virt-handler namespace=kubevirt-test-default name=testvmi2ffjvv9d7rdkxq4z27fvjdnrjzwb5fdzfj452wkf5b2qxcx4644bm7k248bc6t9 kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-10T13:23:03.116968Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2ffjvv9d7rdkxq4z27fvjdnrjzwb5fdzfj452wkf5b2qxcx4644bm7k248bc6t9, existing: false\n" level=info timestamp=2018-08-10T13:23:03.117041Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-10T13:23:03.117152Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi2ffjvv9d7rdkxq4z27fvjdnrjzwb5fdzfj452wkf5b2qxcx4644bm7k248bc6t9 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-10T13:23:03.117433Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi2ffjvv9d7rdkxq4z27fvjdnrjzwb5fdzfj452wkf5b2qxcx4644bm7k248bc6t9 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-10T13:23:03.118081Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmi2ffjvv9d7rdkxq4z27fvjdnrjzwb5fdzfj452wkf5b2qxcx4644bm7k248bc6t9, existing: false\n" level=info timestamp=2018-08-10T13:23:03.118257Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-10T13:23:03.118453Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmi2ffjvv9d7rdkxq4z27fvjdnrjzwb5fdzfj452wkf5b2qxcx4644bm7k248bc6t9 kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-10T13:23:03.118684Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmi2ffjvv9d7rdkxq4z27fvjdnrjzwb5fdzfj452wkf5b2qxcx4644bm7k248bc6t9 kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
level=info timestamp=2018-08-10T13:23:48.117678Z pos=vm.go:783 component=virt-handler namespace=kubevirt-test-default name=testvmiv4nzmd2kw6xsrjmrh2dnrf4vxwzhqltprfnz9895pdvzvknfmpz4f7b6sj798kh kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-10T13:23:48.118516Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiv4nzmd2kw6xsrjmrh2dnrf4vxwzhqltprfnz9895pdvzvknfmpz4f7b6sj798kh, existing: false\n" level=info timestamp=2018-08-10T13:23:48.118751Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-10T13:23:48.118936Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmiv4nzmd2kw6xsrjmrh2dnrf4vxwzhqltprfnz9895pdvzvknfmpz4f7b6sj798kh kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-10T13:23:48.119215Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiv4nzmd2kw6xsrjmrh2dnrf4vxwzhqltprfnz9895pdvzvknfmpz4f7b6sj798kh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-handler-fkj9p Pod phase: Running level=info timestamp=2018-08-10T13:25:18.540273Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind= uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-10T13:25:30.907083Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh, existing: false\n" level=info timestamp=2018-08-10T13:25:30.907509Z pos=vm.go:331 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-10T13:25:30.907568Z pos=vm.go:333 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-10T13:25:30.907884Z pos=vm.go:360 component=virt-handler namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-10T13:25:30.908000Z pos=vm.go:412 component=virt-handler namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-10T13:25:30.908403Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-10T13:25:38.812970Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh, existing: true\n" level=info timestamp=2018-08-10T13:25:38.813601Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Scheduled\n" level=info timestamp=2018-08-10T13:25:38.813973Z pos=vm.go:327 component=virt-handler namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind= uid=da88a46d-9ca0-11e8-943d-525500d15501 msg="Ignoring domain from an older VMI, will be handled by its own VMI." 
level=info timestamp=2018-08-10T13:25:54.996453Z pos=vm.go:783 component=virt-handler namespace=kubevirt-test-default name=testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-10T13:25:54.997373Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s, existing: false\n" level=info timestamp=2018-08-10T13:25:54.997485Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-10T13:25:54.997790Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-10T13:25:54.998091Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmif5wz65hqhpbzwnzb87m2g527trv2przqq2bshz6lbkcmnf27tgptspvpqswrq5s kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." Pod name: virt-launcher-testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p4sf4n Pod phase: Running level=info timestamp=2018-08-10T13:25:36.091683Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-10T13:25:36.091844Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-10T13:25:36.150295Z pos=virt-launcher.go:113 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh" level=info timestamp=2018-08-10T13:25:36.151272Z pos=virt-launcher.go:59 component=virt-launcher msg="Marked as ready" • Failure [154.405 seconds] Storage /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:46 Starting a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:70 With VirtualMachineInstance with two PVCs /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:266 should start vmi multiple times [It] /root/go/src/kubevirt.io/kubevirt/tests/storage_test.go:278 Timed out after 120.110s. 
Timed out waiting for VMI to enter Running phase
Expected
    <bool>: false
to equal
    <bool>: true

/root/go/src/kubevirt.io/kubevirt/tests/utils.go:1062
------------------------------
STEP: Starting and stopping the VirtualMachineInstance number of times
STEP: Starting a VirtualMachineInstance
STEP: Waiting until the VirtualMachineInstance will start
level=info timestamp=2018-08-10T13:24:58.363696Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66pczhtb"
level=info timestamp=2018-08-10T13:25:06.291233Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66pczhtb"
level=info timestamp=2018-08-10T13:25:06.346607Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-08-10T13:25:17.957251Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="VirtualMachineInstance started."
STEP: Starting a VirtualMachineInstance
STEP: Waiting until the VirtualMachineInstance will start
level=info timestamp=2018-08-10T13:25:31.439726Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="Created virtual machine pod virt-launcher-testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66pczhtb"
level=info timestamp=2018-08-10T13:25:31.440537Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66pczhtb"
level=info timestamp=2018-08-10T13:25:31.440847Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="VirtualMachineInstance defined."
level=info timestamp=2018-08-10T13:25:31.444141Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmiz8bcdcm9ng65sb7pmmkbtd54v7nclw7hgb66p8hr94fvrlxpnvchkn6rtt9d2sh kind=VirtualMachineInstance uid=c6c701d9-9ca0-11e8-943d-525500d15501 msg="VirtualMachineInstance started."
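The failure above is not storage-specific: it is the shared readiness wait in tests/utils.go timing out, with the VMI's second start never reaching the Running phase inside the 120s window. The same wait can be reproduced from the shell with a small polling loop (wait_for_vmi_running is a hypothetical helper for illustration; it assumes the vmi short name registered by the deployed virtualmachineinstances CRD):

    wait_for_vmi_running() {
        # Poll .status.phase until Running, mirroring the test's 120s timeout.
        local name=$1 ns=$2 timeout=${3:-120} waited=0 phase
        while [ "$waited" -lt "$timeout" ]; do
            phase=$(cluster/kubectl.sh get vmi "$name" -n "$ns" -o jsonpath='{.status.phase}' 2>/dev/null)
            [ "$phase" = Running ] && return 0
            sleep 2
            waited=$((waited + 2))
        done
        echo "Timed out waiting for VMI $name to enter Running phase" >&2
        return 1
    }

Consistent with that, virt-handler logged "Ignoring domain from an older VMI, will be handled by its own VMI" for this VMI name at 13:25:38, suggesting the relaunched instance was still waiting on its predecessor's domain when the test gave up.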
volumedisk0 compute • [SLOW TEST:45.243 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with 3 CPU cores /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:56 should report 3 cpu cores under guest OS /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:62 ------------------------------ • [SLOW TEST:46.222 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with diverging guest memory from requested memory /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:108 should show the requested guest memory inside the VMI /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:109 ------------------------------ • ------------------------------ • [SLOW TEST:24.614 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:194 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-2Mi /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] [0.294 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 VirtualMachineInstance definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:55 with hugepages /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:194 should consume hugepages /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 hugepages-1Gi [It] /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 No node with hugepages hugepages-1Gi capacity /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:246 ------------------------------ • ------------------------------ • [SLOW TEST:114.429 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:324 when CPU model defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:370 should report defined CPU model /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:371 ------------------------------ • [SLOW TEST:110.089 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:324 when CPU model equals to passthrough /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:398 should report exactly the same model as node CPU /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:399 ------------------------------ • [SLOW TEST:109.710 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 with CPU spec /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:324 when CPU model not defined /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:422 should report CPU model from libvirt capabilities 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:423 ------------------------------ • [SLOW TEST:54.712 seconds] Configurations /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:44 New VirtualMachineInstance with all supported drives /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:443 should have all the device nodes /root/go/src/kubevirt.io/kubevirt/tests/vmi_configuration_test.go:466 ------------------------------ ••••••••••• ------------------------------ • [SLOW TEST:5.556 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48 with correct permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:51 should be allowed to access subresource endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:52 ------------------------------ • [SLOW TEST:5.918 seconds] Subresource Api /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:37 Rbac Authorization /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:48 Without permissions /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:56 should not be able to access subresource endpoint /root/go/src/kubevirt.io/kubevirt/tests/subresource_api_test.go:57 ------------------------------ •• ------------------------------ • [SLOW TEST:106.449 seconds] Slirp /root/go/src/kubevirt.io/kubevirt/tests/vmi_slirp_interface_test.go:39 should be able to /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 VirtualMachineInstance with slirp interface /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • ------------------------------ • [SLOW TEST:47.026 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a cirros image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:67 should return that we are running cirros /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:68 ------------------------------ • [SLOW TEST:51.766 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 with a fedora image /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:77 should return that we are running fedora /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:78 ------------------------------ • [SLOW TEST:43.148 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should be able to reconnect to console multiple times /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:87 ------------------------------ • [SLOW TEST:27.795 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should wait until the virtual machine is in running state and return a stream interface 
/root/go/src/kubevirt.io/kubevirt/tests/console_test.go:103 ------------------------------ • [SLOW TEST:30.354 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should fail waiting for the virtual machine instance to be running /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:111 ------------------------------ • [SLOW TEST:30.300 seconds] Console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:37 A new VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:65 with a serial console /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:66 should fail waiting for the expecter /root/go/src/kubevirt.io/kubevirt/tests/console_test.go:134 ------------------------------ • [SLOW TEST:47.929 seconds] Health Monitoring /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:37 A VirtualMachineInstance with a watchdog device /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:56 should be shut down when the watchdog expires /root/go/src/kubevirt.io/kubevirt/tests/vmi_monitoring_test.go:57 ------------------------------ ••• ------------------------------ • [SLOW TEST:6.120 seconds] Templates /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:42 Launching VMI from VM Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:60 with given Fedora Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:193 with given VM JSON from the Template /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:152 with given VM from the VM JSON /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:158 with given VMI from the VM /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:163 should succeed to terminate the VMI using oc-patch command /root/go/src/kubevirt.io/kubevirt/tests/template_test.go:166 ------------------------------ • ------------------------------ • [SLOW TEST:8.763 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should scale /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 to five, to six and then to zero replicas /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ •• ------------------------------ • [SLOW TEST:25.681 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should update readyReplicas once VMIs are up /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:157 ------------------------------ •• ------------------------------ • [SLOW TEST:5.595 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should not scale when paused and scale when resume /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:223 ------------------------------ • [SLOW TEST:6.796 seconds] VirtualMachineInstanceReplicaSet /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:46 should remove the finished VM /root/go/src/kubevirt.io/kubevirt/tests/replicaset_test.go:279 ------------------------------ •• ------------------------------ • [SLOW TEST:25.561 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 should update VirtualMachine once VMIs 
are up /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:196 ------------------------------ •• ------------------------------ • [SLOW TEST:31.891 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 should recreate VirtualMachineInstance if it gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:246 ------------------------------ • [SLOW TEST:72.359 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 should recreate VirtualMachineInstance if the VirtualMachineInstance's pod gets deleted /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:266 ------------------------------ • [SLOW TEST:31.862 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 should stop VirtualMachineInstance if running set to false /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:326 ------------------------------ • [SLOW TEST:193.271 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 should start and stop VirtualMachineInstance multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:334 ------------------------------ • [SLOW TEST:66.205 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 should not update the VirtualMachineInstance spec if Running /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:347 ------------------------------ • [SLOW TEST:223.829 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 should survive guest shutdown, multiple times /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:388 ------------------------------ VM testvmixvrqqjr9s8kwblbl7g4dnlhps7v88thjtl6ktz7gn5lqbs5cbwr7lsmzb9rftlm was scheduled to start • [SLOW TEST:27.795 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 should start a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:437 ------------------------------ VM testvmistpvgvbj849mc5gsnnkngm7gf8825wzrwd2bzfccv68lmwl2jsfjb7zdqtqldzp was scheduled to stop • [SLOW TEST:27.797 seconds] VirtualMachine /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:48 A valid VirtualMachine given /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:116 Using virtctl interface /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:436 should stop a VirtualMachineInstance once /root/go/src/kubevirt.io/kubevirt/tests/vm_test.go:468 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.017 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to start a vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:133 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1404 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.015 seconds] Windows VirtualMachineInstance 
/root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 should succeed to stop a running vmi [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:139 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1404 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.015 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have correct UUID /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:192 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1404 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.013 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with winrm connection [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:150 should have pod IP /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:208 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1404 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.016 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to start a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:242 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1404 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.012 seconds] Windows VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:57 with kubectl command [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:226 should succeed to stop a vmi /root/go/src/kubevirt.io/kubevirt/tests/windows_test.go:250 Skip Windows tests that requires PVC disk-windows /root/go/src/kubevirt.io/kubevirt/tests/utils.go:1404 ------------------------------ • [SLOW TEST:25.503 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should successfully start with hook sidecar annotation /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:60 ------------------------------ • [SLOW TEST:25.353 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should call Collect and OnDefineDomain on the hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:67 ------------------------------ • [SLOW TEST:25.453 seconds] HookSidecars /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:40 VMI definition /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:58 with SM BIOS hook sidecar /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:59 should update domain XML with SM BIOS properties /root/go/src/kubevirt.io/kubevirt/tests/vmi_hook_sidecar_test.go:83 ------------------------------ • ------------------------------ • [SLOW TEST:24.710 seconds] VMIlifecycle 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 should start it /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:78 ------------------------------ • [SLOW TEST:28.709 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 should attach virt-launcher to it /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:84 ------------------------------ Pod name: disks-images-provider-9dvv5 Pod phase: Running copy all images to host mount directory Pod name: disks-images-provider-whnc4 Pod phase: Running copy all images to host mount directory Pod name: virt-api-7586947775-m8zhx Pod phase: Running 2018/08/10 13:57:47 http: TLS handshake error from 10.129.0.1:56124: EOF level=info timestamp=2018-08-10T13:57:54.180049Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/10 13:57:57 http: TLS handshake error from 10.129.0.1:56130: EOF level=info timestamp=2018-08-10T13:58:04.391376Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:58:07.154829Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/10 13:58:07 http: TLS handshake error from 10.129.0.1:56136: EOF level=info timestamp=2018-08-10T13:58:08.901835Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:58:08.907511Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:58:12.459373Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:58:14.606488Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 2018/08/10 13:58:17 http: TLS handshake error from 10.129.0.1:56142: EOF level=info timestamp=2018-08-10T13:58:22.617540Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:58:22.660539Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:58:22.673321Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=10s" proto=HTTP/2.0 statusCode=200 contentLength=136 level=info timestamp=2018-08-10T13:58:24.825981Z pos=filter.go:46 
component=virt-api remoteAddress=10.128.0.1 username=- method=GET url="/apis/subresources.kubevirt.io/v1alpha2?timeout=32s" proto=HTTP/2.0 statusCode=200 contentLength=136 Pod name: virt-api-7586947775-wlwq8 Pod phase: Running 2018/08/10 13:56:10 http: TLS handshake error from 10.128.0.1:48984: EOF 2018/08/10 13:56:20 http: TLS handshake error from 10.128.0.1:49026: EOF 2018/08/10 13:56:30 http: TLS handshake error from 10.128.0.1:49068: EOF 2018/08/10 13:56:40 http: TLS handshake error from 10.128.0.1:49114: EOF 2018/08/10 13:56:50 http: TLS handshake error from 10.128.0.1:49158: EOF 2018/08/10 13:57:00 http: TLS handshake error from 10.128.0.1:49200: EOF level=info timestamp=2018-08-10T13:57:07.690382Z pos=filter.go:46 component=virt-api remoteAddress=10.128.0.1 username=- method=GET url=/ proto=HTTP/1.1 statusCode=404 contentLength=19 2018/08/10 13:57:10 http: TLS handshake error from 10.128.0.1:49254: EOF 2018/08/10 13:57:20 http: TLS handshake error from 10.128.0.1:1024: EOF 2018/08/10 13:57:30 http: TLS handshake error from 10.128.0.1:1025: EOF 2018/08/10 13:57:40 http: TLS handshake error from 10.128.0.1:49384: EOF 2018/08/10 13:57:50 http: TLS handshake error from 10.128.0.1:49426: EOF 2018/08/10 13:58:00 http: TLS handshake error from 10.128.0.1:49468: EOF 2018/08/10 13:58:10 http: TLS handshake error from 10.128.0.1:49516: EOF 2018/08/10 13:58:20 http: TLS handshake error from 10.128.0.1:49654: EOF Pod name: virt-controller-7d57d96b65-6vnn4 Pod phase: Running level=info timestamp=2018-08-10T13:10:52.902960Z pos=application.go:176 component=virt-controller service=http action=listening interface=0.0.0.0 port=8182 Pod name: virt-controller-7d57d96b65-gcsvg Pod phase: Running level=info timestamp=2018-08-10T13:56:43.229079Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmingjbhp8qbgsc69nhp4jpk4wsmsnm6dr59l9jlf6rqgmv8tf695dn6nhvstgw7qm\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmingjbhp8qbgsc69nhp4jpk4wsmsnm6dr59l9jlf6rqgmv8tf695dn6nhvstgw7qm" level=info timestamp=2018-08-10T13:57:08.563271Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxnng794tgxcsdxmdlfrwbxxlzxzjdchjx8rrj95bz9sqwrrq4wr9vw8q55jzh4 kind= uid=458eba22-9ca5-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:57:08.565229Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmimxnng794tgxcsdxmdlfrwbxxlzxzjdchjx8rrj95bz9sqwrrq4wr9vw8q55jzh4 kind= uid=458eba22-9ca5-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:57:08.715729Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxnng794tgxcsdxmdlfrwbxxlzxzjdchjx8rrj95bz9sqwrrq4wr9vw8q55jzh4\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxnng794tgxcsdxmdlfrwbxxlzxzjdchjx8rrj95bz9sqwrrq4wr9vw8q55jzh4" level=info timestamp=2018-08-10T13:57:08.865996Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmimxnng794tgxcsdxmdlfrwbxxlzxzjdchjx8rrj95bz9sqwrrq4wr9vw8q55jzh4\": 
StorageError: invalid object, Code: 4, Key: /kubernetes.io/kubevirt.io/virtualmachineinstances/kubevirt-test-default/testvmimxnng794tgxcsdxmdlfrwbxxlzxzjdchjx8rrj95bz9sqwrrq4wr9vw8q55jzh4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 458eba22-9ca5-11e8-943d-525500d15501, UID in object meta: " msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmimxnng794tgxcsdxmdlfrwbxxlzxzjdchjx8rrj95bz9sqwrrq4wr9vw8q55jzh4" level=info timestamp=2018-08-10T13:57:08.993030Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6q5bm2tgd2pkmjk5n7d6bjnbsgchqmdfsq5n2bxkzx5tt69d2vgcfxckx5zjkfn kind= uid=45d272b2-9ca5-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:57:08.994707Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmi6q5bm2tgd2pkmjk5n7d6bjnbsgchqmdfsq5n2bxkzx5tt69d2vgcfxckx5zjkfn kind= uid=45d272b2-9ca5-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:57:09.125549Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6q5bm2tgd2pkmjk5n7d6bjnbsgchqmdfsq5n2bxkzx5tt69d2vgcfxckx5zjkfn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6q5bm2tgd2pkmjk5n7d6bjnbsgchqmdfsq5n2bxkzx5tt69d2vgcfxckx5zjkfn" level=info timestamp=2018-08-10T13:57:09.167053Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmi6q5bm2tgd2pkmjk5n7d6bjnbsgchqmdfsq5n2bxkzx5tt69d2vgcfxckx5zjkfn\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmi6q5bm2tgd2pkmjk5n7d6bjnbsgchqmdfsq5n2bxkzx5tt69d2vgcfxckx5zjkfn" level=info timestamp=2018-08-10T13:57:33.602922Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixgz9rhwv6w4nkv2vnwdcz4llxb5jd9mhbvxgdf5g9kvkrthg8kfjzlp6f2lhhsn kind= uid=547a22e6-9ca5-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:57:33.603401Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmixgz9rhwv6w4nkv2vnwdcz4llxb5jd9mhbvxgdf5g9kvkrthg8kfjzlp6f2lhhsn kind= uid=547a22e6-9ca5-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:58:02.267939Z pos=preset.go:142 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Initializing VirtualMachineInstance" level=info timestamp=2018-08-10T13:58:02.268723Z pos=preset.go:171 component=virt-controller service=http namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Marking VirtualMachineInstance as initialized" level=info timestamp=2018-08-10T13:58:02.432764Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc\": the object has been 
modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc" level=info timestamp=2018-08-10T13:58:02.527399Z pos=vmi.go:158 component=virt-controller service=http reason="Operation cannot be fulfilled on virtualmachineinstances.kubevirt.io \"testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc\": the object has been modified; please apply your changes to the latest version and try again" msg="reenqueuing VirtualMachineInstance kubevirt-test-default/testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc" Pod name: virt-handler-642wc Pod phase: Running level=info timestamp=2018-08-10T13:58:26.048765Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-10T13:58:26.049530Z pos=vm.go:793 component=virt-handler namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind=Domain uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Domain is in state Running reason Unknown" level=info timestamp=2018-08-10T13:58:26.050689Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc, existing: true\n" level=info timestamp=2018-08-10T13:58:26.051151Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-08-10T13:58:26.051418Z pos=vm.go:331 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-10T13:58:26.051732Z pos=vm.go:333 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-08-10T13:58:26.052045Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Processing vmi update" level=info timestamp=2018-08-10T13:58:26.054292Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synchronization loop succeeded." level=info timestamp=2018-08-10T13:58:26.087640Z pos=server.go:75 component=virt-handler msg="Received Domain Event of type MODIFIED" level=info timestamp=2018-08-10T13:58:26.088116Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc, existing: true\n" level=info timestamp=2018-08-10T13:58:26.088204Z pos=vm.go:317 component=virt-handler msg="vmi is in phase: Running\n" level=info timestamp=2018-08-10T13:58:26.088275Z pos=vm.go:331 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-10T13:58:26.088371Z pos=vm.go:333 component=virt-handler msg="Domain status: Running, reason: Unknown\n" level=info timestamp=2018-08-10T13:58:26.088566Z pos=vm.go:418 component=virt-handler namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Processing vmi update" level=info timestamp=2018-08-10T13:58:26.101288Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synchronization loop succeeded." 
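The virt-handler records above trace single passes of its synchronization loop: each pass compares whether the VMI object still exists in the cluster against whether a libvirt domain exists on the node, then picks a reconcile action. The sketch below is a reduced model of that decision only; syncInput and decide are illustrative names, not virt-handler's actual types.

package main

import "fmt"

// syncInput captures the three facts the handler logs on every pass:
// "existing: true/false" for the VMI, "Domain: existing: true/false",
// and the domain state ("Running", "Shutoff", ...).
type syncInput struct {
	vmiExists    bool
	domainExists bool
	domainState  string
}

// decide maps the observed combination to an action, mirroring the
// messages the handler prints after each comparison.
func decide(in syncInput) string {
	switch {
	case in.vmiExists && !in.domainExists:
		return "define and start domain"
	case in.vmiExists && in.domainExists:
		return "process vmi update" // keep the domain in sync with the spec
	case !in.vmiExists && in.domainExists && in.domainState != "Shutoff":
		return "shut down domain for deleted VirtualMachineInstance"
	case !in.vmiExists && in.domainExists:
		return "process deletion" // domain already off: clean it up
	default:
		return "clean up local ephemeral data"
	}
}

func main() {
	// The two situations visible in the handler logs around this point.
	fmt.Println(decide(syncInput{vmiExists: true, domainExists: true, domainState: "Running"}))
	fmt.Println(decide(syncInput{vmiExists: false, domainExists: true, domainState: "Shutoff"}))
}

Read this way, the "Processing vmi update" / "Synchronization loop succeeded." pairs above are the steady-state branch, and the next pod's log shows the deletion branches.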
Pod name: virt-handler-fkj9p Pod phase: Running level=info timestamp=2018-08-10T13:43:50.049294Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb, existing: false\n" level=info timestamp=2018-08-10T13:43:50.049414Z pos=vm.go:331 component=virt-handler msg="Domain: existing: true\n" level=info timestamp=2018-08-10T13:43:50.049449Z pos=vm.go:333 component=virt-handler msg="Domain status: Shutoff, reason: Destroyed\n" level=info timestamp=2018-08-10T13:43:50.049564Z pos=vm.go:360 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=VirtualMachineInstance uid= msg="Shutting down domain for deleted VirtualMachineInstance object." level=info timestamp=2018-08-10T13:43:50.049648Z pos=vm.go:412 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=VirtualMachineInstance uid= msg="Processing deletion." level=info timestamp=2018-08-10T13:43:50.049890Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-10T13:44:39.998331Z pos=vm.go:783 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=Domain uid= msg="Domain deleted" level=info timestamp=2018-08-10T13:44:39.999550Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb, existing: false\n" level=info timestamp=2018-08-10T13:44:39.999746Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-10T13:44:39.999959Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-10T13:44:40.006776Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." level=info timestamp=2018-08-10T13:44:40.009284Z pos=vm.go:315 component=virt-handler msg="Processing vmi testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb, existing: false\n" level=info timestamp=2018-08-10T13:44:40.009428Z pos=vm.go:331 component=virt-handler msg="Domain: existing: false\n" level=info timestamp=2018-08-10T13:44:40.009571Z pos=vm.go:415 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=VirtualMachineInstance uid= msg="Processing local ephemeral data cleanup for shutdown domain." level=info timestamp=2018-08-10T13:44:40.010790Z pos=vm.go:442 component=virt-handler namespace=kubevirt-test-default name=testvmij5wfbh6bfgp5dfm7dbng567mwblkb4zm9492w4r9c7jm4656v8w2grdb kind=VirtualMachineInstance uid= msg="Synchronization loop succeeded." 
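The virt-launcher dumps that follow report raw libvirt codes such as "Libvirt event 5 with reason 1 received" and "domain status: 5:2". Assuming libvirt's standard C enum ordering (virDomainEventType and virDomainState; verify against the libvirt version in the launcher image), a small lookup decodes them:

package main

import "fmt"

// Tables assume libvirt's standard enum order; index = numeric code
// from the log lines.
var eventType = []string{"DEFINED", "UNDEFINED", "STARTED", "SUSPENDED", "RESUMED", "STOPPED", "SHUTDOWN", "PMSUSPENDED", "CRASHED"}
var domainState = []string{"NOSTATE", "RUNNING", "BLOCKED", "PAUSED", "SHUTDOWN", "SHUTOFF", "CRASHED", "PMSUSPENDED"}

func name(table []string, code int) string {
	if code >= 0 && code < len(table) {
		return table[code]
	}
	return fmt.Sprintf("UNKNOWN(%d)", code)
}

func main() {
	// Pairs taken from the virt-launcher logs below.
	fmt.Println("event 0 =", name(eventType, 0))      // logged next to "Domain defined."
	fmt.Println("event 2 =", name(eventType, 2))      // logged next to "Domain started."
	fmt.Println("event 5 =", name(eventType, 5))      // logged next to "Domain stopped."
	fmt.Println("status 1:1 =", name(domainState, 1)) // running, reason 1 likely BOOTED
	fmt.Println("status 5:2 =", name(domainState, 5)) // shut off, reason 2 likely DESTROYED
}

Decoded this way, "domain status: 5:2" below is SHUTOFF with reason DESTROYED, which matches the handler's own earlier wording "Domain status: Shutoff, reason: Destroyed".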
Pod name: virt-launcher-testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr5fh4s Pod phase: Running 2018-08-10 13:58:25.213+0000: 56: error : virCgroupDetect:714 : At least one cgroup controller is required: No such device or address level=info timestamp=2018-08-10T13:58:25.239299Z pos=client.go:138 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-10T13:58:25.264073Z pos=client.go:164 component=virt-launcher msg="processed event" level=info timestamp=2018-08-10T13:58:25.296849Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:26.007279Z pos=client.go:155 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-10T13:58:26.046351Z pos=client.go:138 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-10T13:58:26.050381Z pos=client.go:164 component=virt-launcher msg="processed event" level=info timestamp=2018-08-10T13:58:26.051838Z pos=client.go:155 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-10T13:58:26.053340Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:26.055905Z pos=manager.go:319 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Domain started." level=info timestamp=2018-08-10T13:58:26.085063Z pos=client.go:138 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-10T13:58:26.089869Z pos=client.go:164 component=virt-launcher msg="processed event" level=info timestamp=2018-08-10T13:58:26.100455Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:27.021503Z pos=virt-launcher.go:184 component=virt-launcher msg="Detected domain with UUID 6e426bd8-e985-421a-9eb8-422b5dd66658" level=info timestamp=2018-08-10T13:58:27.022427Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" Pod name: virt-launcher-testvmixgz9rhwv6w4nkv2vnwdcz4llxb5jd9mhbvxgdhnjct Pod phase: Running level=info timestamp=2018-08-10T13:57:59.817786Z pos=client.go:164 component=virt-launcher msg="processed event" level=info timestamp=2018-08-10T13:57:59.824938Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmixgz9rhwv6w4nkv2vnwdcz4llxb5jd9mhbvxgdf5g9kvkrthg8kfjzlp6f2lhhsn kind= uid=547a22e6-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:00.263916Z pos=virt-launcher.go:184 component=virt-launcher msg="Detected domain with UUID c192f9c3-8b4f-4473-bd5f-5d63587185ff" level=info timestamp=2018-08-10T13:58:00.265189Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" level=info timestamp=2018-08-10T13:58:01.269830Z pos=monitor.go:222 component=virt-launcher msg="Found PID for c192f9c3-8b4f-4473-bd5f-5d63587185ff: 225" level=info timestamp=2018-08-10T13:58:01.694494Z pos=client.go:155 
component=virt-launcher msg="Libvirt event 5 with reason 1 received"
level=info timestamp=2018-08-10T13:58:01.704906Z pos=manager.go:432 component=virt-launcher namespace=kubevirt-test-default name=testvmixgz9rhwv6w4nkv2vnwdcz4llxb5jd9mhbvxgdf5g9kvkrthg8kfjzlp6f2lhhsn kind= uid=547a22e6-9ca5-11e8-943d-525500d15501 msg="Domain stopped."
level=info timestamp=2018-08-10T13:58:01.705272Z pos=server.go:96 component=virt-launcher namespace=kubevirt-test-default name=testvmixgz9rhwv6w4nkv2vnwdcz4llxb5jd9mhbvxgdf5g9kvkrthg8kfjzlp6f2lhhsn kind= uid=547a22e6-9ca5-11e8-943d-525500d15501 msg="Signaled vmi kill"
level=info timestamp=2018-08-10T13:58:01.720075Z pos=client.go:138 component=virt-launcher msg="domain status: 5:2"
level=info timestamp=2018-08-10T13:58:01.724623Z pos=client.go:164 component=virt-launcher msg="processed event"
level=info timestamp=2018-08-10T13:58:01.738533Z pos=monitor.go:266 component=virt-launcher msg="Received signal 15."
caught signal
level=info timestamp=2018-08-10T13:58:02.267149Z pos=monitor.go:231 component=virt-launcher msg="Process c192f9c3-8b4f-4473-bd5f-5d63587185ff and pid 225 is gone!"
level=info timestamp=2018-08-10T13:58:02.268978Z pos=manager.go:436 component=virt-launcher namespace=kubevirt-test-default name=testvmixgz9rhwv6w4nkv2vnwdcz4llxb5jd9mhbvxgdf5g9kvkrthg8kfjzlp6f2lhhsn kind=VirtualMachineInstance uid= msg="Domain not running or paused, nothing to do."
level=info timestamp=2018-08-10T13:58:02.269229Z pos=virt-launcher.go:203 component=virt-launcher msg="Waiting on final notifications to be sent to virt-handler."

• Failure [26.426 seconds]
VMIlifecycle
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51
  Creating a VirtualMachineInstance
  /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72
    should log libvirtd logs [It]
    /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:97

    Timed out after 2.003s.
Expected : level=info timestamp=2018-08-10T13:58:05.949921Z pos=manager.go:69 component=virt-launcher msg="Collected all requested hook sidecar sockets" level=info timestamp=2018-08-10T13:58:05.950105Z pos=manager.go:72 component=virt-launcher msg="Sorted all collected sidecar sockets per hook point based on their priority and name: map[]" level=info timestamp=2018-08-10T13:58:05.997569Z pos=virt-launcher.go:113 component=virt-launcher msg="Watchdog file created at /var/run/kubevirt/watchdog-files/kubevirt-test-default_testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc" level=info timestamp=2018-08-10T13:58:05.998007Z pos=virt-launcher.go:59 component=virt-launcher msg="Marked as ready" level=info timestamp=2018-08-10T13:58:12.081846Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:12.084783Z pos=libvirt.go:261 component=virt-launcher msg="Connecting to libvirt daemon: qemu:///system" + mkdir -p /var/log/kubevirt + touch /var/log/kubevirt/qemu-kube.log + chown qemu:qemu /var/log/kubevirt/qemu-kube.log + [[ -z '' ]] ++ ip -o -4 a ++ tr -s ' ' ++ cut '-d ' -f 2 ++ grep -v -e '^lo[0-9:]*$' ++ head -1 + LIBVIRTD_DEFAULT_NETWORK_DEVICE=eth0 + echo 'Selected "eth0" as primary interface' + [[ -n eth0 ]] + echo 'Setting libvirt default network to "eth0"' + mkdir -p /etc/libvirt/qemu/networks/autostart + cat + ln -s -f /etc/libvirt/qemu/networks/default.xml /etc/libvirt/qemu/networks/autostart/default.xml + echo 'cgroup_controllers = [ ]' + '[' -d /dev/hugepages ']' + echo 'log_outputs = "1:stderr"' + /usr/sbin/libvirtd 2018-08-10 13:58:12.644+0000: 67: info : libvirt version: 4.2.0, package: 1.fc28 (Unknown, 2018-04-04-03:04:18, a0570af3fea64d0ba2df52242c71403f) 2018-08-10 13:58:12.644+0000: 67: info : hostname: testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8h 2018-08-10 13:58:12.644+0000: 67: error : virDBusGetSystemBus:109 : internal error: Unable to get DBus system bus connection: Failed to connect to socket /run/dbus/system_bus_socket: No such file or directory 2018-08-10 13:58:13.806+0000: 67: error : virDBusGetSystemBus:109 : internal error: Unable to get DBus system bus connection: Failed to connect to socket /run/dbus/system_bus_socket: No such file or directory 2018-08-10 13:58:13.806+0000: 67: warning : networkStateInitialize:763 : DBus not available, disabling firewalld support in bridge_network_driver: internal error: Unable to get DBus system bus connection: Failed to connect to socket /run/dbus/system_bus_socket: No such file or directory 2018-08-10 13:58:13.954+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:00.0/config': Read-only file system 2018-08-10 13:58:13.956+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:01.0/config': Read-only file system 2018-08-10 13:58:13.957+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:01.1/config': Read-only file system 2018-08-10 13:58:13.965+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:01.3/config': Read-only file system 2018-08-10 13:58:13.996+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:02.0/config': 
Read-only file system 2018-08-10 13:58:14.015+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:03.0/config': Read-only file system 2018-08-10 13:58:14.017+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:04.0/config': Read-only file system 2018-08-10 13:58:14.018+0000: 79: error : virPCIDeviceConfigOpen:312 : Failed to open config space file '/sys/bus/pci/devices/0000:00:05.0/config': Read-only file system 2018-08-10 13:58:16.933+0000: 67: error : virCommandWait:2600 : internal error: Child process (/usr/sbin/dmidecode -q -t 0,1,2,3,4,17) unexpected exit status 1: /dev/mem: No such file or directory 2018-08-10 13:58:16.988+0000: 67: error : virNodeSuspendSupportsTarget:336 : internal error: Cannot probe for supported suspend types 2018-08-10 13:58:16.988+0000: 67: warning : virQEMUCapsInit:1229 : Failed to get host power management capabilities level=info timestamp=2018-08-10T13:58:24.166568Z pos=libvirt.go:276 component=virt-launcher msg="Connected to libvirt daemon" level=info timestamp=2018-08-10T13:58:24.169645Z pos=client.go:171 component=virt-launcher msg="Registered libvirt event notify callback" level=error timestamp=2018-08-10T13:58:24.216940Z pos=common.go:126 component=virt-launcher msg="updated MAC for interface: eth0 - 0a:58:0a:1f:95:19" level=info timestamp=2018-08-10T13:58:24.224605Z pos=converter.go:779 component=virt-launcher msg="Found nameservers in /etc/resolv.conf: \ufffd\ufffdBf" level=info timestamp=2018-08-10T13:58:24.225008Z pos=converter.go:780 component=virt-launcher msg="Found search domains in /etc/resolv.conf: kubevirt-test-default.svc.cluster.local svc.cluster.local cluster.local" level=info timestamp=2018-08-10T13:58:24.225986Z pos=dhcp.go:62 component=virt-launcher msg="Starting SingleClientDHCPServer" level=info timestamp=2018-08-10T13:58:24.384979Z pos=manager.go:288 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Domain defined." level=info timestamp=2018-08-10T13:58:24.385216Z pos=client.go:155 component=virt-launcher msg="Libvirt event 0 with reason 0 received" 2018-08-10 13:58:24.547+0000: 56: error : virDBusGetSystemBus:109 : internal error: Unable to get DBus system bus connection: Failed to connect to socket /run/dbus/system_bus_socket: No such file or directory 2018-08-10 13:58:24.563+0000: 56: warning : qemuInterfaceOpenVhostNet:687 : Unable to open vhost-net. 
Opened so far 0, requested 1 2018-08-10 13:58:25.213+0000: 56: error : virCgroupDetect:714 : At least one cgroup controller is required: No such device or address level=info timestamp=2018-08-10T13:58:25.239299Z pos=client.go:138 component=virt-launcher msg="domain status: 3:11" level=info timestamp=2018-08-10T13:58:25.264073Z pos=client.go:164 component=virt-launcher msg="processed event" level=info timestamp=2018-08-10T13:58:25.296849Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:26.007279Z pos=client.go:155 component=virt-launcher msg="Libvirt event 4 with reason 0 received" level=info timestamp=2018-08-10T13:58:26.046351Z pos=client.go:138 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-10T13:58:26.050381Z pos=client.go:164 component=virt-launcher msg="processed event" level=info timestamp=2018-08-10T13:58:26.051838Z pos=client.go:155 component=virt-launcher msg="Libvirt event 2 with reason 0 received" level=info timestamp=2018-08-10T13:58:26.053340Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:26.055905Z pos=manager.go:319 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Domain started." level=info timestamp=2018-08-10T13:58:26.085063Z pos=client.go:138 component=virt-launcher msg="domain status: 1:1" level=info timestamp=2018-08-10T13:58:26.089869Z pos=client.go:164 component=virt-launcher msg="processed event" level=info timestamp=2018-08-10T13:58:26.100455Z pos=server.go:74 component=virt-launcher namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind= uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Synced vmi" level=info timestamp=2018-08-10T13:58:27.021503Z pos=virt-launcher.go:184 component=virt-launcher msg="Detected domain with UUID 6e426bd8-e985-421a-9eb8-422b5dd66658" level=info timestamp=2018-08-10T13:58:27.022427Z pos=monitor.go:253 component=virt-launcher msg="Monitoring loop: rate 1s start timeout 5m0s" to contain substring : info : hostname: testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:111 ------------------------------ level=info timestamp=2018-08-10T13:58:02.854252Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind=VirtualMachineInstance uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Created virtual machine pod virt-launcher-testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr5fh4s" level=info timestamp=2018-08-10T13:58:12.863064Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind=VirtualMachineInstance uid=659293c9-9ca5-11e8-943d-525500d15501 msg="Pod owner ship transferred to the node virt-launcher-testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr5fh4s" level=info timestamp=2018-08-10T13:58:12.960036Z pos=utils.go:245 component=tests namespace=kubevirt-test-default 
name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind=VirtualMachineInstance uid=659293c9-9ca5-11e8-943d-525500d15501 msg="VirtualMachineInstance defined." level=info timestamp=2018-08-10T13:58:26.123904Z pos=utils.go:245 component=tests namespace=kubevirt-test-default name=testvmihcj5bwjnb48ztwlxq5tt8psh47jwqxsxbq8tr46nhvqvhkwjdrwhqp8hmqkllnc kind=VirtualMachineInstance uid=659293c9-9ca5-11e8-943d-525500d15501 msg="VirtualMachineInstance started." STEP: Getting virt-launcher logs •••• ------------------------------ • [SLOW TEST:45.613 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:189 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Alpine as first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:37.149 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 with boot order /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:189 should be able to boot from selected disk /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 Cirros as first boot /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:11.088 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:220 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:221 should retry starting the VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:222 ------------------------------ • [SLOW TEST:26.192 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 with user-data /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:220 without k8s secret /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:221 should log warning and proceed once the secret is there /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:252 ------------------------------ • [SLOW TEST:49.025 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 when virt-launcher crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:300 should be stopped and have Failed phase /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:301 ------------------------------ • [SLOW TEST:37.211 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 when virt-handler crashes /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:324 should recover and continue management 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:325 ------------------------------ • [SLOW TEST:117.157 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 when virt-handler is responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:355 should indicate that a node is ready for vmis /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:356 ------------------------------ • [SLOW TEST:84.498 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 when virt-handler is not responsive /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:386 the node controller should react /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:425 ------------------------------ • [SLOW TEST:25.997 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 with node tainted /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:478 the vmi with tolerations should be scheduled /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:500 ------------------------------ • ------------------------------ • [SLOW TEST:26.356 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:550 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-default /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ • [SLOW TEST:40.969 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 with non default namespace /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:550 should log libvirt start and stop lifecycle events of the domain /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table.go:92 kubevirt-test-alternative /root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go:46 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.259 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 VirtualMachineInstance Emulation Mode /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:607 should enable emulation in virt-launcher [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:619 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:615 ------------------------------ S [SKIPPING] in Spec Setup (BeforeEach) [0.160 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Creating a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:72 VirtualMachineInstance Emulation Mode 
/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:607 should be reflected in domain XML [BeforeEach] /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:656 Software emulation is not enabled on this cluster /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:615 ------------------------------ •• ------------------------------ • [SLOW TEST:27.316 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Delete a VirtualMachineInstance's Pod /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:765 should result in the VirtualMachineInstance moving to a finalized state /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:766 ------------------------------ • [SLOW TEST:30.052 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:797 with an active pod. /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:798 should result in pod being terminated /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:799 ------------------------------ • [SLOW TEST:53.614 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:797 with ACPI and 0 grace period seconds /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:823 should result in vmi status failed /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:824 ------------------------------ • [SLOW TEST:54.693 seconds] VMIlifecycle /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:51 Delete a VirtualMachineInstance /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:797 with ACPI and some grace period seconds /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:848 should result in vmi status succeeded /root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:849 ------------------------------ panic: test timed out after 1h30m0s goroutine 6409 [running]: testing.(*M).startAlarm.func1() /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1240 +0xfc created by time.goFunc /gimme/.gimme/versions/go1.10.linux.amd64/src/time/sleep.go:172 +0x44 goroutine 1 [chan receive, 90 minutes]: testing.(*T).Run(0xc4205f13b0, 0x12b726c, 0x9, 0x1344078, 0x47fa16) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:825 +0x301 testing.runTests.func1(0xc4205f11d0) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1063 +0x64 testing.tRunner(0xc4205f11d0, 0xc420879df8) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0 testing.runTests(0xc42085f900, 0x1bcee20, 0x1, 0x1, 0x412009) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:1061 +0x2c4 testing.(*M).Run(0xc4203cdb80, 0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:978 +0x171 main.main() _testmain.go:44 +0x151 goroutine 5 [chan receive]: kubevirt.io/kubevirt/vendor/github.com/golang/glog.(*loggingT).flushDaemon(0x1bf5be0) /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:879 +0x8b created by kubevirt.io/kubevirt/vendor/github.com/golang/glog.init.0 /root/go/src/kubevirt.io/kubevirt/vendor/github.com/golang/glog/glog.go:410 +0x203 goroutine 6 [syscall, 90 minutes]: os/signal.signal_recv(0x0) /gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sigqueue.go:139 +0xa6 os/signal.loop() 
	/gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:22 +0x22
created by os/signal.init.0
	/gimme/.gimme/versions/go1.10.linux.amd64/src/os/signal/signal_unix.go:28 +0x41

goroutine 50 [chan receive, 30 minutes]:
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch(0xc4216311c8, 0xc420abf080)
	/root/go/src/kubevirt.io/kubevirt/tests/utils.go:290 +0x5be
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).WaitFor(0xc4216311c8, 0x12b3235, 0x6, 0x1124c00, 0x13bc1f0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/tests/utils.go:295 +0xba
kubevirt.io/kubevirt/tests_test.glob..func17.6.4.1()
	/root/go/src/kubevirt.io/kubevirt/tests/vmi_lifecycle_test.go:904 +0x969
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).runSync(0xc4202c0720, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:113 +0x9c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*runner).run(0xc4202c0720, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go:64 +0x13e
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes.(*ItNode).Run(0xc4203b40c0, 0x13c11e0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go:26 +0x7f
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).runSample(0xc4205f6c30, 0x0, 0x13c11e0, 0xc4200c74c0)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:203 +0x648
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec.(*Spec).Run(0xc4205f6c30, 0x13c11e0, 0xc4200c74c0)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/spec/spec.go:138 +0xff
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpec(0xc4208a6a00, 0xc4205f6c30, 0x0)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:200 +0x10d
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).runSpecs(0xc4208a6a00, 0x1)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:170 +0x329
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run(0xc4208a6a00, 0xb)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:66 +0x11b
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite.(*Suite).Run(0xc4200ecaf0, 0x7f1ade1336f0, 0xc4205f13b0, 0x12b96ec, 0xb, 0xc42085f940, 0x2, 0x2, 0x13dbf40, 0xc4200c74c0, ...)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/suite/suite.go:62 +0x27c
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithCustomReporters(0x13c1f20, 0xc4205f13b0, 0x12b96ec, 0xb, 0xc42085f920, 0x2, 0x2, 0x2)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:221 +0x258
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo.RunSpecsWithDefaultAndCustomReporters(0x13c1f20, 0xc4205f13b0, 0x12b96ec, 0xb, 0xc42038ead0, 0x1, 0x1, 0x1)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/ginkgo_dsl.go:209 +0xab
kubevirt.io/kubevirt/tests_test.TestTests(0xc4205f13b0)
	/root/go/src/kubevirt.io/kubevirt/tests/tests_suite_test.go:43 +0xaa
testing.tRunner(0xc4205f13b0, 0x1344078)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:777 +0xd0
created by testing.(*T).Run
	/gimme/.gimme/versions/go1.10.linux.amd64/src/testing/testing.go:824 +0x2e0

goroutine 51 [chan receive, 90 minutes]:
kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).registerForInterrupts(0xc4208a6a00, 0xc420049500)
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:223 +0xd1
created by kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner.(*SpecRunner).Run
	/root/go/src/kubevirt.io/kubevirt/vendor/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go:60 +0x88

goroutine 52 [select, 90 minutes, locked to thread]:
runtime.gopark(0x1345ec0, 0x0, 0x12b3f73, 0x6, 0x18, 0x1)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/proc.go:291 +0x11a
runtime.selectgo(0xc420094750, 0xc4200495c0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/select.go:392 +0xe50
runtime.ensureSigM.func1()
	/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/signal_unix.go:549 +0x1f4
runtime.goexit()
	/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/asm_amd64.s:2361 +0x1

goroutine 57 [IO wait, 30 minutes]:
internal/poll.runtime_pollWait(0x7f1ade11df00, 0x72, 0xc420da7850)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/netpoll.go:173 +0x57
internal/poll.(*pollDesc).wait(0xc4208a0718, 0x72, 0xffffffffffffff00, 0x13c2ec0, 0x1ae6640)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:85 +0x9b
internal/poll.(*pollDesc).waitRead(0xc4208a0718, 0xc420eee000, 0x8000, 0x8000)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_poll_runtime.go:90 +0x3d
internal/poll.(*FD).Read(0xc4208a0700, 0xc420eee000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/internal/poll/fd_unix.go:157 +0x17d
net.(*netFD).Read(0xc4208a0700, 0xc420eee000, 0x8000, 0x8000, 0x0, 0x8, 0x7ffb)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/fd_unix.go:202 +0x4f
net.(*conn).Read(0xc420728548, 0xc420eee000, 0x8000, 0x8000, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/net/net.go:176 +0x6a
crypto/tls.(*block).readFromUntil(0xc4206b1650, 0x7f1ade1337c0, 0xc420728548, 0x5, 0xc420728548, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:493 +0x96
crypto/tls.(*Conn).readRecord(0xc4200f7500, 0x1346017, 0xc4200f7620, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:595 +0xe0
crypto/tls.(*Conn).Read(0xc4200f7500, 0xc420549000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/crypto/tls/conn.go:1156 +0x100
bufio.(*Reader).Read(0xc4204fd8c0, 0xc42071aab8, 0x9, 0x9, 0x28, 0xc420da7c60, 0x7a622a)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/bufio/bufio.go:216 +0x238
io.ReadAtLeast(0x13c0020, 0xc4204fd8c0, 0xc42071aab8, 0x9, 0x9, 0x9, 0xc4208ca460, 0x43f2c1, 0xc4205dcc00)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:309 +0x86
io.ReadFull(0x13c0020, 0xc4204fd8c0, 0xc42071aab8, 0x9, 0x9, 0x1346008, 0xc420da7d10, 0x462d33)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/io/io.go:327 +0x58
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.readFrameHeader(0xc42071aab8, 0x9, 0x9, 0x13c0020, 0xc4204fd8c0, 0x0, 0xc400000000, 0x7bac80, 0xc420129a68)
	/root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:237 +0x7b
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc42071aa80, 0xc4207e96e0, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/frame.go:492 +0xa4
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc420da7fb0, 0x1344fd8, 0xc420094fb0)
	/root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1428 +0x8e
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc420281040)
	/root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1354 +0x76
created by kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*Transport).newClientConn
	/root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:579 +0x651

goroutine 48 [chan send, 89 minutes]:
kubevirt.io/kubevirt/tests_test.glob..func24.1.2.1.1(0x13f95e0, 0xc420861680, 0xc4201463d8, 0xc420338720, 0xc420816818, 0xc420816838)
	/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:81 +0x138
created by kubevirt.io/kubevirt/tests_test.glob..func24.1.2.1
	/root/go/src/kubevirt.io/kubevirt/tests/vnc_test.go:73 +0x386

goroutine 1495 [chan send, 75 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4205baf00)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 549 [chan send, 83 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42053c3f0)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 669 [chan send, 82 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc42053de90)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 858 [chan send, 81 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4205fb590)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 1013 [chan send, 80 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc4209951a0)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 1316 [chan send, 76 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420509650)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 3439 [chan receive, 59 minutes]:
kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback(0xc42027f610, 0xc420347040, 0xc4209b27e0, 0x0, 0x0, 0x18, 0xc421173ec8)
	/root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:163 +0x32b
kubevirt.io/kubevirt/pkg/kubecli.(*asyncWSRoundTripper).WebsocketCallback-fm(0xc420347040, 0xc4209b27e0, 0x0, 0x0, 0xc420347040, 0xc4209b27e0)
	/root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:313 +0x52
kubevirt.io/kubevirt/pkg/kubecli.(*WebsocketRoundTripper).RoundTrip(0xc42027fd80, 0xc420f57400, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:142 +0xab
kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper.func1(0x13c04c0, 0xc42027fd80, 0xc420f57400, 0xc420aba720)
	/root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:328 +0x56
created by kubevirt.io/kubevirt/pkg/kubecli.(*vmis).asyncSubresourceHelper
	/root/go/src/kubevirt.io/kubevirt/pkg/kubecli/vmi.go:326 +0x33a

goroutine 6049 [chan send, 33 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420f9a0f0)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 6040 [chan send, 34 minutes]:
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420753c50)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:114 +0x114
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 6407 [semacquire, 30 minutes]:
sync.runtime_notifyListWait(0xc420129a80, 0xc400000011)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/runtime/sema.go:510 +0x10b
sync.(*Cond).Wait(0xc420129a70)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/sync/cond.go:56 +0x80
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.(*pipe).Read(0xc420129a68, 0xc420a68601, 0x5ff, 0x5ff, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/pipe.go:64 +0x8f
kubevirt.io/kubevirt/vendor/golang.org/x/net/http2.transportResponseBody.Read(0xc420129a40, 0xc420a68601, 0x5ff, 0x5ff, 0x0, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/vendor/golang.org/x/net/http2/transport.go:1674 +0xa1
encoding/json.(*Decoder).refill(0xc4201d7a40, 0x7fba0a, 0x9)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:159 +0x132
encoding/json.(*Decoder).readValue(0xc4201d7a40, 0x0, 0x0, 0x11355c0)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:134 +0x23d
encoding/json.(*Decoder).Decode(0xc4201d7a40, 0x114ef80, 0xc420e60f00, 0x13c74a0, 0xc42089c000)
	/gimme/.gimme/versions/go1.10.linux.amd64/src/encoding/json/stream.go:63 +0x78
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer.(*jsonFrameReader).Read(0xc420f9ff50, 0xc420776000, 0x800, 0xa80, 0xc420b3ac40, 0x40, 0x38)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/util/framer/framer.go:150 +0x295
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming.(*decoder).Decode(0xc420920d20, 0x0, 0x13c9160, 0xc420b3ac40, 0x400, 0x13c74a0, 0xc4208b93d8, 0x456ae0, 0xc4208b9380)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/runtime/serializer/streaming/streaming.go:77 +0x95
kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch.(*Decoder).Decode(0xc420cc55c0, 0xc420da9fa8, 0x8, 0x13c74a0, 0xc42089c000, 0x0, 0x0)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/client-go/rest/watch/decoder.go:49 +0x7c
kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.(*StreamWatcher).receive(0xc420f9ff80)
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:93 +0x12e
created by kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch.NewStreamWatcher
	/root/go/src/kubevirt.io/kubevirt/vendor/k8s.io/apimachinery/pkg/watch/streamwatcher.go:60 +0xa8

goroutine 6408 [chan receive, 30 minutes]:
kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch.func3(0x13c96e0, 0xc420f9ff80, 0xc420809f30, 0xc4208b93e0)
	/root/go/src/kubevirt.io/kubevirt/tests/utils.go:276 +0x93
created by kubevirt.io/kubevirt/tests.(*ObjectEventWatcher).Watch
	/root/go/src/kubevirt.io/kubevirt/tests/utils.go:274 +0x4ae

make: *** [functest] Error 2
+ make cluster-down
./cluster/down.sh
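
Reading the dump: the run aborted on the go test 1h30m alarm, not on an assertion. Goroutines 50 and 6408 had been parked in a channel receive inside tests.(*ObjectEventWatcher).Watch (utils.go:274-290) for 30 minutes, waiting for an event that never arrived. Below is a minimal, self-contained Go sketch of that failure mode; waitForEvent is a hypothetical helper, not KubeVirt's actual ObjectEventWatcher API. A bare `<-ch` blocks indefinitely when the producer never sends, whereas a select with a timeout surfaces the stall as an error long before the suite-wide alarm fires:

package main

import (
	"fmt"
	"time"
)

// waitForEvent is a hypothetical helper: it returns the first event received
// on ch, or an error once timeout elapses. A bare `<-ch`, as in the blocked
// goroutines in the dump above, would wait forever if nothing is ever sent.
func waitForEvent(ch <-chan string, timeout time.Duration) (string, error) {
	select {
	case ev := <-ch:
		return ev, nil
	case <-time.After(timeout):
		return "", fmt.Errorf("timed out after %s waiting for event", timeout)
	}
}

func main() {
	events := make(chan string) // no producer ever sends on this channel
	if _, err := waitForEvent(events, 2*time.Second); err != nil {
		fmt.Println(err) // reports the timeout instead of hanging the whole run
	}
}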