<?xml version="1.0" encoding="UTF-8"?>
<testsuites
name="Vagrant-QA"
tests="32"
failures="3"
>
<testsuite
name="log/vcp6-tests/"
tests="32"
failures="3"
skipped="10"
>
<testcase
name="00-noop.sh"
classname="log/vcp6-tests/.00-noop.sh"
/>
<testcase
name="01-setup-repo-prod.sh"
classname="log/vcp6-tests/.01-setup-repo-prod.sh"
/>
<testcase
name="01-setup-repo-stage.sh"
classname="log/vcp6-tests/.01-setup-repo-stage.sh"
/>
<testcase
name="02-install-vcp-debian.sh"
classname="log/vcp6-tests/.02-install-vcp-debian.sh"
/>
<testcase
name="02-install-vcp-redhat.sh"
classname="log/vcp6-tests/.02-install-vcp-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="03-install-vmods-debian.sh"
classname="log/vcp6-tests/.03-install-vmods-debian.sh"
/>
<testcase
name="03-install-vmods-redhat.sh"
classname="log/vcp6-tests/.03-install-vmods-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="04-install-vcs-debian.sh"
classname="log/vcp6-tests/.04-install-vcs-debian.sh"
/>
<testcase
name="04-install-vcs-redhat.sh"
classname="log/vcp6-tests/.04-install-vcs-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="05-install-varnish-agent-debian.sh"
classname="log/vcp6-tests/.05-install-varnish-agent-debian.sh"
/>
<testcase
name="05-install-varnish-agent-redhat.sh"
classname="log/vcp6-tests/.05-install-varnish-agent-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="07-install-vac-debian.sh"
classname="log/vcp6-tests/.07-install-vac-debian.sh"
/>
<testcase
name="07-install-vac-redhat.sh"
classname="log/vcp6-tests/.07-install-vac-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="10-systemctl-vcp.sh"
classname="log/vcp6-tests/.10-systemctl-vcp.sh"
/>
<testcase
name="11-systemctl-vcs.sh"
classname="log/vcp6-tests/.11-systemctl-vcs.sh"
/>
<testcase
name="12-systemctl-varnish-agent.sh"
classname="log/vcp6-tests/.12-systemctl-varnish-agent.sh"
/>
<testcase
name="14-systemtcl-vac.sh"
classname="log/vcp6-tests/.14-systemtcl-vac.sh"
/>
<testcase
name="14-sysv-vac.sh"
classname="log/vcp6-tests/.14-sysv-vac.sh"
>
<skipped/>
</testcase>
<testcase
name="15-systemctl-varnish-vagent-vac.sh"
classname="log/vcp6-tests/.15-systemctl-varnish-vagent-vac.sh"
/>
<testcase
name="15-sysv-varnish-agent-vac.sh"
classname="log/vcp6-tests/.15-sysv-varnish-agent-vac.sh"
>
<skipped/>
</testcase>
<testcase
name="23-varnishtest-all-versions.sh"
classname="log/vcp6-tests/.23-varnishtest-all-versions.sh"
/>
<testcase
name="24-install-vha-debian.sh"
classname="log/vcp6-tests/.24-install-vha-debian.sh"
/>
<testcase
name="24-install-vha-redhat.sh"
classname="log/vcp6-tests/.24-install-vha-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="25-varnishtest-vcp-specific.sh"
classname="log/vcp6-tests/.25-varnishtest-vcp-specific.sh"
>
<failure type="failed" message="failed">
**** top 0.0 extmacro def pwd=/vagrant
**** top 0.0 extmacro def localhost=127.0.0.1
**** top 0.0 extmacro def bad_backend=127.0.0.1 40529
**** top 0.0 extmacro def bad_ip=192.0.2.255
**** top 0.0 macro def testdir=//vagrant/vcp6-tests/vtc
**** top 0.0 macro def tmpdir=/tmp/vtc.15127.2fc17a50
* top 0.0 TEST /vagrant/vcp6-tests/vtc/01-all-vmods.vtc starting
** top 0.0 === varnishtest Verify that Varnish starts with all VMODs impor...
* top 0.0 TEST Verify that Varnish starts with all VMODs imported
** top 0.0 === server s1 {
** s1 0.0 Starting server
**** s1 0.0 macro def s1_addr=127.0.0.1
**** s1 0.0 macro def s1_port=33331
**** s1 0.0 macro def s1_sock=127.0.0.1 33331
* s1 0.0 Listen on 127.0.0.1 33331
** top 0.0 === varnish v1 -vcl+backend {
** s1 0.0 Started on 127.0.0.1 33331
** v1 0.0 Launch
*** v1 0.0 CMD: cd ${pwd} ANDAND exec varnishd -d -n /tmp/vtc.15127.2fc17a50/v1 -l 2m -p auto_restart=off -p syslog_cli_traffic=off -p sigsegv_handler=on -p thread_pool_min=10 -p debug=+vtc_mode -a '127.0.0.1:0' -M '127.0.0.1 40091' -P /tmp/vtc.15127.2fc17a50/v1/varnishd.pid
*** v1 0.0 CMD: cd /vagrant ANDAND exec varnishd -d -n /tmp/vtc.15127.2fc17a50/v1 -l 2m -p auto_restart=off -p syslog_cli_traffic=off -p sigsegv_handler=on -p thread_pool_min=10 -p debug=+vtc_mode -a '127.0.0.1:0' -M '127.0.0.1 40091' -P /tmp/vtc.15127.2fc17a50/v1/varnishd.pid
*** v1 0.0 PID: 15133
**** v1 0.0 macro def v1_pid=15133
**** v1 0.0 macro def v1_name=/tmp/vtc.15127.2fc17a50/v1
*** v1 0.0 debug|Debug: Version: varnish-plus-6.0.1r5 revision 87748e9eb3054a0395e8d0df6e06306b5782a089
*** v1 0.0 debug|Debug: Platform: Linux,4.15.0-20-generic,x86_64,-junix,-sdefault,-sdefault,-hcritbit
*** v1 0.0 debug|200 325
*** v1 0.0 debug|-----------------------------
*** v1 0.0 debug|Varnish Cache CLI 1.0
*** v1 0.0 debug|-----------------------------
*** v1 0.0 debug|Linux,4.15.0-20-generic,x86_64,-junix,-sdefault,-sdefault,-hcritbit
*** v1 0.0 debug|varnish-plus-6.0.1r5 revision 87748e9eb3054a0395e8d0df6e06306b5782a089
*** v1 0.0 debug|
*** v1 0.0 debug|Type 'help' for command list.
*** v1 0.0 debug|Type 'quit' to close CLI session.
*** v1 0.0 debug|Type 'start' to launch worker process.
*** v1 0.0 debug|
**** v1 0.1 CLIPOLL 1 0x1 0x0
*** v1 0.1 CLI connection fd = 7
*** v1 0.1 CLI RX 107
**** v1 0.1 CLI RX|axiesfjrwtfjfsnumyjmltdlnnojqhbv
**** v1 0.1 CLI RX|
**** v1 0.1 CLI RX|Authentication required.
**** v1 0.1 CLI TX|auth 6a93a66bfbbce3eb8e643447d69d8921360e7bc735eb12e4851d9128ae0429c5
*** v1 0.1 CLI RX 200
**** v1 0.1 CLI RX|-----------------------------
**** v1 0.1 CLI RX|Varnish Cache CLI 1.0
**** v1 0.1 CLI RX|-----------------------------
**** v1 0.1 CLI RX|Linux,4.15.0-20-generic,x86_64,-junix,-sdefault,-sdefault,-hcritbit
**** v1 0.1 CLI RX|varnish-plus-6.0.1r5 revision 87748e9eb3054a0395e8d0df6e06306b5782a089
**** v1 0.1 CLI RX|
**** v1 0.1 CLI RX|Type 'help' for command list.
**** v1 0.1 CLI RX|Type 'quit' to close CLI session.
**** v1 0.1 CLI RX|Type 'start' to launch worker process.
**** v1 0.1 CLI TX|vcl.inline vcl1 LESSLESS %XJEIFLH|)Xspa8P
**** v1 0.1 CLI TX|vcl 4.1;
**** v1 0.1 CLI TX|backend s1 { .host = 127.0.0.1; .port = 33331; }
**** v1 0.1 CLI TX|
**** v1 0.1 CLI TX|
**** v1 0.1 CLI TX|\t# This list needs to be updated whenever new VMODs are added
**** v1 0.1 CLI TX|\t#import acl;
**** v1 0.1 CLI TX|\timport aclplus;
**** v1 0.1 CLI TX|\timport akamai;
**** v1 0.1 CLI TX|\timport deviceatlas;
**** v1 0.1 CLI TX|\timport bodyaccess;
**** v1 0.1 CLI TX|\timport cookieplus;
**** v1 0.1 CLI TX|\timport cookie;
**** v1 0.1 CLI TX|\timport curl;
**** v1 0.1 CLI TX|\timport digest;
**** v1 0.1 CLI TX|\timport directors;
**** v1 0.1 CLI TX|\timport edgestash;
**** v1 0.1 CLI TX|\timport geoip;
**** v1 0.1 CLI TX|\timport goto;
**** v1 0.1 CLI TX|\timport header;
**** v1 0.1 CLI TX|\timport http;
**** v1 0.1 CLI TX|\timport json;
**** v1 0.1 CLI TX|\timport kvstore;
**** v1 0.1 CLI TX|\timport leastconn;
**** v1 0.1 CLI TX|\timport memcached;
**** v1 0.1 CLI TX|\t#import named;
**** v1 0.1 CLI TX|\timport paywall;
**** v1 0.1 CLI TX|\timport rewrite;
**** v1 0.1 CLI TX|\timport rtstatus;
**** v1 0.1 CLI TX|\timport saintmode;
**** v1 0.1 CLI TX|\timport session;
**** v1 0.1 CLI TX|\t#import softpurge;
**** v1 0.1 CLI TX|\t#import sqlite3;
**** v1 0.1 CLI TX|\timport std;
**** v1 0.1 CLI TX|\timport tcp;
**** v1 0.1 CLI TX|\timport var;
**** v1 0.1 CLI TX|\timport vha;
**** v1 0.1 CLI TX|\timport vsthrottle;
**** v1 0.1 CLI TX|\timport xkey;
**** v1 0.1 CLI TX|
**** v1 0.1 CLI TX|%XJEIFLH|)Xspa8P
*** v1 0.1 CLI RX 106
**** v1 0.1 CLI RX|Message from VCC-compiler:
**** v1 0.1 CLI RX|Incompatible VMOD akamai
**** v1 0.1 CLI RX|\tFile name: /usr/lib/varnish-plus/vmods/libvmod_akamai.so
**** v1 0.1 CLI RX|\tABI mismatch, expected LESSVarnish Plus 6.0.1r5 87748e9eb3054a0395e8d0df6e06306b5782a089MORE, got LESSVarnish Plus 6.0.1r1 3baba64a96b9b339ba1839ee4bcaa9ee9bfbacacMORE
**** v1 0.1 CLI RX|('LESSvcl.inlineMORE' Line 8 Pos 16)
**** v1 0.1 CLI RX| import akamai;
**** v1 0.1 CLI RX|---------------######-
**** v1 0.1 CLI RX|
**** v1 0.1 CLI RX|Running VCC-compiler failed, exited with 2
**** v1 0.1 CLI RX|VCL compilation failed
---- v1 0.1 FAIL VCL does not compile
* top 0.1 RESETTING after /vagrant/vcp6-tests/vtc/01-all-vmods.vtc
** s1 0.1 Waiting for server (4/-1)
** v1 0.1 Wait
**** v1 0.1 CLI TX|panic.clear
*** v1 0.2 CLI RX 300
**** v1 0.2 CLI RX|No panic to clear
*** v1 0.2 debug|Info: manager stopping child
*** v1 0.2 debug|Info: manager dies
**** v1 0.2 STDOUT poll 0x10
*** v1 0.2 vsl|No VSL chunk found (child not started ?)
** v1 0.2 WAIT4 pid=15133 status=0x0000 (user 0.003926 sys 0.022808)
* top 0.2 TEST /vagrant/vcp6-tests/vtc/01-all-vmods.vtc FAILED
# top TEST /vagrant/vcp6-tests/vtc/01-all-vmods.vtc FAILED (0.214) exit=2
</failure>
</testcase>
<testcase
name="26-systemctl-vha.sh"
classname="log/vcp6-tests/.26-systemctl-vha.sh"
/>
<testcase
name="27-install-vdisco-debian.sh"
classname="log/vcp6-tests/.27-install-vdisco-debian.sh"
/>
<testcase
name="27-install-vdisco-redhat.sh"
classname="log/vcp6-tests/.27-install-vdisco-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="28-systemctl-vdisco.sh"
classname="log/vcp6-tests/.28-systemctl-vdisco.sh"
>
<failure type="failed" message="failed">
INFO: systemctl found, running systemd test (ubuntu bionic)
Generating new nodefile /etc/varnish/nodes.conf (2019-01-09 15:00:50.62660743 +0000 UTC m=+0.002414597)
# /lib/systemd/system/vha-agent.service
[Unit]
Description=Varnish High Availability Agent.
After=varnish.service
Requisite=varnish.service
[Service]
ExecStart=/usr/bin/vha-agent -D -t off -P /run/vha-agent/vha-agent.pid -N /etc/varnish/nodes.conf -s /run/vha-agent/vha-status -T TOKEN
ExecReload=/bin/kill -HUP $MAINPID
PIDFile=/run/vha-agent/vha-agent.pid
User=varnish
Group=varnish
RuntimeDirectory=vha-agent
RuntimeDirectoryPreserve=yes
Type=forking
[Install]
WantedBy=multi-user.target
# /etc/systemd/system/vha-agent.service.d/exec.conf
[Service]
ExecStart=
ExecStart=/usr/bin/vha-agent -D -t off -P /run/vha-agent/vha-agent.pid -N /etc/varnish/nodes.conf -s /run/vha-agent/vha-status -T qa-test -p stat_intvl=1
PONG 1547046050 1.0
Unit varnish-discovery.service could not be found.
Failed to restart varnish-discovery.service: Unit varnish-discovery.service not found.
</failure>
</testcase>
<testcase
name="29-systemctl-varnishncsa.sh"
classname="log/vcp6-tests/.29-systemctl-varnishncsa.sh"
/>
<testcase
name="30-install-broadcaster-debian.sh"
classname="log/vcp6-tests/.30-install-broadcaster-debian.sh"
>
<failure type="failed" message="failed">
INFO: Running on ubuntu
Ign:1 https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 InRelease
Hit:2 https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 Release
Hit:3 http://security.ubuntu.com/ubuntu bionic-security InRelease
Hit:5 http://us.archive.ubuntu.com/ubuntu bionic InRelease
Hit:6 http://us.archive.ubuntu.com/ubuntu bionic-updates InRelease
Hit:7 http://us.archive.ubuntu.com/ubuntu bionic-backports InRelease
Hit:8 https://packagecloud.io/varnishplus/60-staging/ubuntu bionic InRelease
Hit:9 https://packagecloud.io/varnishplus/60-akamai-staging/ubuntu bionic InRelease
Hit:10 https://packagecloud.io/varnishplus/60-deviceatlas-staging/ubuntu bionic InRelease
Hit:11 https://packagecloud.io/varnishplus/60/ubuntu bionic InRelease
Hit:12 https://packagecloud.io/varnishplus/vagrant-qa-backup/ubuntu bionic InRelease
Reading package lists...
resource-agents - Cluster Resource Agents
libvarnishapi-dev - development files for Varnish
libvarnishapi1 - shared libraries for Varnish
nagios-plugins-contrib - Plugins for nagios compatible monitoring systems
prometheus-varnish-exporter - Prometheus exporter for Varnish
varnish - state of the art, high-performance web accelerator
varnish-doc - documentation for Varnish Cache
varnish-modules - Varnish module collection
varnish-agent - Varnish Agent is a small daemon meant to communicate with
varnish-broadcaster - The Varnish Broadcaster broadcasts requests to multiple Varnish caches
varnish-custom-statistics - Varnish Custom Statistics aggregating daemon.
varnish-custom-statistics-agent - Varnish Custom Statistics agent.
varnish-plus - A supercharged version of the popular web cache, Varnish Cache
varnish-plus-dev - development files for Varnish Cache
varnish-plus-ha - Simple replication of object insertions in a Varnish Cache Plus server.
varnish-plus-vmods-extra - Extra package of Varnish Modules (VMODs) for Varnish Cache Plus.
varnish-plus-akamai-connector - Varnish Akamai Connector
libvmod-deviceatlas - DeviceAtlas VMOD for Varnish
vac - Varnish Administration Console (VAC) is a management console for clusters of Varnish web caches.
varnish-plus-addon-ssl - Hitch TLS proxy
varnish-plus-discovery - Creates and updates nodes.conf files, used notably by vha-agent
Reading package lists...
Building dependency tree...
Reading state information...
The following NEW packages will be installed:
varnish-broadcaster
0 upgraded, 1 newly installed, 0 to remove and 222 not upgraded.
Need to get 2,717 kB of archives.
After this operation, 7,883 kB of additional disk space will be used.
Get:1 https://packagecloud.io/varnishplus/60-staging/ubuntu bionic/main amd64 varnish-broadcaster amd64 1.2.0-22~bionic [2,717 kB]
Fetched 2,717 kB in 3s (851 kB/s)
Selecting previously unselected package varnish-broadcaster.
(Reading database ...
(Reading database ... 5%
(Reading database ... 10%
(Reading database ... 15%
(Reading database ... 20%
(Reading database ... 25%
(Reading database ... 30%
(Reading database ... 35%
(Reading database ... 40%
(Reading database ... 45%
(Reading database ... 50%
(Reading database ... 55%
(Reading database ... 60%
(Reading database ... 65%
(Reading database ... 70%
(Reading database ... 75%
(Reading database ... 80%
(Reading database ... 85%
(Reading database ... 90%
(Reading database ... 95%
(Reading database ... 100%
(Reading database ... 106549 files and directories currently installed.)
Preparing to unpack .../varnish-broadcaster_1.2.0-22~bionic_amd64.deb ...
Unpacking varnish-broadcaster (1.2.0-22~bionic) ...
Processing triggers for ureadahead (0.100.0-20) ...
Setting up varnish-broadcaster (1.2.0-22~bionic) ...
Adding system user `vbcast' (UID 113) ...
Adding new group `vbcast' (GID 117) ...
Adding new user `vbcast' (UID 113) with group `vbcast' ...
Not creating home directory `/'.
Created symlink /etc/systemd/system/multi-user.target.wants/broadcaster.service → /lib/systemd/system/broadcaster.service.
A dependency job for broadcaster.service failed. See 'journalctl -xe' for details.
invoke-rc.d: initscript broadcaster, action start failed.
● broadcaster.service - Varnish Broadcaster
Loaded: loaded (/lib/systemd/system/broadcaster.service; enabled; vendor preset: enabled)
Active: inactive (dead)
Jan 09 15:01:15 ubuntu-18 systemd[1]: Dependency failed for Varnish Broadcaster.
Jan 09 15:01:15 ubuntu-18 systemd[1]: broadcaster.service: Job broadcaster.service/start failed with result 'dependency'.
dpkg: error processing package varnish-broadcaster (--configure):
installed varnish-broadcaster package post-installation script subprocess returned error exit status 1
Processing triggers for systemd (237-3ubuntu10) ...
Processing triggers for ureadahead (0.100.0-20) ...
Errors were encountered while processing:
varnish-broadcaster
E: Sub-process /usr/bin/dpkg returned an error code (1)
</failure>
</testcase>
<testcase
name="30-install-broadcaster-redhat.sh"
classname="log/vcp6-tests/.30-install-broadcaster-redhat.sh"
>
<skipped/>
</testcase>
<testcase
name="31-systemctl-broadcaster.sh"
classname="log/vcp6-tests/.31-systemctl-broadcaster.sh"
/>
</testsuite>
</testsuites>