Report for TAP-Formatter-Bamboo-0.04

From: metabase:user:314402c4-2aae-11df-837a-5e0a49663a4f
Subject: FAIL TAP-Formatter-Bamboo-0.04 v5.22.2 GNU/Linux
Date: 2016-05-08T16:19:19Z

This distribution has been tested as part of the CPAN Testers
project, supporting the Perl programming language.  See
http://wiki.cpantesters.org/ for more information or email
questions to cpan-testers-discuss@perl.org.


--
Dear Bartlomiej Fulanty,

This is a computer-generated report for TAP-Formatter-Bamboo-0.04
on perl 5.22.2, created by CPAN-Reporter-1.2017.

Thank you for uploading your work to CPAN.  However, there was a problem
testing your distribution.

If you think this report is invalid, please consult the CPAN Testers Wiki
for suggestions on how to avoid getting FAIL reports for missing library
or binary dependencies, unsupported operating systems, and so on:

http://wiki.cpantesters.org/wiki/CPANAuthorNotes

Sections of this report:

    * Tester comments
    * Program output
    * Prerequisites
    * Environment and other context

------------------------------
TESTER COMMENTS
------------------------------

Additional comments from tester:

none provided

------------------------------
PROGRAM OUTPUT
------------------------------

Output from '/usr/bin/make test':

PERL_DL_NONLAZY=1 "/bbbike/perl-5.22.2/bin/perl5.22.2" "-MExtUtils::Command::MM" "-MTest::Harness" "-e" "undef *Test::Harness::Switches; test_harness(0, 'blib/lib', 'blib/arch')" t/*.t
t/00-load.t .... ok

#   Failed test 't/internal_tests/bad_chars.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/bad_chars.test" errors="0" failures="0" tests="3">
#     <system-out/>
#     <testcase name="t/internal_tests/bad_chars.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/bad_chars.results

#   Failed test 't/internal_tests/descriptive.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/descriptive.test" errors="0" failures="0" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/descriptive.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/descriptive.results

#   Failed test 't/internal_tests/descriptive_trailing.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/descriptive_trailing.test" errors="0" failures="0" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/descriptive_trailing.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/descriptive_trailing.results

#   Failed test 't/internal_tests/die.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/die.test" errors="1" failures="0" tests="0">
#     <system-out/>
#     <testcase name="t/internal_tests/die.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     parse errors (No plan found in TAP output)
# Test output:
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/die.results

#   Failed test 't/internal_tests/die_head_end.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/die_head_end.test" errors="1" failures="0" tests="4">
#     <system-out/>
#     <testcase name="t/internal_tests/die_head_end.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     parse errors (No plan found in TAP output)
# Test output:
# ok 1
# ok 2
# ok 3
# ok 4
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/die_head_end.results

#   Failed test 't/internal_tests/die_last_minute.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/die_last_minute.test" errors="0" failures="0" tests="4">
#     <system-out/>
#     <testcase name="t/internal_tests/die_last_minute.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     non-zero exit code (1)
# Test output:
# ok 1
# ok 2
# ok 3
# ok 4
# 1..4
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/die_last_minute.results

#   Failed test 't/internal_tests/die_unfinished.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/die_unfinished.test" errors="1" failures="0" tests="3">
#     <system-out/>
#     <testcase name="t/internal_tests/die_unfinished.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     parse errors (Bad plan.  You planned 4 tests but ran 3.)
# Test output:
# 1..4
# ok 1
# ok 2
# ok 3
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/die_unfinished.results

#   Failed test 't/internal_tests/empty.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/empty.test" errors="1" failures="0" tests="0">
#     <system-out/>
#     <testcase name="t/internal_tests/empty.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     parse errors (No plan found in TAP output)
# Test output:
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/empty.results

#   Failed test 't/internal_tests/no_nums.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/no_nums.test" errors="0" failures="1" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/no_nums.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     failed tests (3)
# Test output:
# 1..5
# ok
# ok
# not ok
# ok
# ok
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/no_nums.results

#   Failed test 't/internal_tests/simple.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/simple.test" errors="0" failures="0" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/simple.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/simple.results

#   Failed test 't/internal_tests/simple_fail.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/simple_fail.test" errors="0" failures="2" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/simple_fail.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     failed tests (2, 5)
# Test output:
# 1..5
# ok 1
# not ok 2
# ok 3
# ok 4
# not ok 5
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/simple_fail.results

#   Failed test 't/internal_tests/simple_yaml.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/simple_yaml.test" errors="0" failures="0" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/simple_yaml.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/simple_yaml.results

#   Failed test 't/internal_tests/skip.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/skip.test" errors="0" failures="0" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/skip.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/skip.results

#   Failed test 't/internal_tests/skip_nomsg.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/skip_nomsg.test" errors="0" failures="0" tests="1">
#     <system-out/>
#     <testcase name="t/internal_tests/skip_nomsg.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/skip_nomsg.results

#   Failed test 't/internal_tests/skipall.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/skipall.test" errors="0" failures="0" tests="0">
#     <system-out/>
#     <testcase name="t/internal_tests/skipall.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/skipall.results

#   Failed test 't/internal_tests/skipall_nomsg.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/skipall_nomsg.test" errors="0" failures="0" tests="0">
#     <system-out/>
#     <testcase name="t/internal_tests/skipall_nomsg.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/skipall_nomsg.results

#   Failed test 't/internal_tests/stdout_stderr.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/stdout_stderr.test" errors="0" failures="0" tests="4">
#     <system-out/>
#     <testcase name="t/internal_tests/stdout_stderr.test" time="TEST_TIME"/>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/stdout_stderr.results

#   Failed test 't/internal_tests/todo.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/todo.test" errors="0" failures="0" tests="5">
#     <system-out/>
#     <testcase name="t/internal_tests/todo.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     unexpected TODO passed (2)
# Test output:
# 1..5 todo 3 2;
# ok 1
# ok 2
# not ok 3
# ok 4
# ok 5
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/todo.results

#   Failed test 't/internal_tests/todo_inline.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/todo_inline.test" errors="0" failures="0" tests="3">
#     <system-out/>
#     <testcase name="t/internal_tests/todo_inline.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     unexpected TODO passed (2)
# Test output:
# 1..3
# not ok 1 - Foo # TODO Just testing the todo interface.
# ok 2 - Unexpected success # TODO Just testing the todo interface.
# ok 3 - This is not todo
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/todo_inline.results

#   Failed test 't/internal_tests/todo_misparse.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/todo_misparse.test" errors="0" failures="1" tests="1">
#     <system-out/>
#     <testcase name="t/internal_tests/todo_misparse.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     failed tests (1)
# Test output:
# 1..1
# not ok 1 Hamlette # TODOORNOTTODO
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/todo_misparse.results

#   Failed test 't/internal_tests/too_many.test'
#   at t/20-single.t line 44.
# During compare:
# no element found at line 1, column 0, byte -1
# ---- Expected: ----
# <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
# <testsuites>
#   <testsuite name="t/internal_tests/too_many.test" errors="1" failures="4" tests="7">
#     <system-out/>
#     <testcase name="t/internal_tests/too_many.test" time="TEST_TIME">
#       <failure><![CDATA[Fail reason(s):
#     failed tests (4, 5, 6, 7)
#     parse errors (Bad plan.  You planned 3 tests but ran 7.)
# Test output:
# 1..3
# ok 1
# ok 2
# ok 3
# ok 4
# ok 5
# ok 6
# ok 7
# 
# ]]></failure>
#     </testcase>
#   </testsuite>
# </testsuites>
# Actual results saved to file t/internal_tests/too_many.results
# Looks like you failed 21 tests of 21.
t/20-single.t .. 
Dubious, test returned 21 (wstat 5376, 0x1500)
Failed 21/21 subtests 

Test Summary Report
-------------------
t/20-single.t (Wstat: 5376 Tests: 21 Failed: 21)
  Failed tests:  1-21
  Non-zero exit status: 21
Files=2, Tests=22,  3 wallclock secs ( 0.03 usr  0.02 sys +  1.45 cusr  0.38 csys =  1.88 CPU)
Result: FAIL
Failed 1/2 test programs. 21/22 subtests failed.
Makefile:859: recipe for target 'test_dynamic' failed
make: *** [test_dynamic] Error 21

------------------------------
PREREQUISITES
------------------------------

Prerequisite modules loaded:

requires:

    Module              Need Have    
    ------------------- ---- --------
    Encode              2.42 2.84    
    Moose               2    2.1801  
    MooseX::NonMoose    0.20 0.26    
    Test::Harness       3.2  3.35    
    XML::LibXML         2    2.0124  

build_requires:

    Module              Need Have    
    ------------------- ---- --------
    ExtUtils::MakeMaker 0    7.16    
    File::Slurp         0    9999.19 
    File::Temp          0    0.2304  
    Test::Harness       0    3.35    
    Test::More          0    1.001014
    Test::XML           0    0.08    

configure_requires:

    Module              Need Have    
    ------------------- ---- --------
    ExtUtils::MakeMaker 6.64 7.16    


------------------------------
ENVIRONMENT AND OTHER CONTEXT
------------------------------

Environment variables:

    LANG = C
    LC_ALL = de_DE.UTF-8
    PATH = /usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin:/home/cpansand/bin/linux-gnu:/home/cpansand/bin/sh:/home/cpansand/bin:/usr/games:/home/cpansand/devel:/home/eserte/src/srezic-misc/scripts
    PERL5LIB = /home/cpansand/.cpan/build/2016050809/Linux-Prctl-1.6.0-QvhvSs/blib/arch:/home/cpansand/.cpan/build/2016050809/Linux-Prctl-1.6.0-QvhvSs/blib/lib
    PERL5OPT = 
    PERL5_CPANPLUS_IS_RUNNING = 18153
    PERL5_CPAN_IS_RUNNING = 18153
    PERL5_CPAN_IS_RUNNING_IN_RECURSION = 15012,18153
    PERLDOC = -MPod::Perldoc::ToTextOverstrike
    PERL_BATCH = yes
    PERL_CANARY_STABILITY_NOPROMPT = 1
    PERL_CPAN_REPORTER_CONFIG = /run/user/1023/cpansmoker/2016050806/cpanreporter_004_config.ini
    PERL_EXTUTILS_AUTOINSTALL = --defaultdeps
    PERL_PARALLEL_SMOKER = yes
    SHELL = /bin/zsh
    TERM = screen
    TMPDIR = /run/user/1023/cpansmoker/2016050806

Perl special variables (and OS-specific diagnostics, for MSWin32):

    $^X = /bbbike/perl-5.22.2/bin/perl5.22.2
    $UID/$EUID = 1023 / 1023
    $GID = 1023 1023
    $EGID = 1023 1023

Perl module toolchain versions installed:

    Module              Have    
    ------------------- --------
    CPAN                2.11    
    CPAN::Meta          2.150001
    Cwd                 3.62    
    ExtUtils::CBuilder  0.280221
    ExtUtils::Command   7.16    
    ExtUtils::Install   2.04    
    ExtUtils::MakeMaker 7.16    
    ExtUtils::Manifest  1.70    
    ExtUtils::ParseXS   3.30    
    File::Spec          3.62    
    JSON                2.90    
    JSON::PP            2.27400 
    Module::Build       0.4218  
    Module::Signature   0.79    
    Parse::CPAN::Meta   1.4414  
    Test::Harness       3.35    
    Test::More          1.001014
    YAML                1.15    
    YAML::Syck          1.29    
    version             0.9916  


--

Summary of my perl5 (revision 5 version 22 subversion 2) configuration:
   
  Platform:
    osname=linux, osvers=3.16.0-4-amd64, archname=x86_64-linux
    uname='linux eserte 3.16.0-4-amd64 #1 smp debian 3.16.7-ckt25-2 (2016-04-08) x86_64 gnulinux '
    config_args='-ds -e -Dprefix=/opt/perl-5.22.2 -Dcf_email=srezic@cpan.org'
    hint=recommended, useposix=true, d_sigaction=define
    useithreads=undef, usemultiplicity=undef
    use64bitint=define, use64bitall=define, uselongdouble=undef
    usemymalloc=n, bincompat5005=undef
  Compiler:
    cc='cc', ccflags ='-fwrapv -fno-strict-aliasing -pipe -fstack-protector-strong -I/usr/local/include -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -D_FORTIFY_SOURCE=2',
    optimize='-O2',
    cppflags='-fwrapv -fno-strict-aliasing -pipe -fstack-protector-strong -I/usr/local/include'
    ccversion='', gccversion='4.9.2', gccosandvers=''
    intsize=4, longsize=8, ptrsize=8, doublesize=8, byteorder=12345678, doublekind=3
    d_longlong=define, longlongsize=8, d_longdbl=define, longdblsize=16, longdblkind=3
    ivtype='long', ivsize=8, nvtype='double', nvsize=8, Off_t='off_t', lseeksize=8
    alignbytes=8, prototype=define
  Linker and Libraries:
    ld='cc', ldflags =' -fstack-protector-strong -L/usr/local/lib'
    libpth=/usr/local/lib /usr/lib/gcc/x86_64-linux-gnu/4.9/include-fixed /usr/include/x86_64-linux-gnu /usr/lib /lib/x86_64-linux-gnu /lib/../lib /usr/lib/x86_64-linux-gnu /usr/lib/../lib /lib
    libs=-lpthread -lnsl -ldl -lm -lcrypt -lutil -lc
    perllibs=-lpthread -lnsl -ldl -lm -lcrypt -lutil -lc
    libc=libc-2.19.so, so=so, useshrplib=false, libperl=libperl.a
    gnulibc_version='2.19'
  Dynamic Linking:
    dlsrc=dl_dlopen.xs, dlext=so, d_dlsymun=undef, ccdlflags='-Wl,-E'
    cccdlflags='-fPIC', lddlflags='-shared -O2 -L/usr/local/lib -fstack-protector-strong'


Characteristics of this binary (from libperl): 
  Compile-time options: HAS_TIMES PERLIO_LAYERS PERL_DONT_CREATE_GVSV
                        PERL_HASH_FUNC_ONE_AT_A_TIME_HARD PERL_MALLOC_WRAP
                        PERL_NEW_COPY_ON_WRITE PERL_PRESERVE_IVUV
                        USE_64_BIT_ALL USE_64_BIT_INT USE_LARGE_FILES
                        USE_LOCALE USE_LOCALE_COLLATE USE_LOCALE_CTYPE
                        USE_LOCALE_NUMERIC USE_LOCALE_TIME USE_PERLIO
                        USE_PERL_ATOF
  Built under linux
  Compiled at May  6 2016 22:58:24
  %ENV:
    PERL5LIB="/home/cpansand/.cpan/build/2016050809/Linux-Prctl-1.6.0-QvhvSs/blib/arch:/home/cpansand/.cpan/build/2016050809/Linux-Prctl-1.6.0-QvhvSs/blib/lib"
    PERL5OPT=""
    PERL5_CPANPLUS_IS_RUNNING="18153"
    PERL5_CPAN_IS_RUNNING="18153"
    PERL5_CPAN_IS_RUNNING_IN_RECURSION="15012,18153"
    PERLDOC="-MPod::Perldoc::ToTextOverstrike"
    PERL_BATCH="yes"
    PERL_CANARY_STABILITY_NOPROMPT="1"
    PERL_CPAN_REPORTER_CONFIG="/run/user/1023/cpansmoker/2016050806/cpanreporter_004_config.ini"
    PERL_EXTUTILS_AUTOINSTALL="--defaultdeps"
    PERL_PARALLEL_SMOKER="yes"
  @INC:
    /home/cpansand/.cpan/build/2016050809/Linux-Prctl-1.6.0-QvhvSs/blib/arch
    /home/cpansand/.cpan/build/2016050809/Linux-Prctl-1.6.0-QvhvSs/blib/lib
    /opt/perl-5.22.2/lib/site_perl/5.22.2/x86_64-linux
    /opt/perl-5.22.2/lib/site_perl/5.22.2
    /opt/perl-5.22.2/lib/5.22.2/x86_64-linux
    /opt/perl-5.22.2/lib/5.22.2
    .