common.sh revision ed0258a4
# Copyright (c) 2019 Cisco and/or its affiliates.
# Copyright (c) 2019 PANTHEON.tech and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -exuo pipefail

# This library defines functions used by multiple entry scripts.
# Keep functions ordered alphabetically, please.

# TODO: Add a link to bash style guide.
# TODO: Consider putting every die into a {} block,
#   the code might become more readable (but longer).


function activate_docker_topology () {

    # Create virtual vpp-device topology. The output of the function is
    # a topology file describing the created environment.
    #
    # Variables read:
    # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
    # - TOPOLOGIES - Available topologies.
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.
    # - IMAGE_VER_FILE - Name of file that contains the image version.
    # - CSIT_DIR - Directory where ${IMAGE_VER_FILE} is located.
    # Variables set:
    # - WORKING_TOPOLOGY - Path to topology file.

    set -exuo pipefail

    source "${BASH_FUNCTION_DIR}/device.sh" || {
        die "Source failed!"
    }

    device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
        "1n_skx" | "1n_tx2")
            # We execute reservation over csit-shim-dcr (ssh), which runs the
            # sourced script's functions. Env variables are read from ssh output
            # back to localhost for further processing.
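            # Illustration (not executed as-is): the remote command is roughly
            #   ssh root@<shim-host> -p 6022 "<local function definitions>; \
            #       activate_wrapper <nodeness> <flavor> <image>"
            # and the variable assignments it prints are captured in env_vars.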
            hostname=$(grep search /etc/resolv.conf | cut -d' ' -f3) || die
            ssh="ssh root@${hostname} -p 6022"
            run="activate_wrapper ${NODENESS} ${FLAVOR} ${device_image}"
            # Backticks to avoid https://midnight-commander.org/ticket/2142
            env_vars=`${ssh} "$(declare -f); ${run}"` || {
                die "Topology reservation via shim-dcr failed!"
            }
            set -a
            source <(echo "$env_vars" | grep -v /usr/bin/docker) || {
                die "Source failed!"
            }
            set +a
            ;;
        "1n_vbox")
            # We execute reservation on localhost. Sourced script automatically
            # sets environment variables for further processing.
            activate_wrapper "${NODENESS}" "${FLAVOR}" "${device_image}" || die
            ;;
        *)
            die "Unknown specification: ${case_text}!"
    esac

    trap 'deactivate_docker_topology' EXIT || {
        die "Trap attempt failed, please cleanup manually. Aborting!"
    }

    # Replace all variables in template with those in environment.
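    # The sourced snippet is "cat <<EOF >topo.yml", then the template, then
    # "EOF", so the unquoted heredoc expands ${...} references from the
    # current environment while writing topo.yml. Illustration with a
    # hypothetical template line: "port: ${CSIT_DUT1_PORT}" is written out
    # with the exported value, e.g. "port: 6001".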
    source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
        die "Topology file create failed!"
    }

    WORKING_TOPOLOGY="/tmp/topology.yaml"
    mv topo.yml "${WORKING_TOPOLOGY}" || {
        die "Topology move failed!"
    }
    grep -v password "${WORKING_TOPOLOGY}" || {
        die "Topology read failed!"
    }
}


function activate_virtualenv () {

    # Update virtualenv pip package, delete and create virtualenv directory,
    # activate the virtualenv, install requirements, set PYTHONPATH.

    # Arguments:
    # - ${1} - Path to existing directory for creating virtualenv in.
    #          If missing or empty, ${CSIT_DIR} is used.
    # - ${2} - Path to requirements file, ${CSIT_DIR}/requirements.txt if empty.
    # Variables read:
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # Variables exported:
    # - PYTHONPATH - CSIT_DIR, as CSIT Python scripts usually need this.
    # Functions called:
    # - die - Print to stderr and exit.
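    #
    # Example call from an entry script (paths are illustrative only):
    #   activate_virtualenv "${CSIT_DIR}" "${CSIT_DIR}/requirements.txt"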

    set -exuo pipefail

    root_path="${1-$CSIT_DIR}"
    env_dir="${root_path}/env"
    req_path=${2-$CSIT_DIR/requirements.txt}
    rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
    pip3 install --upgrade virtualenv || {
        die "Virtualenv package install failed."
    }
    virtualenv -p $(which python3) "${env_dir}" || {
        die "Virtualenv creation for $(which python3) failed."
    }
    set +u
    source "${env_dir}/bin/activate" || die "Virtualenv activation failed."
    set -u
    pip3 install --upgrade -r "${req_path}" || {
        die "Requirements installation failed."
    }
    # Most CSIT Python scripts assume PYTHONPATH is set and exported.
    export PYTHONPATH="${CSIT_DIR}" || die "Export failed."
}


function archive_tests () {

    # Create .tar.xz of generated/tests for archiving.
    # To be run after generate_tests, kept separate to offer more flexibility.

    # Directory read:
    # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
    # File rewritten:
    # - ${ARCHIVE_DIR}/tests.tar.xz - Archive of generated tests.

    set -exuo pipefail

    tar c "${GENERATED_DIR}/tests" | xz -9e > "${ARCHIVE_DIR}/tests.tar.xz" || {
        die "Error creating archive of generated tests."
    }
}


function check_download_dir () {

    # Fail if there are no files visible in ${DOWNLOAD_DIR}.
    #
    # Variables read:
    # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
    # Directories read:
    # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    if [[ ! "$(ls -A "${DOWNLOAD_DIR}")" ]]; then
        die "No artifacts downloaded!"
    fi
}


function check_prerequisites () {

    # Fail if prerequisites are not met.
    #
    # Functions called:
    # - installed - Check if application is installed/present in system.
    # - die - Print to stderr and exit.

    set -exuo pipefail

    if ! installed sshpass; then
        die "Please install sshpass before continuing!"
    fi
}


function common_dirs () {

    # Set global variables, create some directories (without touching content).

    # Variables set:
    # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # - RESOURCES_DIR - Path to existing CSIT subdirectory "resources".
    # - TOOLS_DIR - Path to existing resources subdirectory "tools".
    # - DOC_GEN_DIR - Path to existing tools subdirectory "doc_gen".
    # - PYTHON_SCRIPTS_DIR - Path to existing tools subdirectory "scripts".
    # - ARCHIVE_DIR - Path to created CSIT subdirectory "archive".
    # - DOWNLOAD_DIR - Path to created CSIT subdirectory "download_dir".
    # - GENERATED_DIR - Path to created CSIT subdirectory "generated".
    # Directories created if not present:
    # ARCHIVE_DIR, DOWNLOAD_DIR, GENERATED_DIR.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    this_file=$(readlink -e "${BASH_SOURCE[0]}") || {
        die "Error locating this source file."
    }
    BASH_FUNCTION_DIR=$(dirname "${this_file}") || {
        die "Error during dirname call."
    }
    # Current working directory could be in a different repo, e.g. VPP.
    pushd "${BASH_FUNCTION_DIR}" || die "Pushd failed."
    relative_csit_dir=$(git rev-parse --show-toplevel) || {
        die "Git rev-parse failed."
    }
    CSIT_DIR=$(readlink -e "${relative_csit_dir}") || die "Readlink failed."
    popd || die "Popd failed."
    TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
        die "Readlink failed."
    }
    RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
        die "Readlink failed."
    }
    TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
        die "Readlink failed."
    }
    DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
        die "Readlink failed."
    }
    PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
        die "Readlink failed."
    }

    ARCHIVE_DIR=$(readlink -f "${CSIT_DIR}/archive") || {
        die "Readlink failed."
    }
    mkdir -p "${ARCHIVE_DIR}" || die "Mkdir failed."
    DOWNLOAD_DIR=$(readlink -f "${CSIT_DIR}/download_dir") || {
        die "Readlink failed."
    }
    mkdir -p "${DOWNLOAD_DIR}" || die "Mkdir failed."
    GENERATED_DIR=$(readlink -f "${CSIT_DIR}/generated") || {
        die "Readlink failed."
    }
    mkdir -p "${GENERATED_DIR}" || die "Mkdir failed."
}


function compose_pybot_arguments () {

    # Variables read:
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - DUT - CSIT test/ subdirectory, set while processing tags.
    # - TAGS - Array variable holding selected tag boolean expressions.
    # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
    # - TEST_CODE - The test selection string from environment or argument.
    # Variables set:
    # - PYBOT_ARGS - Array holding part of all arguments for pybot.
    # - EXPANDED_TAGS - Array of pybot arguments compiled from tags.

    set -exuo pipefail

    # No explicit check needed with "set -u".
    PYBOT_ARGS=("--loglevel" "TRACE")
    PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")

    case "${TEST_CODE}" in
        *"device"*)
            PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
            ;;
        *"func"*)
            PYBOT_ARGS+=("--suite" "tests.${DUT}.func")
            ;;
        *"perf"*)
            PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
            ;;
        *)
            die "Unknown specification: ${TEST_CODE}"
    esac

    EXPANDED_TAGS=()
    for tag in "${TAGS[@]}"; do
        if [[ ${tag} == "!"* ]]; then
            EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
        else
            EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
        fi
    done
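    # Illustration: with TOPOLOGIES_TAGS="2_node_*_link_topo", a tag
    # "mrrAND64bANDip4base" becomes
    #   --include 2_node_*_link_topoANDmrrAND64bANDip4base
    # while an exclude tag "!ipsechw" becomes: --exclude ipsechw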
}


function copy_archives () {

    # Create additional archive if workspace variable is set.
    # This way, if the script is running in Jenkins, everything is
    # automatically archived to logs.fd.io.
    #
    # Variables read:
    # - WORKSPACE - Jenkins workspace, copy only if the value is not empty.
    #   Can be unset, then it speeds up manual testing.
    # - ARCHIVE_DIR - Path to directory with content to be copied.
    # Directories updated:
    # - ${WORKSPACE}/archives/ - Created if it does not exist.
    #   Content of ${ARCHIVE_DIR}/ is copied here.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    if [[ -n "${WORKSPACE-}" ]]; then
        mkdir -p "${WORKSPACE}/archives/" || die "Archives dir create failed."
        cp -rf "${ARCHIVE_DIR}"/* "${WORKSPACE}/archives" || die "Copy failed."
    fi
}


function deactivate_docker_topology () {

    # Deactivate virtual vpp-device topology by removing containers.
    #
    # Variables read:
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.

    set -exuo pipefail

    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
        "1n_skx" | "1n_tx2")
            hostname=$(grep search /etc/resolv.conf | cut -d' ' -f3) || die
            ssh="ssh root@${hostname} -p 6022"
            env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
            ${ssh} "$(declare -f); deactivate_wrapper ${env_vars}" || {
                die "Topology cleanup via shim-dcr failed!"
            }
            ;;
        "1n_vbox")
            enter_mutex || die
            clean_environment || {
                die "Topology cleanup locally failed!"
            }
            exit_mutex || die
            ;;
        *)
            die "Unknown specification: ${case_text}!"
    esac
}


function die () {

    # Print the message to standard error and exit with the error code
    # specified by the second argument.
    #
    # Hardcoded values:
    # - The default error message.
    # Arguments:
    # - ${1} - The whole error message, be sure to quote. Optional.
    # - ${2} - The code to exit with, default: 1.

    set -x
    set +eu
    warn "${1:-Unspecified run-time error occurred!}"
    exit "${2:-1}"
}


function die_on_pybot_error () {

    # Source this fragment if you want to abort on any failed test case.
    #
    # Variables read:
    # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
        die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
    fi
}


function generate_tests () {

    # Populate ${GENERATED_DIR}/tests based on ${CSIT_DIR}/tests/.
    # Any previously existing content of ${GENERATED_DIR}/tests is wiped first.
    # The generation is done by executing any *.py executable
    # within any subdirectory after copying.

    # This is a separate function, because this code is called
    # both by the autogen checker and by entries calling run_pybot.

    # Directories read:
    # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
    # Directories replaced:
    # - ${GENERATED_DIR}/tests - Overwritten by the generated tests.

    set -exuo pipefail

    rm -rf "${GENERATED_DIR}/tests" || die
    cp -r "${CSIT_DIR}/tests" "${GENERATED_DIR}/tests" || die
    cmd_line=("find" "${GENERATED_DIR}/tests" "-type" "f")
    cmd_line+=("-executable" "-name" "*.py")
    file_list=$("${cmd_line[@]}") || die

    for gen in ${file_list}; do
        directory="$(dirname "${gen}")" || die
        filename="$(basename "${gen}")" || die
        pushd "${directory}" || die
        ./"${filename}" || die
        popd || die
    done
}


function get_test_code () {

    # Arguments:
    # - ${1} - Optional, argument of entry script (or empty as unset).
    #   Test code value to override job name from environment.
    # Variables read:
    # - JOB_NAME - String affecting test selection, used as the default
    #   when no argument is given.
    # Variables set:
    # - TEST_CODE - The test selection string from environment or argument.
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.
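    #
    # Example: a TEST_CODE such as "csit-vpp-perf-mrr-daily-master-2n-skx"
    # (hypothetical job name) results in NODENESS="2n" and FLAVOR="skx".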

    set -exuo pipefail

    TEST_CODE="${1-}" || die "Reading optional argument failed, somehow."
    if [[ -z "${TEST_CODE}" ]]; then
        TEST_CODE="${JOB_NAME-}" || die "Reading job name failed, somehow."
    fi

    case "${TEST_CODE}" in
        *"1n-vbox"*)
            NODENESS="1n"
            FLAVOR="vbox"
            ;;
        *"1n-skx"*)
            NODENESS="1n"
            FLAVOR="skx"
            ;;
        *"1n-tx2"*)
            NODENESS="1n"
            FLAVOR="tx2"
            ;;
        *"2n-skx"*)
            NODENESS="2n"
            FLAVOR="skx"
            ;;
        *"3n-skx"*)
            NODENESS="3n"
            FLAVOR="skx"
            ;;
        *"2n-clx"*)
            NODENESS="2n"
            FLAVOR="clx"
            ;;
        *"2n-dnv"*)
            NODENESS="2n"
            FLAVOR="dnv"
            ;;
        *"3n-dnv"*)
            NODENESS="3n"
            FLAVOR="dnv"
            ;;
        *"3n-tsh"*)
            NODENESS="3n"
            FLAVOR="tsh"
            ;;
        *)
            # Fallback to 3-node Haswell by default (backward compatibility)
            NODENESS="3n"
            FLAVOR="hsw"
            ;;
    esac
}


function get_test_tag_string () {

    # Variables read:
    # - GERRIT_EVENT_TYPE - Event type set by gerrit, can be unset.
    # - GERRIT_EVENT_COMMENT_TEXT - Comment text, read for "comment-added" type.
    # - TEST_CODE - The test selection string from environment or argument.
    # Variables set:
    # - TEST_TAG_STRING - The string following the trigger word in the
    #   gerrit comment. May be empty, or even unset on event types
    #   that do not add a comment.

    # TODO: ci-management scripts no longer need to perform this.

    set -exuo pipefail

    if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
        case "${TEST_CODE}" in
            *"device"*)
                trigger="devicetest"
                ;;
            *"perf"*)
                trigger="perftest"
                ;;
            *)
                die "Unknown specification: ${TEST_CODE}"
        esac
        # Ignore lines not containing the trigger word.
        comment=$(fgrep "${trigger}" <<< "${GERRIT_EVENT_COMMENT_TEXT}") || true
        # The vpp-csit triggers carry extra text we are not interested in.
        # Remove it and the trigger word: https://unix.stackexchange.com/a/13472
        # (except relying on \s whitespace, \S non-whitespace and . both).
        # The last string is concatenated, only the middle part is expanded.
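        # Illustration: for a comment line such as
        # "perftest l2bdbase ip4base" the command below keeps only
        # "l2bdbase ip4base" in TEST_TAG_STRING.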
        cmd=("grep" "-oP" '\S*'"${trigger}"'\S*\s\K.+$') || die "Unset trigger?"
        # On parsing error, TEST_TAG_STRING probably stays empty.
        TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}") || true
    fi
}


function installed () {

    # Check if the given utility is installed. Fail if not installed.
    #
    # Duplicate of common.sh function, as this file is also used standalone.
    #
    # Arguments:
    # - ${1} - Utility to check.
    # Returns:
    # - 0 - If command is installed.
    # - 1 - If command is not installed.

    set -exuo pipefail

    command -v "${1}"
}


function reserve_and_cleanup_testbed () {

    # Reserve physical testbed, perform cleanup, register trap to unreserve.
    # When cleanup fails, remove from topologies and keep retrying
    # until all topologies are removed.
    #
    # Variables read:
    # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
    # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
    # - BUILD_TAG - Any string suitable as filename, identifying
    #   test run executing this function. May be unset.
    # Variables set:
    # - TOPOLOGIES - Array of paths to topologies, with failed cleanups removed.
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # Functions called:
    # - die - Print to stderr and exit.
    # - ansible_hosts - Perform an action using ansible, see ansible.sh
    # Traps registered:
    # - EXIT - Calls cancel_all for ${WORKING_TOPOLOGY}.

    set -exuo pipefail

    while true; do
        for topo in "${TOPOLOGIES[@]}"; do
            set +e
            scrpt="${PYTHON_SCRIPTS_DIR}/topo_reservation.py"
            opts=("-t" "${topo}" "-r" "${BUILD_TAG:-Unknown}")
            python "${scrpt}" "${opts[@]}"
            result="$?"
            set -e
            if [[ "${result}" == "0" ]]; then
                # Trap unreservation before cleanup check,
                # so multiple jobs showing failed cleanup improve chances
                # of humans to notice and fix.
                WORKING_TOPOLOGY="${topo}"
                echo "Reserved: ${WORKING_TOPOLOGY}"
                trap "untrap_and_unreserve_testbed" EXIT || {
                    message="TRAP ATTEMPT AND UNRESERVE FAILED, FIX MANUALLY."
                    untrap_and_unreserve_testbed "${message}" || {
                        die "Teardown should have died, not failed."
                    }
                    die "Trap attempt failed, unreserve succeeded. Aborting."
                }
                # Cleanup check.
                set +e
                ansible_hosts "cleanup"
                result="$?"
                set -e
                if [[ "${result}" == "0" ]]; then
                    break
                fi
                warn "Testbed cleanup failed: ${topo}"
                untrap_and_unreserve_testbed "Fail of unreserve after cleanup."
            fi
            # Else testbed is accessible but currently reserved, moving on.
        done

        if [[ -n "${WORKING_TOPOLOGY-}" ]]; then
            # Exit the infinite while loop if we made a reservation.
            warn "Reservation and cleanup successful."
            break
        fi

        if [[ "${#TOPOLOGIES[@]}" == "0" ]]; then
            die "Ran out of operational testbeds!"
        fi

        # Wait ~3 minutes (180 to 199 seconds) before the next try.
        sleep_time="$(( ( ${RANDOM} % 20 ) + 180 ))s" || {
            die "Sleep time calculation failed."
        }
        echo "Sleeping ${sleep_time}"
        sleep "${sleep_time}" || die "Sleep failed."
    done
}


function run_pybot () {

    # Run pybot with options based on input variables. Create output_info.xml.
    #
    # Variables read:
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - ARCHIVE_DIR - Path to store robot result files in.
    # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.
    # - GENERATED_DIR - Tests are assumed to be generated under there.
    # Variables set:
    # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
    all_options+=("--noncritical" "EXPECTED_FAILING")
    all_options+=("${EXPANDED_TAGS[@]}")
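    # Illustration (values are placeholders): the robot call below ends up
    # roughly as:
    #   robot --outputdir <archive> --loglevel TRACE \
    #     --variable TOPOLOGY_PATH:<topo> --suite tests.<dut>.perf \
    #     --noncritical EXPECTED_FAILING --include <tags> <generated>/tests/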

    pushd "${CSIT_DIR}" || die "Change directory operation failed."
    set +e
    python -V
    robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
    PYBOT_EXIT_STATUS="$?"
    set -e

    # Generate INFO level output_info.xml for post-processing.
    all_options=("--loglevel" "INFO")
    all_options+=("--log" "none")
    all_options+=("--report" "none")
    all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
    all_options+=("${ARCHIVE_DIR}/output.xml")
    rebot "${all_options[@]}" || true
    popd || die "Change directory operation failed."
}


function select_arch_os () {

    # Set variables affected by the local CPU architecture and operating system.
    #
    # Variables set:
    # - VPP_VER_FILE - Name of file in CSIT dir containing vpp stable version.
    # - IMAGE_VER_FILE - Name of file in CSIT dir containing the image name.
    # - PKG_SUFFIX - Suffix of OS package file name, "rpm" or "deb".
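    #
    # Illustration: an /etc/os-release containing the line ID=ubuntu
    # results in os_id="ubuntu", selecting the "deb" packaging branch below.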

    set -exuo pipefail

    os_id=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g') || {
        die "Get OS release failed."
    }

    case "${os_id}" in
        "ubuntu"*)
            IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
            VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_BIONIC"
            PKG_SUFFIX="deb"
            ;;
        "centos"*)
            IMAGE_VER_FILE="VPP_DEVICE_IMAGE_CENTOS"
            VPP_VER_FILE="VPP_STABLE_VER_CENTOS"
            PKG_SUFFIX="rpm"
            ;;
        *)
            die "Unable to identify distro or os from ${os_id}"
            ;;
    esac

    arch=$(uname -m) || {
        die "Get CPU architecture failed."
    }

    case "${arch}" in
        "aarch64")
            IMAGE_VER_FILE="${IMAGE_VER_FILE}_ARM"
            ;;
        *)
            ;;
    esac
}


function select_tags () {

    # Variables read:
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - TEST_CODE - String affecting test selection, usually jenkins job name.
    # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
    #   Can be unset.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # - BASH_FUNCTION_DIR - Directory with input files to process.
    # Variables set:
    # - TAGS - Array of processed tag boolean expressions.

    set -exuo pipefail

    # NIC SELECTION
    start_pattern='^  TG:'
    end_pattern='^ \? \?[A-Za-z0-9]\+:'
    # Remove the TG section from the topology file.
    sed_command="/${start_pattern}/,/${end_pattern}/d"
    # All topologies DUT NICs.
    available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
                | grep -hoP "model: \K.*" | sort -u)
    # Selected topology DUT NICs.
    reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
               | grep -hoP "model: \K.*" | sort -u)
    # All topologies DUT NICs - Selected topology DUT NICs.
    exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
        die "Computation of excluded NICs failed."
    }
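    # Illustration: if all topologies together list NIC models A, B and C
    # but the reserved topology only has B, exclude_nics ends up as (A C);
    # those models are later turned into "!NIC_..." exclusion tags.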

    # Select default NIC tag.
    case "${TEST_CODE}" in
        *"3n-dnv"* | *"2n-dnv"*)
            default_nic="nic_intel-x553"
            ;;
        *"3n-tsh"*)
            default_nic="nic_intel-x520-da2"
            ;;
        *"3n-skx"* | *"2n-skx"* | *"2n-clx"*)
            default_nic="nic_intel-xxv710"
            ;;
        *"3n-hsw"* | *"mrr-daily-master")
            default_nic="nic_intel-xl710"
            ;;
        *)
            default_nic="nic_intel-x710"
            ;;
    esac

    sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
    # Tag file directory shorthand.
    tfd="${BASH_FUNCTION_DIR}"
    case "${TEST_CODE}" in
        # Select specific performance tests based on jenkins job type variable.
        *"ndrpdr-weekly"* )
            readarray -t test_tag_array < "${tfd}/mlr-weekly.txt" || die
            ;;
        *"mrr-daily"* )
            readarray -t test_tag_array <<< $(${sed_nic_sub_cmd} \
                ${tfd}/mrr-daily-${FLAVOR}.txt) || die
            ;;
        *"mrr-weekly"* )
            readarray -t test_tag_array <<< $(${sed_nic_sub_cmd} \
                ${tfd}/mrr-weekly.txt) || die
            ;;
        * )
            if [[ -z "${TEST_TAG_STRING-}" ]]; then
                # If nothing is specified, we will run pre-selected tests by
                # the following tags.
                test_tag_array=("mrrAND${default_nic}AND1cAND64bANDip4base"
                                "mrrAND${default_nic}AND1cAND78bANDip6base"
                                "mrrAND${default_nic}AND1cAND64bANDl2bdbase"
                                "mrrAND${default_nic}AND1cAND64bANDl2xcbase"
                                "!dot1q" "!drv_avf")
            else
                # If trigger contains tags, split them into array.
                test_tag_array=(${TEST_TAG_STRING//:/ })
            fi
            ;;
    esac

    # Blacklisting certain tags per topology.
    #
    # Reasons for blacklisting:
    # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
    # TODO: Add missing reasons here (if general) or where used (if specific).
    case "${TEST_CODE}" in
        *"2n-skx"*)
            test_tag_array+=("!ipsechw")
            ;;
        *"3n-skx"*)
            test_tag_array+=("!ipsechw")
            # Not enough nic_intel-xxv710 to support double link tests.
            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
            ;;
        *"2n-clx"*)
            test_tag_array+=("!ipsechw")
            ;;
        *"2n-dnv"*)
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!memif")
            test_tag_array+=("!srv6_proxy")
            test_tag_array+=("!vhost")
            test_tag_array+=("!vts")
            test_tag_array+=("!drv_avf")
            ;;
        *"3n-dnv"*)
            test_tag_array+=("!memif")
            test_tag_array+=("!srv6_proxy")
            test_tag_array+=("!vhost")
            test_tag_array+=("!vts")
            test_tag_array+=("!drv_avf")
            ;;
        *"3n-tsh"*)
            # 3n-tsh only has x520 NICs, which do not work with AVF.
            test_tag_array+=("!drv_avf")
            test_tag_array+=("!ipsechw")
            ;;
        *"3n-hsw"*)
            # TODO: Introduce NOIOMMU version of AVF tests.
            # TODO: Make (both) AVF tests work on Haswell,
            # or document why (some of) it is not possible.
            # https://github.com/FDio/vpp/blob/master/src/plugins/avf/README.md
            test_tag_array+=("!drv_avf")
            # All cards have access to QAT. But only one card (xl710)
            # resides in the same NUMA as QAT. Other cards must go over QPI,
            # which we do not want to even run.
            test_tag_array+=("!ipsechwNOTnic_intel-xl710")
            ;;
        *)
            # Default to 3n-hsw due to compatibility.
            test_tag_array+=("!drv_avf")
            test_tag_array+=("!ipsechwNOTnic_intel-xl710")
            ;;
    esac

    # We will add excluded NICs.
    test_tag_array+=("${exclude_nics[@]/#/!NIC_}")

    TAGS=()

    # We will prefix with perftest to prevent running other tests
    # (e.g. Functional).
    prefix="perftestAND"
    set +x
    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
        # Automatic prefixing for VPP jobs to limit the NIC used and
        # traffic evaluation to MRR.
        if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
            prefix="${prefix}mrrAND"
        else
            prefix="${prefix}mrrAND${default_nic}AND"
        fi
    fi
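    # Illustration: with default_nic="nic_intel-x710", a plain tag such as
    # "1cAND64bANDip4base" from a vpp-* job is expanded by the loop below to
    # "perftestANDmrrANDnic_intel-x710AND1cAND64bANDip4base".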
    for tag in "${test_tag_array[@]}"; do
        if [[ "${tag}" == "!"* ]]; then
            # Exclude tags are not prefixed.
            TAGS+=("${tag}")
        elif [[ "${tag}" == " "* || "${tag}" == *"perftest"* ]]; then
            # Badly formed tag expressions can trigger way too many tests.
            set -x
            warn "The following tag expression hints at a bad trigger: ${tag}"
            warn "Possible cause: Multiple triggers in a single comment."
            die "Aborting to avoid triggering too many tests."
        elif [[ "${tag}" != "" && "${tag}" != "#"* ]]; then
            # Empty and comment lines are skipped.
            # Other lines are normal tags, they are to be prefixed.
            TAGS+=("${prefix}${tag}")
        fi
    done
    set -x
}


function select_topology () {

    # Variables read:
    # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
    # - FLAVOR - Node flavor string, e.g. "hsw" or "skx".
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # Variables set:
    # - TOPOLOGIES - Array of paths to suitable topology yaml files.
    # - TOPOLOGIES_TAGS - Tag expression selecting tests for the topology.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
        # TODO: Move tags to "# Blacklisting certain tags per topology" section.
        # TODO: Double link availability depends on NIC used.
        "1n_vbox")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            ;;
        "1n_skx" | "1n_tx2")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            ;;
        "2n_skx")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            ;;
        "3n_skx")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
            TOPOLOGIES_TAGS="3_node_*_link_topo"
            ;;
        "2n_clx")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            ;;
        "2n_dnv")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            ;;
        "3n_dnv")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            ;;
        "3n_hsw")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_hsw*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            ;;
        "3n_tsh")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            ;;
        *)
            # No fallback to the 3n_hsw default; that should have been done
            # by the function which set NODENESS and FLAVOR.
            die "Unknown specification: ${case_text}"
    esac

    if [[ -z "${TOPOLOGIES-}" ]]; then
        die "No applicable topology found!"
    fi
}


function select_vpp_device_tags () {

    # Variables read:
    # - TEST_CODE - String affecting test selection, usually jenkins job name.
    # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
    #   Can be unset.
    # Variables set:
    # - TAGS - Array of processed tag boolean expressions.

    set -exuo pipefail

    case "${TEST_CODE}" in
        # Select specific device tests based on jenkins job type variable.
        * )
            if [[ -z "${TEST_TAG_STRING-}" ]]; then
                # If nothing is specified, we will run pre-selected tests by
                # the following tags. Items of the array will be concatenated
                # by OR in Robot Framework.
                test_tag_array=()
            else
                # If trigger contains tags, split them into array.
                test_tag_array=(${TEST_TAG_STRING//:/ })
            fi
            ;;
    esac

    # Blacklisting certain tags per topology.
    #
    # Reasons for blacklisting:
    # - avf - AVF is not possible to run on the enic driver of VirtualBox.
    # - vhost - VirtualBox does not support nested virtualization on Intel CPUs.
    case "${TEST_CODE}" in
        *"1n-vbox"*)
            test_tag_array+=("!avf")
            test_tag_array+=("!vhost")
            ;;
        *)
            ;;
    esac

    TAGS=()

    # We will prefix with devicetest to prevent running other tests
    # (e.g. Functional).
    prefix="devicetestAND"
    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
        # Automatic prefixing for VPP jobs to limit testing.
        prefix="${prefix}"
    fi
    for tag in "${test_tag_array[@]}"; do
        if [[ ${tag} == "!"* ]]; then
            # Exclude tags are not prefixed.
            TAGS+=("${tag}")
        else
            TAGS+=("${prefix}${tag}")
        fi
    done
}

function untrap_and_unreserve_testbed () {

    # Use this as a trap function to ensure testbed does not remain reserved.
    # Perhaps call directly before script exit, to free testbed for other jobs.
    # This function is smart enough to avoid multiple unreservations (so safe).
    # Topology cleanup is executed (as a best practice), ignoring failures.
    #
    # Hardcoded values:
    # - Default message to die with if testbed might remain reserved.
    # Arguments:
    # - ${1} - Message to die with if unreservation fails. Default hardcoded.
    # Variables read (by inner function):
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
    # Variables written:
    # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
    # Trap unregistered:
    # - EXIT - Failure to untrap is reported, but ignored otherwise.
    # Functions called:
    # - die - Print to stderr and exit.
    # - ansible_hosts - Perform an action using ansible, see ansible.sh
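    #
    # Example call from an entry script teardown (message is illustrative):
    #   untrap_and_unreserve_testbed "Unreserve failed during teardown."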

    set -xo pipefail
    set +eu  # We do not want to exit early in a "teardown" function.
    trap - EXIT || echo "Trap deactivation failed, continuing anyway."
    wt="${WORKING_TOPOLOGY}"  # Just to avoid too long lines.
    if [[ -z "${wt-}" ]]; then
        set -eu
        warn "Testbed looks unreserved already. Trap removal failed before?"
    else
        ansible_hosts "cleanup" || true
        python "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
            die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
        }
        WORKING_TOPOLOGY=""
        set -eu
    fi
}


function warn () {

    # Print the message to standard error.
    #
    # Arguments:
    # - ${@} - The text of the message.

    set -exuo pipefail

    echo "$@" >&2
}
