forked from nuttx/nuttx-update
add libuv and cmocka test cases with file system on sim and qemu on CI
Signed-off-by: vela-mib <vela-mib@xiaomi.com>
parent 729e9fc8e3
commit 9847c6219d
11 changed files with 538 additions and 18 deletions

@@ -72,6 +72,17 @@ citest
 This configuration is the default configuration intended to be used by the automated
 testing on CI of 32-bit RISC-V using QEMU.
 
+To run it with QEMU, use the following command::
+
+    $ qemu-system-riscv32 -semihosting -M virt -cpu rv32 \
+      -drive index=0,id=userdata,if=none,format=raw,file=./fatfs.img \
+      -device virtio-blk-device,bus=virtio-mmio-bus.0,drive=userdata \
+      -bios none -kernel nuttx -nographic
+
+To run the CI scripts, use the following command::
+
+    $ ./nuttx/boards/risc-v/qemu-rv/rv-virt/configs/citest/run
+
 citest64
 --------
 
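
The QEMU command above expects a FAT image named fatfs.img in the working directory; the CI helper (see the hunk against class start at the end of this diff) creates it automatically. A minimal sketch for preparing the same image by hand, assuming dosfstools is installed on the host:

    $ dd if=/dev/zero of=fatfs.img bs=512 count=128K   # 64 MiB raw image, same size the CI helper uses
    $ mkfs.fat fatfs.img                               # format it FAT so QEMU can expose it via virtio-blk
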
@@ -34,11 +34,20 @@ CONFIG_BOARDCTL_ROMDISK=y
 CONFIG_BOARD_LOOPSPERMSEC=6366
 CONFIG_BUILTIN=y
 CONFIG_CANCELLATION_POINTS=y
+CONFIG_CM_FS_TEST=y
+CONFIG_CM_MM_TEST=y
+CONFIG_CM_PTHREAD_TEST=y
+CONFIG_CM_SCHED_TEST=y
+CONFIG_CM_TIME_TEST=y
 CONFIG_DEBUG_ASSERTIONS=y
 CONFIG_DEBUG_FEATURES=y
 CONFIG_DEBUG_FULLOPT=y
 CONFIG_DEBUG_SYMBOLS=y
+CONFIG_DEV_SIMPLE_ADDRENV=y
 CONFIG_DEV_ZERO=y
+CONFIG_DRIVERS_VIRTIO=y
+CONFIG_DRIVERS_VIRTIO_BLK=y
+CONFIG_DRIVERS_VIRTIO_MMIO=y
 CONFIG_ELF=y
 CONFIG_ETC_ROMFS=y
 CONFIG_EXAMPLES_HELLO=y
@@ -91,8 +100,7 @@ CONFIG_PREALLOC_TIMERS=0
 CONFIG_PSEUDOFS_SOFTLINKS=y
 CONFIG_PTHREAD_CLEANUP_STACKSIZE=4
 CONFIG_PTHREAD_SPINLOCKS=y
-CONFIG_PTHREAD_STACK_DEFAULT=8192
-CONFIG_PTHREAD_STACK_MIN=8192
+CONFIG_PTHREAD_STACK_MIN=2048
 CONFIG_RAM_SIZE=33554432
 CONFIG_RAM_START=0x80000000
 CONFIG_READLINE_CMD_HISTORY=y
@@ -125,5 +133,6 @@ CONFIG_TESTING_MM=y
 CONFIG_TESTING_OPEN_MEMSTREAM=y
 CONFIG_TESTING_OSTEST=y
 CONFIG_TESTING_SCANFTEST=y
+CONFIG_TESTS_TESTSUITES=y
 CONFIG_TLS_NELEM=8
 CONFIG_USEC_PER_TICK=1000
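
The added options above switch on additional test suites (CONFIG_CM_*, CONFIG_TESTS_TESTSUITES) and the virtio block driver that backs the /data mount used by the tests. A minimal sketch of rebuilding the citest configuration after such defconfig changes, assuming the standard NuttX build flow:

    $ cd nuttx
    $ ./tools/configure.sh rv-virt:citest   # select the board:config pair this diff documents
    $ make -j$(nproc)
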
@@ -23,6 +23,11 @@ CONFIG_BOARD_LOOPSPERMSEC=0
 CONFIG_BOOT_RUNFROMEXTSRAM=y
 CONFIG_BUILTIN=y
 CONFIG_CANCELLATION_POINTS=y
+CONFIG_CM_FS_TEST=y
+CONFIG_CM_MM_TEST=y
+CONFIG_CM_PTHREAD_TEST=y
+CONFIG_CM_SCHED_TEST=y
+CONFIG_CM_TIME_TEST=y
 CONFIG_DEBUG_ASSERTIONS=y
 CONFIG_DEBUG_FEATURES=y
 CONFIG_DEBUG_SYMBOLS=y
@@ -46,6 +51,7 @@ CONFIG_FSUTILS_PASSWD_READONLY=y
 CONFIG_FS_AIO=y
 CONFIG_FS_BINFS=y
 CONFIG_FS_FAT=y
+CONFIG_FS_HOSTFS=y
 CONFIG_FS_NAMED_SEMAPHORES=y
 CONFIG_FS_PROCFS=y
 CONFIG_FS_RAMMAP=y
@@ -74,11 +80,15 @@ CONFIG_LIBC_NETDB=y
 CONFIG_LIBC_NUMBERED_ARGS=y
 CONFIG_LIBC_SCANSET=y
 CONFIG_LIBUV=y
+CONFIG_LIBUV_THREADPOOL_SIZE=4
+CONFIG_LIBUV_THREAD_STACKSIZE=16384
+CONFIG_LIBUV_UTILS_STACKSIZE=65536
 CONFIG_LIBUV_UTILS_TEST=y
 CONFIG_MM_IOB=y
 CONFIG_MQ_MAXMSGSIZE=128
 CONFIG_NET=y
 CONFIG_NETDEV_HPWORK_THREAD=y
+CONFIG_NETDEV_IFINDEX=y
 CONFIG_NETDEV_LATEINIT=y
 CONFIG_NET_ICMP=y
 CONFIG_NET_LOCAL=y
@@ -93,8 +103,7 @@ CONFIG_PSEUDOFS_ATTRIBUTES=y
 CONFIG_PSEUDOFS_SOFTLINKS=y
 CONFIG_PTHREAD_CLEANUP_STACKSIZE=4
 CONFIG_PTHREAD_SPINLOCKS=y
-CONFIG_PTHREAD_STACK_DEFAULT=8192
-CONFIG_PTHREAD_STACK_MIN=8192
+CONFIG_PTHREAD_STACK_MIN=2048
 CONFIG_READLINE_TABCOMPLETION=y
 CONFIG_RR_INTERVAL=10
 CONFIG_SCHED_BACKTRACE=y
@@ -112,6 +121,7 @@ CONFIG_START_YEAR=2008
 CONFIG_SYSTEM_DUMPSTACK=y
 CONFIG_SYSTEM_NSH=y
 CONFIG_SYSTEM_POPEN=y
+CONFIG_TESTING_CMOCKA=y
 CONFIG_TESTING_CXXTEST=y
 CONFIG_TESTING_FMEMOPEN_TEST=y
 CONFIG_TESTING_FOPENCOOKIE_TEST=y
@@ -124,5 +134,6 @@ CONFIG_TESTING_OPEN_MEMSTREAM=y
 CONFIG_TESTING_OSTEST=y
 CONFIG_TESTING_SCANFTEST=y
 CONFIG_TESTING_SCANFTEST_FNAME="/tmp/test.txt"
+CONFIG_TESTS_TESTSUITES=y
 CONFIG_TLS_NELEM=16
 CONFIG_TLS_TASK_NELEM=8
@@ -40,8 +40,7 @@ ret="$?"
 find ${nuttx}/tools/ci/testrun -name '__pycache__' |xargs rm -rf
 find ${nuttx}/tools/ci/testrun -name '.pytest_cache' |xargs rm -rf
 rm -rf ${logs}
-rm -rf ${nuttx}/../apps/testing/ltp/ltp/
-rm -f ${nuttx}/../apps/testing/cmocka/cmocka.zip
+rm -f ${nuttx}/fatfs.img
 
 
 echo $ret

tools/ci/testrun/script/test_framework/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/python3
# encoding: utf-8

tools/ci/testrun/script/test_framework/test_cmocka.py (new file, 39 lines)
@@ -0,0 +1,39 @@
#!/usr/bin/python3
# encoding: utf-8

import os

import pytest

pytestmark = [pytest.mark.common, pytest.mark.rv_virt]

cmocka_list_start = "cmocka_list_start"
cmocka_list_end = "cmocka_list_end"
cmocka_test_start = "cmocka_test_start"
cmocka_test_end = "cmocka_test_end"


@pytest.mark.run(order=1)
def test_cmocka(p):
    if p.board == "sim":
        os.mkdir("./test")
        ret = p.sendCommand("mount -t hostfs -o fs=./test /data")
    if p.board == "rv-virt":
        ret = p.sendCommand("mount -t vfat /dev/virtblk0 /data")

    p.sendCommand(f"echo {cmocka_list_start}")
    p.sendCommand("cmocka --list", "Cmocka Test Completed")
    p.sendCommand(f"echo {cmocka_list_end}")

    p.sendCommand(f"echo {cmocka_test_start}")
    ret = p.sendCommand(
        "cmocka --skip test_playback.*|test_interaction.*|test_stress.*|test_capture.*",
        "Cmocka Test Completed",
        timeout=1200,
    )
    p.sendCommand(f"echo {cmocka_test_end}")

    if p.board == "sim":
        os.rmdir("./test")

    assert ret == 0
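
The echoed cmocka_list_* and cmocka_test_* markers delimit the cmocka output inside the console log that the harness captures via tee. A minimal sketch of extracting that section from a captured log afterwards, assuming the log file is named citest.log:

    $ sed -n '/cmocka_test_start/,/cmocka_test_end/p' citest.log   # print only the cmocka run
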

tools/ci/testrun/script/test_libuv/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/python3
# encoding: utf-8

tools/ci/testrun/script/test_libuv/test_libuv.py (new file, 427 lines)
@@ -0,0 +1,427 @@
#!/usr/bin/python3
# encoding: utf-8

import pytest


class TestLibuv:
    pytestmark = [pytest.mark.sim]

    def test_test_macros(self, p):
        ret = p.sendCommand(
            "uv_run_tests test_macros", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_close_order(self, p):
        ret = p.sendCommand(
            "uv_run_tests close_order", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_run_once(self, p):
        ret = p.sendCommand(
            "uv_run_tests run_once", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_run_nowait(self, p):
        ret = p.sendCommand(
            "uv_run_tests run_nowait", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_loop_alive(self, p):
        ret = p.sendCommand(
            "uv_run_tests loop_alive", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_loop_close(self, p):
        ret = p.sendCommand(
            "uv_run_tests loop_close", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_loop_instant_close(self, p):
        ret = p.sendCommand(
            "uv_run_tests loop_instant_close", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_loop_stop(self, p):
        ret = p.sendCommand(
            "uv_run_tests loop_stop", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_loop_backend_timeout(self, p):
        ret = p.sendCommand(
            "uv_run_tests loop_backend_timeout", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_default_loop_close(self, p):
        ret = p.sendCommand(
            "uv_run_tests default_loop_close", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_barrier_1(self, p):
        ret = p.sendCommand(
            "uv_run_tests barrier_1", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_barrier_2(self, p):
        ret = p.sendCommand(
            "uv_run_tests barrier_2", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_barrier_3(self, p):
        ret = p.sendCommand(
            "uv_run_tests barrier_3", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_barrier_serial_thread(self, p):
        ret = p.sendCommand(
            "uv_run_tests barrier_serial_thread", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_barrier_serial_thread_single(self, p):
        ret = p.sendCommand(
            "uv_run_tests barrier_serial_thread_single",
            ["not ok 1 -", "ok 1 -"],
            timeout=10,
        )
        assert ret == 1

    def test_condvar_1(self, p):
        ret = p.sendCommand(
            "uv_run_tests condvar_1", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_condvar_2(self, p):
        ret = p.sendCommand(
            "uv_run_tests condvar_2", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_condvar_3(self, p):
        ret = p.sendCommand(
            "uv_run_tests condvar_3", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_condvar_4(self, p):
        ret = p.sendCommand(
            "uv_run_tests condvar_4", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_condvar_5(self, p):
        ret = p.sendCommand(
            "uv_run_tests condvar_5", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_semaphore_1(self, p):
        ret = p.sendCommand(
            "uv_run_tests semaphore_1", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_semaphore_2(self, p):
        ret = p.sendCommand(
            "uv_run_tests semaphore_2", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_semaphore_3(self, p):
        ret = p.sendCommand(
            "uv_run_tests semaphore_3", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer(self, p):
        ret = p.sendCommand("uv_run_tests timer", ["not ok 1 -", "ok 1 -"], timeout=10)
        assert ret == 1

    def test_timer_init(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_init", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_again(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_again", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_start_twice(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_start_twice", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_order(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_order", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_huge_timeout(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_huge_timeout", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_huge_repeat(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_huge_repeat", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_run_once(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_run_once", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_from_check(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_from_check", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_is_closing(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_is_closing", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_null_callback(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_null_callback", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_timer_early_check(self, p):
        ret = p.sendCommand(
            "uv_run_tests timer_early_check", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_loop_handles(self, p):
        ret = p.sendCommand(
            "uv_run_tests loop_handles", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_walk_handles(self, p):
        ret = p.sendCommand(
            "uv_run_tests walk_handles", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_active(self, p):
        ret = p.sendCommand("uv_run_tests active", ["not ok 1 -", "ok 1 -"], timeout=10)
        assert ret == 1

    def test_embed(self, p):
        if p.board in ["sim"]:
            pytest.skip("unsupported at %s" % p.board)
        ret = p.sendCommand("uv_run_tests embed", ["not ok 1 -", "ok 1 -"], timeout=10)
        assert ret == 1

    @pytest.mark.skip(reason="VELAPLATFO-6346")
    def test_async(self, p):
        if p.ci:
            pytest.skip("unsupported at %s" % p.board)
        if p.board in ["sim", "vela"]:
            pytest.skip("unsupported at %s" % p.board)
        ret = p.sendCommand("uv_run_tests async", ["not ok 1 -", "ok 1 -"], timeout=10)
        assert ret == 1

    def test_async_null_cb(self, p):
        if p.ci:
            pytest.skip("unsupported at %s" % p.board)
        if p.board in ["sim", "vela"]:
            pytest.skip("unsupported at %s" % p.board)
        ret = p.sendCommand(
            "uv_run_tests async_null_cb", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_homedir(self, p):
        ret = p.sendCommand(
            "uv_run_tests homedir", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_tmpdir(self, p):
        ret = p.sendCommand("uv_run_tests tmpdir", ["not ok 1 -", "ok 1 -"], timeout=10)
        assert ret == 1

    def test_hrtime(self, p):
        ret = p.sendCommand("uv_run_tests hrtime", ["not ok 1 -", "ok 1 -"], timeout=25)
        assert ret == 1

    def test_gettimeofday(self, p):
        ret = p.sendCommand(
            "uv_run_tests gettimeofday", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_poll_oob(self, p):
        if p.board in ["sim"]:
            pytest.skip("unsupported at %s" % p.board)
        ret = p.sendCommand(
            "uv_run_tests poll_oob", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_threadpool_queue_work_simple(self, p):
        ret = p.sendCommand(
            "uv_run_tests threadpool_queue_work_simple",
            ["not ok 1 -", "ok 1 -"],
            timeout=10,
        )
        assert ret == 1

    def test_threadpool_queue_work_einval(self, p):
        ret = p.sendCommand(
            "uv_run_tests threadpool_queue_work_einval",
            ["not ok 1 -", "ok 1 -"],
            timeout=10,
        )
        assert ret == 1

    def test_threadpool_cancel_getnameinfo(self, p):
        ret = p.sendCommand(
            "uv_run_tests threadpool_cancel_getnameinfo",
            ["not ok 1 -", "ok 1 -"],
            timeout=10,
        )
        assert ret == 1

    def test_threadpool_cancel_random(self, p):
        ret = p.sendCommand(
            "uv_run_tests threadpool_cancel_random",
            ["not ok 1 -", "ok 1 -"],
            timeout=10,
        )
        assert ret == 1

    def test_threadpool_cancel_work(self, p):
        ret = p.sendCommand(
            "uv_run_tests threadpool_cancel_work", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_threadpool_cancel_single(self, p):
        ret = p.sendCommand(
            "uv_run_tests threadpool_cancel_single",
            ["not ok 1 -", "ok 1 -"],
            timeout=10,
        )
        assert ret == 1

    def test_thread_local_storage(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_local_storage", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_thread_stack_size(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_stack_size", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_thread_mutex(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_mutex", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_thread_mutex_recursive(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_mutex_recursive", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_thread_rwlock(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_rwlock", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_thread_rwlock_trylock(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_rwlock_trylock", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_thread_create(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_create", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_thread_equal(self, p):
        ret = p.sendCommand(
            "uv_run_tests thread_equal", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_queue_foreach_delete(self, p):
        ret = p.sendCommand(
            "uv_run_tests queue_foreach_delete", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_random_async(self, p):
        if p.ci:
            pytest.skip("unsupported at %s" % p.board)
        ret = p.sendCommand(
            "uv_run_tests random_async", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_random_sync(self, p):
        if p.ci:
            pytest.skip("unsupported at %s" % p.board)
        ret = p.sendCommand(
            "uv_run_tests random_sync", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_handle_type_name(self, p):
        ret = p.sendCommand(
            "uv_run_tests handle_type_name", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_req_type_name(self, p):
        ret = p.sendCommand(
            "uv_run_tests req_type_name", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_utf8_decode1(self, p):
        ret = p.sendCommand(
            "uv_run_tests utf8_decode1", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1

    def test_utf8_decode1_overrun(self, p):
        ret = p.sendCommand(
            "uv_run_tests utf8_decode1_overrun", ["not ok 1 -", "ok 1 -"], timeout=10
        )
        assert ret == 1
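
Every test above runs the libuv test runner over NSH and checks which output pattern matches first; index 1 corresponds to "ok 1 -", so assert ret == 1 means the TAP case passed. A minimal sketch of the sendCommand behaviour these assertions assume (the real helper belongs to the existing test framework and is not part of this diff; the names here are hypothetical):

    import pexpect

    def sendCommand(process, cmd, patterns, timeout=10):
        # process is a pexpect.spawn handle attached to the NSH console
        process.sendline(cmd)                               # e.g. "uv_run_tests timer"
        index = process.expect(patterns, timeout=timeout)   # index of the first pattern that matched
        return index                                        # 1 -> "ok 1 -" matched, i.e. the case passed
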
@@ -1629,17 +1629,6 @@ def test_ltp_interfaces_sigaction_28_16(p):
     assert retID >= 0
 
 
-def test_ltp_interfaces_timer_settime_5_3(p):
-    ret = p.sendCommand(
-        "ltp_interfaces_timer_settime_5_3",
-        ["PASSED", "passed", "Passed", "PASS"],
-        timeout=200,
-    )
-    retID = p.sendCommand("echo $?", "0", timeout=2)
-    assert ret >= 0
-    assert retID >= 0
-
-
 def test_ltp_interfaces_sigaction_8_16(p):
     ret = p.sendCommand(
         "ltp_interfaces_sigaction_8_16",
@@ -11182,3 +11171,14 @@ def test_ltp_interfaces_sigaction_4_57(p):
     retID = p.sendCommand("echo $?", "0", timeout=2)
     assert ret >= 0
     assert retID >= 0
+
+
+def test_ltp_interfaces_timer_settime_5_3(p):
+    ret = p.sendCommand(
+        "ltp_interfaces_timer_settime_5_3",
+        ["PASSED", "passed", "Passed", "PASS"],
+        timeout=200,
+    )
+    retID = p.sendCommand("echo $?", "0", timeout=2)
+    assert ret >= 0
+    assert retID >= 0
@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
 # encoding: utf-8
+import os
+
 import pytest
 
 pytestmark = [pytest.mark.common, pytest.mark.qemu]
@@ -7,6 +9,10 @@ do_not_support = ["sabre-6quad", "rv-virt", "rv-virt64", "esp32c3-devkit", "bl60
 
 
 def test_ostest(p):
+    if p.board == "sim":
+        os.mkdir("./test")
+        ret = p.sendCommand("mount -t hostfs -o fs=./test /data")
+
     ret = p.sendCommand("ostest", "Exiting with status 0", timeout=300)
     assert ret == 0
 
@@ -59,6 +65,9 @@ def test_fs_test(p):
     ret = p.sendCommand("fstest -n 10 -m %s" % fstest_dir, "FAILED: 0", timeout=2000)
     p.sendCommand("ls %s" % fstest_dir)
     p.sendCommand("rmdir %s" % fstest_dir)
+
+    if p.board == "sim":
+        os.rmdir("./test")
     assert ret == 0
 
 
@@ -344,15 +344,26 @@ class start:
             )
         if flag1 == "qemu-rv":
            flag2 = getConfigValue(path, board, core=None, flag="ARCH_RV64")
+           options = ""
            if flag2:
                riscv = "qemu-system-riscv64"
            else:
                riscv = "qemu-system-riscv32"
+           fs_flag = getConfigValue(path, board, core=None, flag="DRIVERS_VIRTIO_BLK")
+           if fs_flag:
+               os.system("dd if=/dev/zero of=fatfs.img bs=512 count=128K")
+               os.system("mkfs.fat fatfs.img")
+               os.system("chmod 777 ./fatfs.img")
+               options = (
+                   "-drive index=0,id=userdata,if=none,format=raw,file=./fatfs.img "
+                   "-device virtio-blk-device,bus=virtio-mmio-bus.0,drive=userdata"
+               )
            self.process = pexpect.spawn(
                "bash",
                [
                    "-c",
-                   "%s -M virt -bios ./nuttx -nographic | tee %s" % (riscv, self.log),
+                   "%s -M virt -bios ./nuttx -nographic %s | tee %s"
+                   % (riscv, options, self.log),
                ],
            )
            self.process.expect(self.PROMPT)