diff mbox series

[v3,06/19] test: Avoid failing skipped tests

Message ID 20240623203213.1571666-7-sjg@chromium.org
State Changes Requested
Delegated to: Tom Rini
Headers show
Series labgrid: Provide an integration with Labgrid | expand

Commit Message

Simon Glass June 23, 2024, 8:32 p.m. UTC
When a test returns -EAGAIN this should not be considered a failure.
Fix what seems to be a problem case, where the pytests see a failure
when a test has merely been skipped.

Signed-off-by: Simon Glass <sjg@chromium.org>
---

(no changes since v1)

 test/test-main.c | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

Comments

Tom Rini June 24, 2024, 6:05 p.m. UTC | #1
On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:

> When a test returns -EAGAIN this should not be considered a failure.
> Fix what seems to be a problem case, where the pytests see a failure
> when a test has merely been skipped.
> 
> Signed-off-by: Simon Glass <sjg@chromium.org>
> ---
> 
> (no changes since v1)
> 
>  test/test-main.c | 16 +++++++++++-----
>  1 file changed, 11 insertions(+), 5 deletions(-)
> 
> diff --git a/test/test-main.c b/test/test-main.c
> index 3fa6f6e32ec..cda1a186390 100644
> --- a/test/test-main.c
> +++ b/test/test-main.c
> @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
>  static int ut_run_test_live_flat(struct unit_test_state *uts,
>  				 struct unit_test *test)
>  {
> -	int runs;
> +	int runs, ret;
>  
>  	if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
>  		return skip_test(uts);
> @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
>  	if (CONFIG_IS_ENABLED(OF_LIVE)) {
>  		if (!(test->flags & UT_TESTF_FLAT_TREE)) {
>  			uts->of_live = true;
> -			ut_assertok(ut_run_test(uts, test, test->name));
> -			runs++;
> +			ret = ut_run_test(uts, test, test->name);
> +			if (ret != -EAGAIN) {
> +				ut_assertok(ret);
> +				runs++;
> +			}
>  		}
>  	}
>  
> @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
>  	    (!runs || ut_test_run_on_flattree(test)) &&
>  	    !(gd->flags & GD_FLG_FDT_CHANGED)) {
>  		uts->of_live = false;
> -		ut_assertok(ut_run_test(uts, test, test->name));
> -		runs++;
> +		ret = ut_run_test(uts, test, test->name);
> +		if (ret != -EAGAIN) {
> +			ut_assertok(ret);
> +			runs++;
> +		}
>  	}
>  
>  	return 0;

How did you trigger this case exactly?
Simon Glass June 25, 2024, 12:38 p.m. UTC | #2
Hi Tom,

On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
>
> On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
>
> > When a test returns -EAGAIN this should not be considered a failure.
> > Fix what seems to be a problem case, where the pytests see a failure
> > when a test has merely been skipped.
> >
> > Signed-off-by: Simon Glass <sjg@chromium.org>
> > ---
> >
> > (no changes since v1)
> >
> >  test/test-main.c | 16 +++++++++++-----
> >  1 file changed, 11 insertions(+), 5 deletions(-)
> >
> > diff --git a/test/test-main.c b/test/test-main.c
> > index 3fa6f6e32ec..cda1a186390 100644
> > --- a/test/test-main.c
> > +++ b/test/test-main.c
> > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> >                                struct unit_test *test)
> >  {
> > -     int runs;
> > +     int runs, ret;
> >
> >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> >               return skip_test(uts);
> > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> >                       uts->of_live = true;
> > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > -                     runs++;
> > +                     ret = ut_run_test(uts, test, test->name);
> > +                     if (ret != -EAGAIN) {
> > +                             ut_assertok(ret);
> > +                             runs++;
> > +                     }
> >               }
> >       }
> >
> > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> >           (!runs || ut_test_run_on_flattree(test)) &&
> >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> >               uts->of_live = false;
> > -             ut_assertok(ut_run_test(uts, test, test->name));
> > -             runs++;
> > +             ret = ut_run_test(uts, test, test->name);
> > +             if (ret != -EAGAIN) {
> > +                     ut_assertok(ret);
> > +                     runs++;
> > +             }
> >       }
> >
> >       return 0;
>
> How did you trigger this case exactly?

I noticed this in CI, where some skipped tests were shown as failed in
the log, even though they were not counted as failures in the final
results.
>
> --
> Tom
Tom Rini June 25, 2024, 2:14 p.m. UTC | #3
On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> Hi Tom,
> 
> On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> >
> > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> >
> > > When a test returns -EAGAIN this should not be considered a failure.
> > > Fix what seems to be a problem case, where the pytests see a failure
> > > when a test has merely been skipped.
> > >
> > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > ---
> > >
> > > (no changes since v1)
> > >
> > >  test/test-main.c | 16 +++++++++++-----
> > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > >
> > > diff --git a/test/test-main.c b/test/test-main.c
> > > index 3fa6f6e32ec..cda1a186390 100644
> > > --- a/test/test-main.c
> > > +++ b/test/test-main.c
> > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > >                                struct unit_test *test)
> > >  {
> > > -     int runs;
> > > +     int runs, ret;
> > >
> > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > >               return skip_test(uts);
> > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > >                       uts->of_live = true;
> > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > -                     runs++;
> > > +                     ret = ut_run_test(uts, test, test->name);
> > > +                     if (ret != -EAGAIN) {
> > > +                             ut_assertok(ret);
> > > +                             runs++;
> > > +                     }
> > >               }
> > >       }
> > >
> > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > >           (!runs || ut_test_run_on_flattree(test)) &&
> > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > >               uts->of_live = false;
> > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > -             runs++;
> > > +             ret = ut_run_test(uts, test, test->name);
> > > +             if (ret != -EAGAIN) {
> > > +                     ut_assertok(ret);
> > > +                     runs++;
> > > +             }
> > >       }
> > >
> > >       return 0;
> >
> > How did you trigger this case exactly?
> 
> I noticed this in CI, where some skipped tests were shown as failed in
> the log, even though they were not counted as failures in the final
> results.

That's really really strange, do you have an example log or something
around still?
Simon Glass June 26, 2024, 8 a.m. UTC | #4
Hi Tom,

On Tue, 25 Jun 2024 at 15:14, Tom Rini <trini@konsulko.com> wrote:
>
> On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> > Hi Tom,
> >
> > On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> > >
> > > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> > >
> > > > When a test returns -EAGAIN this should not be considered a failure.
> > > > Fix what seems to be a problem case, where the pytests see a failure
> > > > when a test has merely been skipped.
> > > >
> > > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > > ---
> > > >
> > > > (no changes since v1)
> > > >
> > > >  test/test-main.c | 16 +++++++++++-----
> > > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > > >
> > > > diff --git a/test/test-main.c b/test/test-main.c
> > > > index 3fa6f6e32ec..cda1a186390 100644
> > > > --- a/test/test-main.c
> > > > +++ b/test/test-main.c
> > > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > >                                struct unit_test *test)
> > > >  {
> > > > -     int runs;
> > > > +     int runs, ret;
> > > >
> > > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > > >               return skip_test(uts);
> > > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > > >                       uts->of_live = true;
> > > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > > -                     runs++;
> > > > +                     ret = ut_run_test(uts, test, test->name);
> > > > +                     if (ret != -EAGAIN) {
> > > > +                             ut_assertok(ret);
> > > > +                             runs++;
> > > > +                     }
> > > >               }
> > > >       }
> > > >
> > > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > >           (!runs || ut_test_run_on_flattree(test)) &&
> > > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > > >               uts->of_live = false;
> > > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > > -             runs++;
> > > > +             ret = ut_run_test(uts, test, test->name);
> > > > +             if (ret != -EAGAIN) {
> > > > +                     ut_assertok(ret);
> > > > +                     runs++;
> > > > +             }
> > > >       }
> > > >
> > > >       return 0;
> > >
> > > How did you trigger this case exactly?
> >
> > I noticed this in CI, where some skipped tests were shown as failed in
> > the log, even though they were not counted as failures in the final
> > results.
>
> That's really really strange, do you have an example log or something
> around still?

This happens on snow, which is (maybe) the only real board that
defines CONFIG_UNIT_TEST

test/py/tests/test_ut.py sssnow # ut bdinfo bdinfo_test_eth
Test: bdinfo_test_eth: bdinfo.c
Skipping: Console recording disabled
test/test-main.c:486, ut_run_test_live_flat(): 0 == ut_run_test(uts,
test, test->name): Expected 0x0 (0), got 0xfffffff5 (-11)
Test bdinfo_test_eth failed 1 times
Skipped: 1, Failures: 1
snow # F+u-boot-test-reset snow snow

For this particular mechanism (-EAGAIN returned by test_pre_run()), I
think a better fix would be to squash the error in ut_run_test(), as
is done when -EAGAIN is returned in the body of the test. I'll update
that. I cannot see any other way this could happen, but we can always
deal with it later if it does.

Regards,
Simon
Tom Rini June 26, 2024, 1:56 p.m. UTC | #5
On Wed, Jun 26, 2024 at 09:00:42AM +0100, Simon Glass wrote:
> Hi Tom,
> 
> On Tue, 25 Jun 2024 at 15:14, Tom Rini <trini@konsulko.com> wrote:
> >
> > On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> > > Hi Tom,
> > >
> > > On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> > > >
> > > > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> > > >
> > > > > When a test returns -EAGAIN this should not be considered a failure.
> > > > > Fix what seems to be a problem case, where the pytests see a failure
> > > > > when a test has merely been skipped.
> > > > >
> > > > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > > > ---
> > > > >
> > > > > (no changes since v1)
> > > > >
> > > > >  test/test-main.c | 16 +++++++++++-----
> > > > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > > > >
> > > > > diff --git a/test/test-main.c b/test/test-main.c
> > > > > index 3fa6f6e32ec..cda1a186390 100644
> > > > > --- a/test/test-main.c
> > > > > +++ b/test/test-main.c
> > > > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > > > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > >                                struct unit_test *test)
> > > > >  {
> > > > > -     int runs;
> > > > > +     int runs, ret;
> > > > >
> > > > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > > > >               return skip_test(uts);
> > > > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > > > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > > > >                       uts->of_live = true;
> > > > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > > > -                     runs++;
> > > > > +                     ret = ut_run_test(uts, test, test->name);
> > > > > +                     if (ret != -EAGAIN) {
> > > > > +                             ut_assertok(ret);
> > > > > +                             runs++;
> > > > > +                     }
> > > > >               }
> > > > >       }
> > > > >
> > > > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > >           (!runs || ut_test_run_on_flattree(test)) &&
> > > > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > > > >               uts->of_live = false;
> > > > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > > > -             runs++;
> > > > > +             ret = ut_run_test(uts, test, test->name);
> > > > > +             if (ret != -EAGAIN) {
> > > > > +                     ut_assertok(ret);
> > > > > +                     runs++;
> > > > > +             }
> > > > >       }
> > > > >
> > > > >       return 0;
> > > >
> > > > How did you trigger this case exactly?
> > >
> > > I noticed this in CI, where some skipped tests were shown as failed in
> > > the log, even though they were not counted as failures in the final
> > > results.
> >
> > That's really really strange, do you have an example log or something
> > around still?
> 
> This happens on snow, which is (maybe) the only real board that
> defines CONFIG_UNIT_TEST

I think it is too, but that's also perhaps a reminder that I should be
enabling it as part of my build before testing scripts. I'll go do that
now and see if this problem shows up a tiny bit more widely.

> test/py/tests/test_ut.py sssnow # ut bdinfo bdinfo_test_eth
> Test: bdinfo_test_eth: bdinfo.c
> Skipping: Console recording disabled
> test/test-main.c:486, ut_run_test_live_flat(): 0 == ut_run_test(uts,
> test, test->name): Expected 0x0 (0), got 0xfffffff5 (-11)
> Test bdinfo_test_eth failed 1 times
> Skipped: 1, Failures: 1
> snow # F+u-boot-test-reset snow snow
> 
> For this particular mechanism (-EAGAIN returned by test_pre_run()) , I
> think a better fix would be to squash the error in ut_run_test(), as
> is done when -EAGAIN is returned in the body of the test. I'll update
> that. I cannot see any other way this could happen, but we can always
> deal with it later if it does.

Thanks for explaining, please also include the example in the commit
message in v2.
Tom Rini June 26, 2024, 1:59 p.m. UTC | #6
On Wed, Jun 26, 2024 at 07:56:24AM -0600, Tom Rini wrote:
> On Wed, Jun 26, 2024 at 09:00:42AM +0100, Simon Glass wrote:
> > Hi Tom,
> > 
> > On Tue, 25 Jun 2024 at 15:14, Tom Rini <trini@konsulko.com> wrote:
> > >
> > > On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> > > > Hi Tom,
> > > >
> > > > On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> > > > >
> > > > > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> > > > >
> > > > > > When a test returns -EAGAIN this should not be considered a failure.
> > > > > > Fix what seems to be a problem case, where the pytests see a failure
> > > > > > when a test has merely been skipped.
> > > > > >
> > > > > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > > > > ---
> > > > > >
> > > > > > (no changes since v1)
> > > > > >
> > > > > >  test/test-main.c | 16 +++++++++++-----
> > > > > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > > > > >
> > > > > > diff --git a/test/test-main.c b/test/test-main.c
> > > > > > index 3fa6f6e32ec..cda1a186390 100644
> > > > > > --- a/test/test-main.c
> > > > > > +++ b/test/test-main.c
> > > > > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > > > > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > >                                struct unit_test *test)
> > > > > >  {
> > > > > > -     int runs;
> > > > > > +     int runs, ret;
> > > > > >
> > > > > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > > > > >               return skip_test(uts);
> > > > > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > > > > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > > > > >                       uts->of_live = true;
> > > > > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > -                     runs++;
> > > > > > +                     ret = ut_run_test(uts, test, test->name);
> > > > > > +                     if (ret != -EAGAIN) {
> > > > > > +                             ut_assertok(ret);
> > > > > > +                             runs++;
> > > > > > +                     }
> > > > > >               }
> > > > > >       }
> > > > > >
> > > > > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > >           (!runs || ut_test_run_on_flattree(test)) &&
> > > > > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > > > > >               uts->of_live = false;
> > > > > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > -             runs++;
> > > > > > +             ret = ut_run_test(uts, test, test->name);
> > > > > > +             if (ret != -EAGAIN) {
> > > > > > +                     ut_assertok(ret);
> > > > > > +                     runs++;
> > > > > > +             }
> > > > > >       }
> > > > > >
> > > > > >       return 0;
> > > > >
> > > > > How did you trigger this case exactly?
> > > >
> > > > I noticed this in CI, where some skipped tests were shown as failed in
> > > > the log, even though they were not counted as failures in the final
> > > > results.
> > >
> > > That's really really strange, do you have an example log or something
> > > around still?
> > 
> > This happens on snow, which is (maybe) the only real board that
> > defines CONFIG_UNIT_TEST
> 
> I think it is too, but that's also perhaps a reminder that I should be
> enabling it as part of my build before testing scripts. I'll go do that
> now and see if this problem shows up a tiny bit more widely.

OK, not right now then, there's missing dependencies within the test.
I'll selectively enable once v2 is in tho.
Simon Glass Aug. 22, 2024, 3 a.m. UTC | #7
Hi Tom,

On Wed, 26 Jun 2024 at 02:00, Simon Glass <sjg@chromium.org> wrote:
>
> Hi Tom,
>
> On Tue, 25 Jun 2024 at 15:14, Tom Rini <trini@konsulko.com> wrote:
> >
> > On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> > > Hi Tom,
> > >
> > > On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> > > >
> > > > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> > > >
> > > > > When a test returns -EAGAIN this should not be considered a failure.
> > > > > Fix what seems to be a problem case, where the pytests see a failure
> > > > > when a test has merely been skipped.
> > > > >
> > > > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > > > ---
> > > > >
> > > > > (no changes since v1)
> > > > >
> > > > >  test/test-main.c | 16 +++++++++++-----
> > > > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > > > >
> > > > > diff --git a/test/test-main.c b/test/test-main.c
> > > > > index 3fa6f6e32ec..cda1a186390 100644
> > > > > --- a/test/test-main.c
> > > > > +++ b/test/test-main.c
> > > > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > > > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > >                                struct unit_test *test)
> > > > >  {
> > > > > -     int runs;
> > > > > +     int runs, ret;
> > > > >
> > > > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > > > >               return skip_test(uts);
> > > > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > > > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > > > >                       uts->of_live = true;
> > > > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > > > -                     runs++;
> > > > > +                     ret = ut_run_test(uts, test, test->name);
> > > > > +                     if (ret != -EAGAIN) {
> > > > > +                             ut_assertok(ret);
> > > > > +                             runs++;
> > > > > +                     }
> > > > >               }
> > > > >       }
> > > > >
> > > > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > >           (!runs || ut_test_run_on_flattree(test)) &&
> > > > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > > > >               uts->of_live = false;
> > > > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > > > -             runs++;
> > > > > +             ret = ut_run_test(uts, test, test->name);
> > > > > +             if (ret != -EAGAIN) {
> > > > > +                     ut_assertok(ret);
> > > > > +                     runs++;
> > > > > +             }
> > > > >       }
> > > > >
> > > > >       return 0;
> > > >
> > > > How did you trigger this case exactly?
> > >
> > > I noticed this in CI, where some skipped tests were shown as failed in
> > > the log, even though they were not counted as failures in the final
> > > results.
> >
> > That's really really strange, do you have an example log or something
> > around still?
>
> This happens on snow, which is (maybe) the only real board that
> defines CONFIG_UNIT_TEST
>
> test/py/tests/test_ut.py sssnow # ut bdinfo bdinfo_test_eth
> Test: bdinfo_test_eth: bdinfo.c
> Skipping: Console recording disabled
> test/test-main.c:486, ut_run_test_live_flat(): 0 == ut_run_test(uts,
> test, test->name): Expected 0x0 (0), got 0xfffffff5 (-11)
> Test bdinfo_test_eth failed 1 times
> Skipped: 1, Failures: 1
> snow # F+u-boot-test-reset snow snow
>
> For this particular mechanism (-EAGAIN returned by test_pre_run()) , I
> think a better fix would be to squash the error in ut_run_test(), as
> is done when -EAGAIN is returned in the body of the test. I'll update
> that. I cannot see any other way this could happen, but we can always
> deal with it later if it does.

No, that doesn't work, since the failure counting happens in the
caller. This patch seems to be the right fix, so I'll send it again
with a better commit message.

Regards,
Simon
Tom Rini Aug. 22, 2024, 2:11 p.m. UTC | #8
On Wed, Aug 21, 2024 at 09:00:33PM -0600, Simon Glass wrote:
> Hi Tom,
> 
> On Wed, 26 Jun 2024 at 02:00, Simon Glass <sjg@chromium.org> wrote:
> >
> > Hi Tom,
> >
> > On Tue, 25 Jun 2024 at 15:14, Tom Rini <trini@konsulko.com> wrote:
> > >
> > > On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> > > > Hi Tom,
> > > >
> > > > On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> > > > >
> > > > > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> > > > >
> > > > > > When a test returns -EAGAIN this should not be considered a failure.
> > > > > > Fix what seems to be a problem case, where the pytests see a failure
> > > > > > when a test has merely been skipped.
> > > > > >
> > > > > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > > > > ---
> > > > > >
> > > > > > (no changes since v1)
> > > > > >
> > > > > >  test/test-main.c | 16 +++++++++++-----
> > > > > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > > > > >
> > > > > > diff --git a/test/test-main.c b/test/test-main.c
> > > > > > index 3fa6f6e32ec..cda1a186390 100644
> > > > > > --- a/test/test-main.c
> > > > > > +++ b/test/test-main.c
> > > > > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > > > > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > >                                struct unit_test *test)
> > > > > >  {
> > > > > > -     int runs;
> > > > > > +     int runs, ret;
> > > > > >
> > > > > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > > > > >               return skip_test(uts);
> > > > > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > > > > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > > > > >                       uts->of_live = true;
> > > > > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > -                     runs++;
> > > > > > +                     ret = ut_run_test(uts, test, test->name);
> > > > > > +                     if (ret != -EAGAIN) {
> > > > > > +                             ut_assertok(ret);
> > > > > > +                             runs++;
> > > > > > +                     }
> > > > > >               }
> > > > > >       }
> > > > > >
> > > > > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > >           (!runs || ut_test_run_on_flattree(test)) &&
> > > > > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > > > > >               uts->of_live = false;
> > > > > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > -             runs++;
> > > > > > +             ret = ut_run_test(uts, test, test->name);
> > > > > > +             if (ret != -EAGAIN) {
> > > > > > +                     ut_assertok(ret);
> > > > > > +                     runs++;
> > > > > > +             }
> > > > > >       }
> > > > > >
> > > > > >       return 0;
> > > > >
> > > > > How did you trigger this case exactly?
> > > >
> > > > I noticed this in CI, where some skipped tests were shown as failed in
> > > > the log, even though they were not counted as failures in the final
> > > > results.
> > >
> > > That's really really strange, do you have an example log or something
> > > around still?
> >
> > This happens on snow, which is (maybe) the only real board that
> > defines CONFIG_UNIT_TEST
> >
> > test/py/tests/test_ut.py sssnow # ut bdinfo bdinfo_test_eth
> > Test: bdinfo_test_eth: bdinfo.c
> > Skipping: Console recording disabled
> > test/test-main.c:486, ut_run_test_live_flat(): 0 == ut_run_test(uts,
> > test, test->name): Expected 0x0 (0), got 0xfffffff5 (-11)
> > Test bdinfo_test_eth failed 1 times
> > Skipped: 1, Failures: 1
> > snow # F+u-boot-test-reset snow snow
> >
> > For this particular mechanism (-EAGAIN returned by test_pre_run()) , I
> > think a better fix would be to squash the error in ut_run_test(), as
> > is done when -EAGAIN is returned in the body of the test. I'll update
> > that. I cannot see any other way this could happen, but we can always
> > deal with it later if it does.
> 
> No, that doesn't work, since the failure counting happens in the
> caller. This patch seems to be the right fix, so I'll send it again
> with a better commit message.

And I'll try and test it on Pi as with the series I posted and so
UNIT_TEST compiles on there, I got what looks like the same failure.
Simon Glass Aug. 22, 2024, 2:22 p.m. UTC | #9
Hi Tom,

On Thu, 22 Aug 2024 at 08:11, Tom Rini <trini@konsulko.com> wrote:
>
> On Wed, Aug 21, 2024 at 09:00:33PM -0600, Simon Glass wrote:
> > Hi Tom,
> >
> > On Wed, 26 Jun 2024 at 02:00, Simon Glass <sjg@chromium.org> wrote:
> > >
> > > Hi Tom,
> > >
> > > On Tue, 25 Jun 2024 at 15:14, Tom Rini <trini@konsulko.com> wrote:
> > > >
> > > > On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> > > > > Hi Tom,
> > > > >
> > > > > On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> > > > > >
> > > > > > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> > > > > >
> > > > > > > When a test returns -EAGAIN this should not be considered a failure.
> > > > > > > Fix what seems to be a problem case, where the pytests see a failure
> > > > > > > when a test has merely been skipped.
> > > > > > >
> > > > > > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > > > > > ---
> > > > > > >
> > > > > > > (no changes since v1)
> > > > > > >
> > > > > > >  test/test-main.c | 16 +++++++++++-----
> > > > > > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > > > > > >
> > > > > > > diff --git a/test/test-main.c b/test/test-main.c
> > > > > > > index 3fa6f6e32ec..cda1a186390 100644
> > > > > > > --- a/test/test-main.c
> > > > > > > +++ b/test/test-main.c
> > > > > > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > > > > > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > > >                                struct unit_test *test)
> > > > > > >  {
> > > > > > > -     int runs;
> > > > > > > +     int runs, ret;
> > > > > > >
> > > > > > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > > > > > >               return skip_test(uts);
> > > > > > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > > > > > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > > > > > >                       uts->of_live = true;
> > > > > > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > > -                     runs++;
> > > > > > > +                     ret = ut_run_test(uts, test, test->name);
> > > > > > > +                     if (ret != -EAGAIN) {
> > > > > > > +                             ut_assertok(ret);
> > > > > > > +                             runs++;
> > > > > > > +                     }
> > > > > > >               }
> > > > > > >       }
> > > > > > >
> > > > > > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > > >           (!runs || ut_test_run_on_flattree(test)) &&
> > > > > > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > > > > > >               uts->of_live = false;
> > > > > > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > > -             runs++;
> > > > > > > +             ret = ut_run_test(uts, test, test->name);
> > > > > > > +             if (ret != -EAGAIN) {
> > > > > > > +                     ut_assertok(ret);
> > > > > > > +                     runs++;
> > > > > > > +             }
> > > > > > >       }
> > > > > > >
> > > > > > >       return 0;
> > > > > >
> > > > > > How did you trigger this case exactly?
> > > > >
> > > > > I noticed this in CI, where some skipped tests were shown as failed in
> > > > > the log, even though they were not counted as failures in the final
> > > > > results.
> > > >
> > > > That's really really strange, do you have an example log or something
> > > > around still?
> > >
> > > This happens on snow, which is (maybe) the only real board that
> > > defines CONFIG_UNIT_TEST
> > >
> > > test/py/tests/test_ut.py sssnow # ut bdinfo bdinfo_test_eth
> > > Test: bdinfo_test_eth: bdinfo.c
> > > Skipping: Console recording disabled
> > > test/test-main.c:486, ut_run_test_live_flat(): 0 == ut_run_test(uts,
> > > test, test->name): Expected 0x0 (0), got 0xfffffff5 (-11)
> > > Test bdinfo_test_eth failed 1 times
> > > Skipped: 1, Failures: 1
> > > snow # F+u-boot-test-reset snow snow
> > >
> > > For this particular mechanism (-EAGAIN returned by test_pre_run()), I
> > > think a better fix would be to squash the error in ut_run_test(), as
> > > is done when -EAGAIN is returned in the body of the test. I'll update
> > > that. I cannot see any other way this could happen, but we can always
> > > deal with it later if it does.
> >
> > No, that doesn't work, since the failure counting happens in the
> > caller. This patch seems to be the right fix, so I'll send it again
> > with a better commit message.
>
> And I'll try and test it on Pi as with the series I posted and so
> UNIT_TEST compiles on there, I got what looks like the same failure.

OK ta.

It would be good to get the Labgrid series into -next as it fixes
quite a few problems on snow for me. It will provide a better base for
solving other problems.

Regards,
Simon
Tom Rini Aug. 22, 2024, 2:29 p.m. UTC | #10
On Thu, Aug 22, 2024 at 08:22:59AM -0600, Simon Glass wrote:
> Hi Tom,
> 
> On Thu, 22 Aug 2024 at 08:11, Tom Rini <trini@konsulko.com> wrote:
> >
> > On Wed, Aug 21, 2024 at 09:00:33PM -0600, Simon Glass wrote:
> > > Hi Tom,
> > >
> > > On Wed, 26 Jun 2024 at 02:00, Simon Glass <sjg@chromium.org> wrote:
> > > >
> > > > Hi Tom,
> > > >
> > > > On Tue, 25 Jun 2024 at 15:14, Tom Rini <trini@konsulko.com> wrote:
> > > > >
> > > > > On Tue, Jun 25, 2024 at 01:38:00PM +0100, Simon Glass wrote:
> > > > > > Hi Tom,
> > > > > >
> > > > > > On Mon, 24 Jun 2024 at 19:06, Tom Rini <trini@konsulko.com> wrote:
> > > > > > >
> > > > > > > On Sun, Jun 23, 2024 at 02:32:00PM -0600, Simon Glass wrote:
> > > > > > >
> > > > > > > > When a test returns -EAGAIN this should not be considered a failure.
> > > > > > > > Fix what seems to be a problem case, where the pytests see a failure
> > > > > > > > when a test has merely been skipped.
> > > > > > > >
> > > > > > > > Signed-off-by: Simon Glass <sjg@chromium.org>
> > > > > > > > ---
> > > > > > > >
> > > > > > > > (no changes since v1)
> > > > > > > >
> > > > > > > >  test/test-main.c | 16 +++++++++++-----
> > > > > > > >  1 file changed, 11 insertions(+), 5 deletions(-)
> > > > > > > >
> > > > > > > > diff --git a/test/test-main.c b/test/test-main.c
> > > > > > > > index 3fa6f6e32ec..cda1a186390 100644
> > > > > > > > --- a/test/test-main.c
> > > > > > > > +++ b/test/test-main.c
> > > > > > > > @@ -448,7 +448,7 @@ static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
> > > > > > > >  static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > > > >                                struct unit_test *test)
> > > > > > > >  {
> > > > > > > > -     int runs;
> > > > > > > > +     int runs, ret;
> > > > > > > >
> > > > > > > >       if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
> > > > > > > >               return skip_test(uts);
> > > > > > > > @@ -458,8 +458,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > > > >       if (CONFIG_IS_ENABLED(OF_LIVE)) {
> > > > > > > >               if (!(test->flags & UT_TESTF_FLAT_TREE)) {
> > > > > > > >                       uts->of_live = true;
> > > > > > > > -                     ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > > > -                     runs++;
> > > > > > > > +                     ret = ut_run_test(uts, test, test->name);
> > > > > > > > +                     if (ret != -EAGAIN) {
> > > > > > > > +                             ut_assertok(ret);
> > > > > > > > +                             runs++;
> > > > > > > > +                     }
> > > > > > > >               }
> > > > > > > >       }
> > > > > > > >
> > > > > > > > @@ -483,8 +486,11 @@ static int ut_run_test_live_flat(struct unit_test_state *uts,
> > > > > > > >           (!runs || ut_test_run_on_flattree(test)) &&
> > > > > > > >           !(gd->flags & GD_FLG_FDT_CHANGED)) {
> > > > > > > >               uts->of_live = false;
> > > > > > > > -             ut_assertok(ut_run_test(uts, test, test->name));
> > > > > > > > -             runs++;
> > > > > > > > +             ret = ut_run_test(uts, test, test->name);
> > > > > > > > +             if (ret != -EAGAIN) {
> > > > > > > > +                     ut_assertok(ret);
> > > > > > > > +                     runs++;
> > > > > > > > +             }
> > > > > > > >       }
> > > > > > > >
> > > > > > > >       return 0;
> > > > > > >
> > > > > > > How did you trigger this case exactly?
> > > > > >
> > > > > > I noticed this in CI, where some skipped tests were shown as failed in
> > > > > > the log, even though they were not counted as failures in the final
> > > > > > results.
> > > > >
> > > > > That's really really strange, do you have an example log or something
> > > > > around still?
> > > >
> > > > This happens on snow, which is (maybe) the only real board that
> > > > defines CONFIG_UNIT_TEST
> > > >
> > > > test/py/tests/test_ut.py sssnow # ut bdinfo bdinfo_test_eth
> > > > Test: bdinfo_test_eth: bdinfo.c
> > > > Skipping: Console recording disabled
> > > > test/test-main.c:486, ut_run_test_live_flat(): 0 == ut_run_test(uts,
> > > > test, test->name): Expected 0x0 (0), got 0xfffffff5 (-11)
> > > > Test bdinfo_test_eth failed 1 times
> > > > Skipped: 1, Failures: 1
> > > > snow # F+u-boot-test-reset snow snow
> > > >
> > > > For this particular mechanism (-EAGAIN returned by test_pre_run()), I
> > > > think a better fix would be to squash the error in ut_run_test(), as
> > > > is done when -EAGAIN is returned in the body of the test. I'll update
> > > > that. I cannot see any other way this could happen, but we can always
> > > > deal with it later if it does.
> > >
> > > No, that doesn't work, since the failure counting happens in the
> > > caller. This patch seems to be the right fix, so I'll send it again
> > > with a better commit message.
> >
> > And I'll try and test it on Pi as with the series I posted and so
> > UNIT_TEST compiles on there, I got what looks like the same failure.
> 
> OK ta.
> 
> It would be good to get the Labgrid series into -next as it fixes
> quite a few problems on snow for me. It will provide a better base for
> solving other problems.

I want to pick some of it for sure. But it also broke my lab support
(before an office power cycle left it in an odd state and I need to go
and confirm what's needed / not needed again to bring the boards up).
diff mbox series

Patch

diff --git a/test/test-main.c b/test/test-main.c
index 3fa6f6e32ec..cda1a186390 100644
--- a/test/test-main.c
+++ b/test/test-main.c
@@ -448,7 +448,7 @@  static int ut_run_test(struct unit_test_state *uts, struct unit_test *test,
 static int ut_run_test_live_flat(struct unit_test_state *uts,
 				 struct unit_test *test)
 {
-	int runs;
+	int runs, ret;
 
 	if ((test->flags & UT_TESTF_OTHER_FDT) && !IS_ENABLED(CONFIG_SANDBOX))
 		return skip_test(uts);
@@ -458,8 +458,11 @@  static int ut_run_test_live_flat(struct unit_test_state *uts,
 	if (CONFIG_IS_ENABLED(OF_LIVE)) {
 		if (!(test->flags & UT_TESTF_FLAT_TREE)) {
 			uts->of_live = true;
-			ut_assertok(ut_run_test(uts, test, test->name));
-			runs++;
+			ret = ut_run_test(uts, test, test->name);
+			if (ret != -EAGAIN) {
+				ut_assertok(ret);
+				runs++;
+			}
 		}
 	}
 
@@ -483,8 +486,11 @@  static int ut_run_test_live_flat(struct unit_test_state *uts,
 	    (!runs || ut_test_run_on_flattree(test)) &&
 	    !(gd->flags & GD_FLG_FDT_CHANGED)) {
 		uts->of_live = false;
-		ut_assertok(ut_run_test(uts, test, test->name));
-		runs++;
+		ret = ut_run_test(uts, test, test->name);
+		if (ret != -EAGAIN) {
+			ut_assertok(ret);
+			runs++;
+		}
 	}
 
 	return 0;