/*
 * PROJECT:     ReactOS Automatic Testing Utility
 * LICENSE:     GPL-2.0+ (https://spdx.org/licenses/GPL-2.0+)
 * PURPOSE:     Class implementing functions for handling Wine tests
 * COPYRIGHT:   Copyright 2009-2019 Colin Finck (colin@reactos.org)
 */
// Maximum time (in milliseconds, per WaitForSingleObject) to wait for a test
// module's "--list" process in CWineTest::DoListCommand.
10 static const DWORD ListTimeout
= 10000;
12 // This value needs to be lower than the <timeout> configured in sysreg.xml! (usually 180000)
13 // Otherwise, sysreg2 kills the VM before we can kill the process.
14 static const DWORD ProcessActivityTimeout
= 170000;
18 * Constructs a CWineTest object.
20 CWineTest::CWineTest()
21 : m_hFind(NULL
), m_ListBuffer(NULL
)
23 WCHAR wszDirectory
[MAX_PATH
];
25 /* Set up m_TestPath */
// The ROSAUTOTEST_DIR environment variable, when set, overrides the default test directory.
26 if (GetEnvironmentVariableW(L
"ROSAUTOTEST_DIR", wszDirectory
, MAX_PATH
))
28 m_TestPath
= wszDirectory
;
// The environment-supplied directory may lack a trailing backslash; this check
// detects that case. NOTE(review): the line appending the missing backslash is
// not visible in this extract — confirm against the full file.
29 if (*m_TestPath
.rbegin() != L
'\\')
// No ROSAUTOTEST_DIR set: fall back to "<windir>\bin\" as the test directory.
34 if (!GetWindowsDirectoryW(wszDirectory
, MAX_PATH
))
35 FATAL("GetWindowsDirectoryW failed\n");
37 m_TestPath
= wszDirectory
;
38 m_TestPath
+= L
"\\bin\\";
43 * Destructs a CWineTest object.
// NOTE(review): the destructor body is not visible in this extract; given the
// m_hFind and m_ListBuffer members initialized above, it presumably releases
// them (FindClose / delete[]) — confirm against the full file.
45 CWineTest::~CWineTest()
55 * Gets the next module test file using the FindFirstFileW/FindNextFileW API.
58 * true if we found a next file, otherwise false.
61 CWineTest::GetNextFile()
63 bool FoundFile
= false;
66 /* Did we already begin searching for files? */
69 /* Then get the next file (if any) */
70 if(FindNextFileW(m_hFind
, &fd
))
75 /* Start searching for test files */
76 wstring FindPath
= m_TestPath
;
78 /* Did the user specify a module? */
79 if(Configuration
.GetModule().empty())
81 /* No module, so search for all files in that directory */
86 /* Search for files with the pattern "modulename_*" */
87 FindPath
+= Configuration
.GetModule();
88 FindPath
+= L
"_*.exe";
91 /* Search for the first file and check whether we got one */
92 m_hFind
= FindFirstFileW(FindPath
.c_str(), &fd
);
94 if(m_hFind
!= INVALID_HANDLE_VALUE
)
// Remember the name of the file we found, for use by the other methods
// (DoListCommand builds its command line from m_CurrentFile).
99 m_CurrentFile
= fd
.cFileName
;
105 * Executes the --list command of a module test file to get information about the available tests.
108 * The number of bytes we read into the m_ListBuffer member variable by capturing the output of the --list command.
111 CWineTest::DoListCommand()
113 DWORD BytesAvailable
;
118 /* Build the command line */
// Full command: "<m_TestPath><m_CurrentFile> --list".
119 CommandLine
= m_TestPath
;
120 CommandLine
+= m_CurrentFile
;
121 CommandLine
+= L
" --list";
124 /* Start the process for getting all available tests */
125 CPipedProcess
Process(CommandLine
, Pipe
);
127 /* Wait till this process ended */
// Bounded by ListTimeout; only a WAIT_FAILED result is treated as an error here.
128 if(WaitForSingleObject(Process
.GetProcessHandle(), ListTimeout
) == WAIT_FAILED
)
129 TESTEXCEPTION("WaitForSingleObject failed for the test list\n");
132 /* Read the output data into a buffer */
// Peek only queries how many bytes are waiting in the pipe; nothing is consumed yet.
133 if(!Pipe
.Peek(NULL
, 0, NULL
, &BytesAvailable
))
134 TESTEXCEPTION("CPipe::Peek failed for the test list\n");
136 /* Check if we got any */
141 ss
<< "The --list command did not return any data for " << UnicodeToAscii(m_CurrentFile
) << endl
;
142 TESTEXCEPTION(ss
.str());
// m_ListBuffer is released with delete[] in GetNextTest (once the whole list
// has been consumed) and in GetNextTestInfo's exception handler.
146 m_ListBuffer
= new char[BytesAvailable
];
148 if(Pipe
.Read(m_ListBuffer
, BytesAvailable
, &Temp
, INFINITE
) != ERROR_SUCCESS
)
149 TESTEXCEPTION("CPipe::Read failed\n");
151 return BytesAvailable
;
155 * Gets the next test from m_ListBuffer, which was filled with information from the --list command.
158 * true if a next test was found, otherwise false.
161 CWineTest::GetNextTest()
// static: the buffer size must survive across calls while we iterate over the
// test list of the current module file.
164 static DWORD BufferSize
;
169 /* Perform the --list command */
170 BufferSize
= DoListCommand();
172 /* Move the pointer to the first test */
// The test names start after the first line of the --list output, hence the
// skip to the first '\n'.
173 pStart
= strchr(m_ListBuffer
, '\n');
177 /* If we reach the buffer size, we finished analyzing the output of this test */
178 if(pStart
>= (m_ListBuffer
+ BufferSize
))
180 /* Clear m_CurrentFile to indicate that */
181 m_CurrentFile
.clear();
183 /* Also free the memory for the list buffer */
184 delete[] m_ListBuffer
;
190 /* Get start and end of this test name */
196 /* Store the test name */
// [pStart, pEnd) delimits the current test name inside m_ListBuffer.
197 m_CurrentTest
= string(pStart
, pEnd
);
199 /* Move the pointer to the next test */
206 * Interface to CTestList-derived classes for getting all information about the next test to be run.
209 * Returns a pointer to a CTestInfo object containing all available information about the next test.
212 CWineTest::GetNextTestInfo()
// Outer loop: keep going as long as there is a current file, or another test
// file can be found.
214 while(!m_CurrentFile
.empty() || GetNextFile())
220 /* If the user specified a test through the command line, check this here */
221 if(!Configuration
.GetTest().empty() && Configuration
.GetTest() != m_CurrentTest
)
// Ownership of the CTestInfo is transferred to the caller via release() below.
225 auto_ptr
<CTestInfo
> TestInfo(new CTestInfo());
226 size_t UnderscorePosition
;
228 /* Build the command line */
// Full command: "<m_TestPath><m_CurrentFile> <testname>".
229 TestInfo
->CommandLine
= m_TestPath
;
230 TestInfo
->CommandLine
+= m_CurrentFile
;
231 TestInfo
->CommandLine
+= ' ';
232 TestInfo
->CommandLine
+= AsciiToUnicode(m_CurrentTest
);
234 /* Store the Module name */
// The module name is everything before the last underscore of the file name
// (matching the "modulename_*" search pattern used in GetNextFile).
235 UnderscorePosition
= m_CurrentFile
.find_last_of('_');
237 if(UnderscorePosition
== m_CurrentFile
.npos
)
241 ss
<< "Invalid test file name: " << UnicodeToAscii(m_CurrentFile
) << endl
;
245 TestInfo
->Module
= UnicodeToAscii(m_CurrentFile
.substr(0, UnderscorePosition
));
248 TestInfo
->Test
= m_CurrentTest
;
250 return TestInfo
.release();
254 catch(CTestException
& e
)
// Listing tests for this file failed: report the problem, then reset state so
// the outer loop advances to the next file, and free the list buffer
// allocated by DoListCommand.
258 ss
<< "An exception occurred trying to list tests for: " << UnicodeToAscii(m_CurrentFile
) << endl
;
260 StringOut(e
.GetMessage());
262 m_CurrentFile
.clear();
263 delete[] m_ListBuffer
;
271 * Runs a Wine test and captures the output
274 * Pointer to a CTestInfo object containing information about the test.
275 * Will contain the test log afterwards if the user wants to submit data.
278 CWineTest::RunTest(CTestInfo
* TestInfo
)
280 DWORD BytesAvailable
;
281 stringstream ss
, ssFinish
;
288 ss
<< "Running Wine Test, Module: " << TestInfo
->Module
<< ", Test: " << TestInfo
->Test
<< endl
;
// Record the start tick so the elapsed time can be reported at the end.
291 StartTime
= GetTickCount();
295 /* Execute the test */
296 CPipedProcess
Process(TestInfo
->CommandLine
, Pipe
);
298 /* Receive all the data from the pipe */
// Read output in chunks; each read waits at most ProcessActivityTimeout for
// new data, so a hung test is detected (see the WAIT_TIMEOUT branch below).
301 DWORD dwReadResult
= Pipe
.Read(Buffer
, sizeof(Buffer
) - 1, &BytesAvailable
, ProcessActivityTimeout
);
302 if (dwReadResult
== ERROR_SUCCESS
)
304 /* Output text through StringOut, even while the test is still running */
// NUL-terminate the chunk so it can be treated as a C string below.
305 Buffer
[BytesAvailable
] = 0;
306 tailString
= StringOut(tailString
.append(string(Buffer
)), false);
308 if (Configuration
.DoSubmit())
309 TestInfo
->Log
+= Buffer
;
311 else if (dwReadResult
== ERROR_BROKEN_PIPE
)
313 // The process finished and has been terminated.
316 else if (dwReadResult
== WAIT_TIMEOUT
)
318 // The process activity timeout above has elapsed without any new data.
319 TESTEXCEPTION("Timeout while waiting for the test process\n");
323 // An unexpected error.
324 TESTEXCEPTION("CPipe::Read failed for the test run\n");
328 catch(CTestException
& e
)
// On failure, flush any pending output first, then report the exception and
// keep its message in the log for submission.
330 if(!tailString
.empty())
331 StringOut(tailString
);
333 StringOut(e
.GetMessage());
334 TestInfo
->Log
+= e
.GetMessage();
337 /* Print what's left */
338 if(!tailString
.empty())
339 StringOut(tailString
);
// Elapsed wall-clock time in seconds, printed with two decimal places.
341 TotalTime
= ((float)GetTickCount() - StartTime
)/1000;
342 ssFinish
<< "Test " << TestInfo
->Test
<< " completed in ";
343 ssFinish
<< setprecision(2) << fixed
<< TotalTime
<< " seconds." << endl
;
344 StringOut(ssFinish
.str());
345 TestInfo
->Log
+= ssFinish
.str();
349 * Interface to other classes for running all desired Wine tests.
354 auto_ptr
<CTestList
> TestList
;
355 auto_ptr
<CWebService
> WebService
;
359 /* The virtual test list is of course faster, so it should be preferred over
361 Enable the journaled one only in case ...
362 - we're running under ReactOS (as the journal is only useful in conjunction with sysreg2)
363 - we shall keep information for Crash Recovery
364 - and the user didn't specify a module (then doing Crash Recovery doesn't really make sense) */
365 if(Configuration
.IsReactOS() && Configuration
.DoCrashRecovery() && Configuration
.GetModule().empty())
367 /* Use a test list with a permanent journal */
368 TestList
.reset(new CJournaledTestList(this));
372 /* Use the fast virtual test list with no additional overhead */
373 TestList
.reset(new CVirtualTestList(this));
376 /* Initialize the Web Service interface if required */
377 if(Configuration
.DoSubmit())
378 WebService
.reset(new CWebService());
380 /* Disable error dialogs if we're running in non-interactive mode */
// SetErrorMode returns the previous mode; save it so it can be restored below.
381 if(!Configuration
.IsInteractive())
382 ErrorMode
= SetErrorMode(SEM_FAILCRITICALERRORS
| SEM_NOGPFAULTERRORBOX
);
384 /* Get information for each test to run */
385 while((TestInfo
= TestList
->GetNextTestInfo()) != 0)
// The auto_ptr guarantees each CTestInfo returned by GetNextTestInfo is
// deleted at the end of its loop iteration.
387 auto_ptr
<CTestInfo
> TestInfoPtr(TestInfo
);
391 if(Configuration
.DoSubmit() && !TestInfo
->Log
.empty())
392 WebService
->Submit("wine", TestInfo
);
397 /* We're done with all tests. Finish this run */
398 if(Configuration
.DoSubmit())
399 WebService
->Finish("wine");
401 /* Restore the original error mode */
402 if(!Configuration
.IsInteractive())
403 SetErrorMode(ErrorMode
);