diff --git a/Common/Product/SharedProject/ProcessOutput.cs b/Common/Product/SharedProject/ProcessOutput.cs
index 884b5c5e9..e70dd944a 100644
--- a/Common/Product/SharedProject/ProcessOutput.cs
+++ b/Common/Product/SharedProject/ProcessOutput.cs
@@ -58,6 +58,15 @@ public virtual void Show() {
///
public virtual void ShowAndActivate() {
}
+
+ ///
+ /// Called to determine if stdin should be closed for a redirected process.
+ /// The default is true.
+ ///
+ public virtual bool CloseStandardInput()
+ {
+ return true;
+ }
}
sealed class TeeRedirector : Redirector, IDisposable {
@@ -304,7 +313,7 @@ public static ProcessOutput RunElevated(
return result;
}
- private static string GetArguments(IEnumerable<string> arguments, bool quoteArgs) {
+ public static string GetArguments(IEnumerable<string> arguments, bool quoteArgs) {
if (quoteArgs) {
return string.Join(" ", arguments.Where(a => a != null).Select(QuoteSingleArgument));
} else {
@@ -335,7 +344,7 @@ internal static IEnumerable SplitLines(string source) {
}
}
- internal static string QuoteSingleArgument(string arg) {
+ public static string QuoteSingleArgument(string arg) {
if (string.IsNullOrEmpty(arg)) {
return "\"\"";
}
@@ -423,10 +432,16 @@ private ProcessOutput(Process process, Redirector redirector) {
if (_process.StartInfo.RedirectStandardInput) {
// Close standard input so that we don't get stuck trying to read input from the user.
- try {
- _process.StandardInput.Close();
- } catch (InvalidOperationException) {
- // StandardInput not available
+ if (_redirector == null || _redirector.CloseStandardInput())
+ {
+ try
+ {
+ _process.StandardInput.Close();
+ }
+ catch (InvalidOperationException)
+ {
+ // StandardInput not available
+ }
}
}
}
@@ -557,6 +572,20 @@ public Redirector Redirector {
get { return _redirector; }
}
+ ///
+ /// Writes a line to stdin. A redirector must have been provided that indicates not
+ /// to close the StandardInput stream.
+ ///
+ ///
+ public void WriteInputLine(string line)
+ {
+ if (IsStarted && _redirector != null && !_redirector.CloseStandardInput())
+ {
+ _process.StandardInput.WriteLine(line);
+ _process.StandardInput.Flush();
+ }
+ }
+
private void FlushAndCloseOutput() {
if (_process == null) {
return;
diff --git a/Common/Product/TestAdapter/VisualStudioApp.cs b/Common/Product/TestAdapter/VisualStudioApp.cs
index 34f381b08..c932162a1 100644
--- a/Common/Product/TestAdapter/VisualStudioApp.cs
+++ b/Common/Product/TestAdapter/VisualStudioApp.cs
@@ -15,7 +15,9 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
+using System.IO;
using System.Linq;
+using System.Reflection;
using System.Runtime.InteropServices;
using EnvDTE;
using Microsoft.VisualStudio.OLE.Interop;
@@ -75,7 +77,35 @@ public DTE GetDTE() {
return dte;
}
+#if DEV15
+ private static bool DTELoaded = false;
+#endif
+
private static DTE GetDTE(int processId) {
+#if DEV15
+ // VS 2017 doesn't install some assemblies to the GAC that are needed to work with the
+ // debugger, and as the tests don't execute in the devenv.exe process, those assemblies
+ // fail to load - so load them manually from PublicAssemblies.
+
+ // Use the executable name, as this is only needed for the out of proc test execution
+ // that may interact with the debugger (vstest.executionengine.x86.exe).
+ if (!DTELoaded)
+ {
+ string currentProc = Process.GetCurrentProcess().MainModule.FileName;
+ if (StringComparer.OrdinalIgnoreCase.Equals(
+ Path.GetFileName(currentProc), "vstest.executionengine.x86.exe"))
+ {
+ string baseDir = Path.GetDirectoryName(currentProc);
+ string publicAssemblies = Path.Combine(baseDir, "..\\..\\..\\PublicAssemblies");
+
+ Assembly.LoadFrom(Path.Combine(publicAssemblies, "Microsoft.VisualStudio.OLE.Interop.dll"));
+ Assembly.LoadFrom(Path.Combine(publicAssemblies, "envdte90.dll"));
+ Assembly.LoadFrom(Path.Combine(publicAssemblies, "envdte80.dll"));
+ Assembly.LoadFrom(Path.Combine(publicAssemblies, "envdte.dll"));
+ }
+ DTELoaded = true;
+ }
+#endif
MessageFilter.Register();
var prefix = Process.GetProcessById(processId).ProcessName;
diff --git a/Nodejs/Product/Nodejs/TestFrameworks/ExportRunner/exportrunner.js b/Nodejs/Product/Nodejs/TestFrameworks/ExportRunner/exportrunner.js
index 88dc6b431..42cbaa368 100644
--- a/Nodejs/Product/Nodejs/TestFrameworks/ExportRunner/exportrunner.js
+++ b/Nodejs/Product/Nodejs/TestFrameworks/ExportRunner/exportrunner.js
@@ -1,6 +1,26 @@
var fs = require('fs');
var path = require('path');
var vm = require('vm');
+var result = {
+ 'title': '',
+ 'passed': false,
+ 'stdOut': '',
+ 'stdErr': ''
+};
+
+function append_stdout(string, encoding, fd) {
+ result.stdOut += string;
+}
+function append_stderr(string, encoding, fd) {
+ result.stdErr += string;
+}
+function hook_outputs() {
+ process.stdout.write = append_stdout;
+ process.stderr.write = append_stderr;
+}
+
+
+hook_outputs();
var find_tests = function (testFileList, discoverResultFile) {
var debug;
@@ -50,8 +70,43 @@ var find_tests = function (testFileList, discoverResultFile) {
};
module.exports.find_tests = find_tests;
-var run_tests = function (testName, testFile) {
- var testCase = require(testFile);
- testCase[testName]();
+var run_tests = function (testCases, callback) {
+ function post(event) {
+ callback(event);
+ hook_outputs();
+ }
+
+ for (var test of testCases) {
+ post({
+ type: 'test start',
+ title: test.testName
+ });
+ try {
+ var testCase = require(test.testFile);
+ result.title = test.testName;
+ testCase[test.testName]();
+ result.passed = true;
+ } catch (err) {
+ result.passed = false;
+ console.error(err.name);
+ console.error(err.message);
+ }
+ post({
+ type: 'result',
+ title: test.testName,
+ result: result
+ });
+ result = {
+ 'title': '',
+ 'passed': false,
+ 'stdOut': '',
+ 'stdErr': ''
+ };
+ }
+ callback({
+ type: 'suite end',
+ result: result
+ });
+ process.exit();
};
module.exports.run_tests = run_tests;
\ No newline at end of file
diff --git a/Nodejs/Product/Nodejs/TestFrameworks/Tape/tape.js b/Nodejs/Product/Nodejs/TestFrameworks/Tape/tape.js
index 2bfec5671..f60f9c79c 100644
--- a/Nodejs/Product/Nodejs/TestFrameworks/Tape/tape.js
+++ b/Nodejs/Product/Nodejs/TestFrameworks/Tape/tape.js
@@ -2,6 +2,20 @@
var EOL = require('os').EOL;
var fs = require('fs');
var path = require('path');
+var result = {
+ 'title': '',
+ 'passed': false,
+ 'stdOut': '',
+ 'stdErr': ''
+};
+
+function append_stdout(string, encoding, fd) {
+ result.stdOut += string;
+}
+
+function append_stderr(string, encoding, fd) {
+ result.stdErr += string;
+}
function find_tests(testFileList, discoverResultFile, projectFolder) {
var test = findTape(projectFolder);
@@ -10,7 +24,7 @@ function find_tests(testFileList, discoverResultFile, projectFolder) {
}
var harness = test.getHarness({ exit: false });
- var tests = harness["_tests"];
+ var tests = harness['_tests'];
var count = 0;
var testList = [];
@@ -37,24 +51,78 @@ function find_tests(testFileList, discoverResultFile, projectFolder) {
};
module.exports.find_tests = find_tests;
-function run_tests(testName, testFile, workingFolder, projectFolder) {
- var testCases = loadTestCases(testFile);
- if (testCases === null) {
+function run_tests(testInfo, callback) {
+ var tape = findTape(testInfo[0].projectFolder);
+ if (tape === null) {
return;
}
- var test = findTape(projectFolder);
- if (test === null) {
- return;
- }
+ var harness = tape.getHarness({objectMode: true});
+ var capture = false; // Only capture between 'test' and 'end' events to avoid skipped test events.
+ harness.createStream({ objectMode: true }).on('data', function (evt){
+ switch (evt.type) {
+ case 'test':
+ capture = true;
+ // Test is starting. Reset the result object. Send a "test start" event.
+ result = {
+ 'title': evt.name,
+ 'passed': true,
+ 'stdOut': '',
+ 'stdErr': ''
+ };
+ callback({
+ 'type': 'test start',
+ 'title': result.title,
+ 'result': result
+ });
+ break;
+ case 'assert':
+ if (!capture) break;
+ // Correlate the success/failure asserts for this test. There may be multiple per test
+ var msg = "Operator: " + evt.operator + ". Expected: " + evt.expected + ". Actual: " + evt.actual + "\n";
+ if (evt.ok) {
+ result.stdOut += msg;
+ } else {
+ result.stdErr += msg + (evt.error.stack || evt.error.message) + "\n";
+ result.passed = false;
+ }
+ break;
+ case 'end':
+ if (!capture) break;
+ // Test is done. Send a "result" event.
+ callback({
+ 'type': 'result',
+ 'title': result.title,
+ 'result': result
+ });
+ capture = false;
+ break;
+ default:
+ break;
+ }
+ });
- try {
- var harness = test.getHarness();
- harness.only(testName);
- } catch (e) {
- logError("Error running test:", testName, "in", testFile, e);
- return;
- }
+ loadTestCases(testInfo[0].testFile);
+
+ // Skip those not selected to run. The rest will start running on the next tick.
+ harness['_tests'].forEach(function(test){
+ if( !testInfo.some( function(ti){ return ti.testName == test.name; }) ) {
+ test._skip = true;
+ }
+ });
+
+ harness.onFinish(function () {
+ if (capture) {
+ // Something didn't finish. Finish it now.
+ result.passed = false;
+ callback({
+ 'type': 'result',
+ 'title': result.title,
+ 'result': result
+ });
+ }
+ process.exit(0);
+ });
}
module.exports.run_tests = run_tests;
diff --git a/Nodejs/Product/Nodejs/TestFrameworks/mocha/mocha.js b/Nodejs/Product/Nodejs/TestFrameworks/mocha/mocha.js
index d858fc0ef..9a6c4cf7f 100644
--- a/Nodejs/Product/Nodejs/TestFrameworks/mocha/mocha.js
+++ b/Nodejs/Product/Nodejs/TestFrameworks/mocha/mocha.js
@@ -2,11 +2,28 @@
var EOL = require('os').EOL;
var fs = require('fs');
var path = require('path');
-
+var result = {
+ 'title': '',
+ 'passed': false,
+ 'stdOut': '',
+ 'stdErr': ''
+};
// Choose 'tap' rather than 'min' or 'xunit'. The reason is that
// 'min' produces undisplayable text to stdout and stderr under piped/redirect,
// and 'xunit' does not print the stack trace from the test.
var defaultMochaOptions = { ui: 'tdd', reporter: 'tap', timeout: 2000 };
+function append_stdout(string, encoding, fd) {
+ result.stdOut += string;
+}
+function append_stderr(string, encoding, fd) {
+ result.stdErr += string;
+}
+function hook_outputs() {
+ process.stdout.write = append_stdout;
+ process.stderr.write = append_stderr;
+}
+
+hook_outputs();
var find_tests = function (testFileList, discoverResultFile, projectFolder) {
var Mocha = detectMocha(projectFolder);
@@ -56,25 +73,119 @@ var find_tests = function (testFileList, discoverResultFile, projectFolder) {
};
module.exports.find_tests = find_tests;
-var run_tests = function (testName, testFile, workingFolder, projectFolder) {
- var Mocha = detectMocha(projectFolder);
+var run_tests = function (testCases, callback) {
+ function post(event) {
+ callback(event);
+ hook_outputs();
+ }
+
+ function escapeRegExp(string) {
+ return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string
+ }
+
+ var testResults = [];
+ var Mocha = detectMocha(testCases[0].projectFolder);
if (!Mocha) {
return;
}
- var mocha = initializeMocha(Mocha, projectFolder);
+ var mocha = initializeMocha(Mocha, testCases[0].projectFolder);
+
+ var testGrepString = '^(' + testCases.map(function (testCase) {
+ return escapeRegExp(testCase.testName);
+ }).join('|') + ')$';
- if (testName) {
- if (typeof mocha.fgrep === 'function')
- mocha.fgrep(testName); // since Mocha 3.0.0
- else
- mocha.grep(testName); // prior Mocha 3.0.0
+ if (testGrepString) {
+ mocha.grep(new RegExp(testGrepString));
}
- mocha.addFile(testFile);
+ mocha.addFile(testCases[0].testFile);
- mocha.run(function (code) {
+ var runner = mocha.run(function (code) {
process.exit(code);
});
+
+ // See events available at https://github.com/mochajs/mocha/blob/8cae7a34f0b6eafeb16567beb8852b827cc5956b/lib/runner.js#L47-L57
+ runner.on('suite', function (suite) {
+ post({
+ type: 'suite start',
+ result: result
+ });
+ });
+
+ runner.on('suite end', function (suite) {
+ post({
+ type: 'suite end',
+ result: result
+ });
+ });
+
+ runner.on('hook', function (hook) {
+ post({
+ type: 'hook start',
+ title: hook.title,
+ result: result
+ });
+ });
+
+ runner.on('hook end', function (hook) {
+ post({
+ type: 'hook end',
+ title: hook.title,
+ result: result
+ });
+ });
+
+ runner.on('start', function () {
+ post({
+ type: 'start',
+ result: result
+ });
+ });
+
+ runner.on('test', function (test) {
+ result.title = test.fullTitle();
+ post({
+ type: 'test start',
+ title: result.title
+ });
+ });
+
+ runner.on('end', function () {
+ post({
+ type: 'end',
+ result: result
+ });
+ });
+
+ runner.on('pass', function (test) {
+ result.passed = true;
+ post({
+ type: 'result',
+ title: result.title,
+ result: result
+ });
+ result = {
+ 'title': '',
+ 'passed': false,
+ 'stdOut': '',
+ 'stdErr': ''
+ };
+ });
+
+ runner.on('fail', function (test, err) {
+ result.passed = false;
+ post({
+ type: 'result',
+ title: result.title,
+ result: result
+ });
+ result = {
+ 'title': '',
+ 'passed': false,
+ 'stdOut': '',
+ 'stdErr': ''
+ };
+ });
};
function logError() {
diff --git a/Nodejs/Product/Nodejs/TestFrameworks/run_tests.js b/Nodejs/Product/Nodejs/TestFrameworks/run_tests.js
index 3765fd252..3cae0bbc5 100644
--- a/Nodejs/Product/Nodejs/TestFrameworks/run_tests.js
+++ b/Nodejs/Product/Nodejs/TestFrameworks/run_tests.js
@@ -1,10 +1,37 @@
var framework;
-try {
- framework = require('./' + process.argv[2] + '/' + process.argv[2] + '.js');
-} catch (exception) {
- console.log("NTVS_ERROR:Failed to load TestFramework (" + process.argv[2] + "), " + exception);
- process.exit(1);
-}
+var readline = require('readline');
+var old_stdout = process.stdout.write;
+var old_stderr = process.stderr.write;
+var rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout
+});
-framework.run_tests(process.argv[3], process.argv[4], process.argv[5], process.argv[6]);
+rl.on('line', (line) => {
+ rl.close();
+ var testCases = JSON.parse(line);
+ // get rid of leftover quotations from C# (necessary?)
+ for (var test in testCases) {
+ for (var value in testCases[test]) {
+ testCases[test][value] = testCases[test][value].replace(/["]+/g, '');
+ }
+ }
+ try {
+ framework = require('./' + testCases[0].framework + '/' + testCases[0].framework + '.js');
+ } catch (exception) {
+ console.log("NTVS_ERROR:Failed to load TestFramework (" + testCases[0].framework + "), " + exception);
+ process.exit(1);
+ }
+
+ function postResult(result) {
+ // unhook stdout and stderr
+ process.stdout.write = old_stdout;
+ process.stderr.write = old_stderr;
+ if (result) {
+ console.log(JSON.stringify(result));
+ }
+ }
+ // run the test
+ framework.run_tests(testCases, postResult);
+});
diff --git a/Nodejs/Product/TestAdapter/TestExecutor.cs b/Nodejs/Product/TestAdapter/TestExecutor.cs
index 67bd721d0..2e6a2070d 100644
--- a/Nodejs/Product/TestAdapter/TestExecutor.cs
+++ b/Nodejs/Product/TestAdapter/TestExecutor.cs
@@ -32,9 +32,32 @@
using Microsoft.VisualStudioTools;
using Microsoft.VisualStudioTools.Project;
using MSBuild = Microsoft.Build.Evaluation;
+using Newtonsoft.Json;
+
+namespace Microsoft.NodejsTools.TestAdapter {
+ class TestExecutionRedirector : Redirector
+ {
+ Action<string> writer;
+ public TestExecutionRedirector(Action<string> onWriteLine)
+ {
+ writer = onWriteLine;
+ }
+ public override void WriteErrorLine(string line)
+ {
+ writer(line);
+ }
+
+ public override void WriteLine(string line)
+ {
+ writer(line);
+ }
+
+ public override bool CloseStandardInput()
+ {
+ return false;
+ }
+ }
-namespace Microsoft.NodejsTools.TestAdapter
-{
[ExtensionUri(TestExecutor.ExecutorUriString)]
class TestExecutor : ITestExecutor {
public const string ExecutorUriString = "executor://NodejsTestExecutor/v1";
@@ -44,8 +67,13 @@ class TestExecutor : ITestExecutor {
private readonly ManualResetEvent _cancelRequested = new ManualResetEvent(false);
+ private static readonly char[] _needToBeQuoted = new[] { ' ', '"' };
private ProcessOutput _nodeProcess;
private object _syncObject = new object();
+ private List<TestCase> _currentTests;
+ private IFrameworkHandle _frameworkHandle;
+ private TestResult _currentResult = null;
+ private ResultObject _currentResultObject = null;
public void Cancel() {
//let us just kill the node process there, rather do it late, because VS engine process
@@ -54,8 +82,38 @@ public void Cancel() {
_cancelRequested.Set();
}
+ private void ProcessTestRunnerEmit(string line)
+ {
+ try
+ {
+ TestEvent testEvent = JsonConvert.DeserializeObject<TestEvent>(line);
+ // Extract test from list of tests
+ var test = _currentTests.Where(n => n.DisplayName == testEvent.title);
+ if (test.Count() > 0)
+ {
+ if (testEvent.type == "test start")
+ {
+ _currentResult = new TestResult(test.First());
+ _currentResult.StartTime = DateTimeOffset.Now;
+ _frameworkHandle.RecordStart(test.First());
+ }
+ else if (testEvent.type == "result")
+ {
+ RecordEnd(_frameworkHandle, test.First(), _currentResult, testEvent.result);
+ }
+ }
+ else if (testEvent.type == "suite end")
+ {
+ _currentResultObject = testEvent.result;
+ }
+ }
+ catch (JsonReaderException) {
+ // Often lines emitted while running tests are not test results, and thus will fail to parse above
+ }
+ }
+
///
- /// This is the equivallent of "RunAll" functionality
+ /// This is the equivalent of "RunAll" functionality
///
/// Refers to the list of test sources passed to the test adapter from the client. (Client could be VS or command line)
/// Defines the settings related to the current run
@@ -75,148 +133,124 @@ public void RunTests(IEnumerable sources, IRunContext runContext, IFrame
return;
}
- RunTestCases(receiver.Tests, runContext, frameworkHandle);
+ RunTests(receiver.Tests, runContext, frameworkHandle);
}
- public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle) {
+ ///
+ /// This is the equivalent of "Run Selected Tests" functionality.
+ ///
+ /// The list of TestCases selected to run
+ /// Defines the settings related to the current run
+ /// Handle to framework. Used for recording results
+ public void RunTests(IEnumerable<TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
+ {
ValidateArg.NotNull(tests, "tests");
ValidateArg.NotNull(runContext, "runContext");
ValidateArg.NotNull(frameworkHandle, "frameworkHandle");
-
_cancelRequested.Reset();
- RunTestCases(tests, runContext, frameworkHandle);
+ // .ts file path -> project settings
+ var fileToTests = new Dictionary<string, List<TestCase>>();
+ var sourceToSettings = new Dictionary<string, NodejsProjectSettings>();
+ NodejsProjectSettings projectSettings = null;
+
+ // put tests into dictionary where key is their source file
+ foreach (var test in tests)
+ {
+ if (!fileToTests.ContainsKey(test.CodeFilePath))
+ {
+ fileToTests[test.CodeFilePath] = new List<TestCase>();
+ }
+ fileToTests[test.CodeFilePath].Add(test);
+ }
+
+ // where key is the file and value is a list of tests
+ foreach (KeyValuePair<string, List<TestCase>> entry in fileToTests)
+ {
+ TestCase firstTest = entry.Value.ElementAt(0);
+ if (!sourceToSettings.TryGetValue(firstTest.Source, out projectSettings))
+ {
+ sourceToSettings[firstTest.Source] = projectSettings = LoadProjectSettings(firstTest.Source);
+ }
+
+ _currentTests = entry.Value;
+ _frameworkHandle = frameworkHandle;
+
+ // Run all test cases in a given file
+ RunTestCases(entry.Value, runContext, frameworkHandle, projectSettings);
+ }
}
- private void RunTestCases(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle) {
+ private void RunTestCases(IEnumerable tests, IRunContext runContext, IFrameworkHandle frameworkHandle, NodejsProjectSettings settings) {
// May be null, but this is handled by RunTestCase if it matters.
// No VS instance just means no debugging, but everything else is
// okay.
+ if (tests.Count() == 0)
+ {
+ return;
+ }
using (var app = VisualStudioApp.FromEnvironmentVariable(NodejsConstants.NodeToolsProcessIdEnvironmentVariable)) {
+ int port = 0;
+ List<string> nodeArgs = new List<string>();
// .njsproj file path -> project settings
var sourceToSettings = new Dictionary();
+ List<TestCaseObject> testObjects = new List<TestCaseObject>();
+
+ if (!File.Exists(settings.NodeExePath))
+ {
+ frameworkHandle.SendMessage(TestMessageLevel.Error, "Interpreter path does not exist: " + settings.NodeExePath);
+ return;
+ }
+
+ // All tests being run are for the same test file, so just use the first test listed to get the working dir
+ NodejsTestInfo testInfo = new NodejsTestInfo(tests.First().FullyQualifiedName);
+ var workingDir = Path.GetDirectoryName(CommonUtils.GetAbsoluteFilePath(settings.WorkingDir, testInfo.ModulePath));
foreach (var test in tests) {
if (_cancelRequested.WaitOne(0)) {
break;
}
- try {
- RunTestCase(app, frameworkHandle, runContext, test, sourceToSettings);
- } catch (Exception ex) {
- frameworkHandle.SendMessage(TestMessageLevel.Error, ex.ToString());
+ if (settings == null) {
+ frameworkHandle.SendMessage(
+ TestMessageLevel.Error,
+ "Unable to determine interpreter to use for " + test.Source);
+ frameworkHandle.RecordEnd(test, TestOutcome.Failed);
}
- }
- }
- }
- private void KillNodeProcess() {
- lock (_syncObject) {
- if (_nodeProcess != null) {
- _nodeProcess.Kill();
- }
- }
- }
- private static int GetFreePort() {
- return Enumerable.Range(new Random().Next(49152, 65536), 60000).Except(
- from connection in IPGlobalProperties.GetIPGlobalProperties().GetActiveTcpConnections()
- select connection.LocalEndPoint.Port
- ).First();
- }
+ List<string> args = new List<string>();
+ args.AddRange(GetInterpreterArgs(test, workingDir, settings.ProjectRootDir));
- private IEnumerable GetInterpreterArgs(TestCase test, string workingDir, string projectRootDir) {
- TestFrameworks.NodejsTestInfo testInfo = new TestFrameworks.NodejsTestInfo(test.FullyQualifiedName);
- TestFrameworks.FrameworkDiscover discover = new TestFrameworks.FrameworkDiscover();
- return discover.Get(testInfo.TestFramework).ArgumentsToRunTests(testInfo.TestName, testInfo.ModulePath, workingDir, projectRootDir);
- }
-
- private static IEnumerable GetDebugArgs(NodejsProjectSettings settings, out int port) {
- port = GetFreePort();
-
- return new[] {
- "--debug-brk=" + port.ToString()
- };
- }
-
- private void RunTestCase(VisualStudioApp app, IFrameworkHandle frameworkHandle, IRunContext runContext, TestCase test, Dictionary sourceToSettings) {
- var testResult = new TestResult(test);
- frameworkHandle.RecordStart(test);
- testResult.StartTime = DateTimeOffset.Now;
- NodejsProjectSettings settings;
- if (!sourceToSettings.TryGetValue(test.Source, out settings)) {
- sourceToSettings[test.Source] = settings = LoadProjectSettings(test.Source);
- }
- if (settings == null) {
- frameworkHandle.SendMessage(
- TestMessageLevel.Error,
- "Unable to determine interpreter to use for " + test.Source);
- RecordEnd(
- frameworkHandle,
- test,
- testResult,
- null,
- "Unable to determine interpreter to use for " + test.Source,
- TestOutcome.Failed);
- return;
- }
-
-#if DEV15
- // VS 2017 doesn't install some assemblies to the GAC that are needed to work with the
- // debugger, and as the tests don't execute in the devenv.exe process, those assemblies
- // fail to load - so load them manually from PublicAssemblies.
-
- // Use the executable name, as this is only needed for the out of proc test execution
- // that may interact with the debugger (vstest.executionengine.x86.exe).
- string currentProc = Process.GetCurrentProcess().MainModule.FileName;
- if(Path.GetFileName(currentProc).ToLowerInvariant().Equals("vstest.executionengine.x86.exe"))
- {
- string baseDir = Path.GetDirectoryName(currentProc);
- string publicAssemblies = Path.Combine(baseDir, "..\\..\\..\\PublicAssemblies");
-
- Assembly.LoadFrom(Path.Combine(publicAssemblies, "Microsoft.VisualStudio.OLE.Interop.dll"));
- Assembly.LoadFrom(Path.Combine(publicAssemblies, "envdte90.dll"));
- Assembly.LoadFrom(Path.Combine(publicAssemblies, "envdte80.dll"));
- Assembly.LoadFrom(Path.Combine(publicAssemblies, "envdte.dll"));
- }
-#endif
+ // Fetch the run_tests argument for starting node.exe if not specified yet
+ if(nodeArgs.Count == 0 && args.Count > 0)
+ {
+ nodeArgs.Add(args[0]);
+ }
- NodejsTestInfo testInfo = new NodejsTestInfo(test.FullyQualifiedName);
- List args = new List();
- int port = 0;
- if (runContext.IsBeingDebugged && app != null) {
- app.GetDTE().Debugger.DetachAll();
- args.AddRange(GetDebugArgs(settings, out port));
- }
+ testObjects.Add(new TestCaseObject(args[1], args[2], args[3], args[4], args[5]));
+ }
- var workingDir = Path.GetDirectoryName(CommonUtils.GetAbsoluteFilePath(settings.WorkingDir, testInfo.ModulePath));
- args.AddRange(GetInterpreterArgs(test, workingDir, settings.ProjectRootDir));
+ if (runContext.IsBeingDebugged && app != null)
+ {
+ app.GetDTE().Debugger.DetachAll();
+ // Ensure that --debug-brk is the first argument
+ nodeArgs.InsertRange(0, GetDebugArgs(out port));
+ }
- //Debug.Fail("attach debugger");
- if (!File.Exists(settings.NodeExePath)) {
- frameworkHandle.SendMessage(TestMessageLevel.Error, "Interpreter path does not exist: " + settings.NodeExePath);
- return;
- }
- lock (_syncObject) {
_nodeProcess = ProcessOutput.Run(
- settings.NodeExePath,
- args,
- workingDir,
- null,
- false,
- null,
- false);
+ settings.NodeExePath,
+ nodeArgs,
+ settings.WorkingDir,
+ /* env */ null,
+ /* visible */ false,
+ /* redirector */ new TestExecutionRedirector(this.ProcessTestRunnerEmit),
+ /* quote args */ false);
-#if DEBUG
- frameworkHandle.SendMessage(TestMessageLevel.Informational, "cd " + workingDir);
- frameworkHandle.SendMessage(TestMessageLevel.Informational, _nodeProcess.Arguments);
-#endif
-
- _nodeProcess.Wait(TimeSpan.FromMilliseconds(500));
if (runContext.IsBeingDebugged && app != null) {
try {
//the '#ping=0' is a special flag to tell VS node debugger not to connect to the port,
//because a connection carries the consequence of setting off --debug-brk, and breakpoints will be missed.
- string qualifierUri = string.Format(CultureInfo.InvariantCulture, "tcp://localhost:{0}#ping=0", port);
+ string qualifierUri = string.Format("tcp://localhost:{0}#ping=0", port);
while (!app.AttachToProcess(_nodeProcess, NodejsRemoteDebugPortSupplierUnsecuredId, qualifierUri)) {
if (_nodeProcess.Wait(TimeSpan.FromMilliseconds(500))) {
break;
@@ -235,16 +269,52 @@ private void RunTestCase(VisualStudioApp app, IFrameworkHandle frameworkHandle,
}
#endif
}
+ // Send the process the list of tests to run and wait for it to complete
+ _nodeProcess.WriteInputLine(JsonConvert.SerializeObject(testObjects));
+ _nodeProcess.Wait();
+
+ // Automatically fail tests that haven't been run by this point (failures in before() hooks)
+ foreach(TestCase notRunTest in _currentTests) {
+ TestResult result = new TestResult(notRunTest);
+ result.Outcome = TestOutcome.Failed;
+ if(_currentResultObject != null) {
+ result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, _currentResultObject.stdout));
+ result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, _currentResultObject.stderr));
+ }
+ frameworkHandle.RecordResult(result);
+ frameworkHandle.RecordEnd(notRunTest, TestOutcome.Failed);
+ }
}
+ }
- WaitHandle.WaitAll(new WaitHandle[] { _nodeProcess.WaitHandle });
+ private void KillNodeProcess() {
+ lock (_syncObject) {
+ if (_nodeProcess != null) {
+ _nodeProcess.Kill();
+ }
+ }
+ }
- bool runCancelled = _cancelRequested.WaitOne(0);
- RecordEnd(frameworkHandle, test, testResult,
- string.Join(Environment.NewLine, _nodeProcess.StandardOutputLines),
- string.Join(Environment.NewLine, _nodeProcess.StandardErrorLines),
- (!runCancelled && _nodeProcess.ExitCode == 0) ? TestOutcome.Passed : TestOutcome.Failed);
- _nodeProcess.Dispose();
+ private static int GetFreePort() {
+ return Enumerable.Range(new Random().Next(49152, 65536), 60000).Except(
+ from connection in IPGlobalProperties.GetIPGlobalProperties().GetActiveTcpConnections()
+ select connection.LocalEndPoint.Port
+ ).First();
+ }
+
+ private IEnumerable<string> GetInterpreterArgs(TestCase test, string workingDir, string projectRootDir) {
+ TestFrameworks.NodejsTestInfo testInfo = new TestFrameworks.NodejsTestInfo(test.FullyQualifiedName);
+ TestFrameworks.FrameworkDiscover discover = new TestFrameworks.FrameworkDiscover();
+ return discover.Get(testInfo.TestFramework).ArgumentsToRunTests(testInfo.TestName, testInfo.ModulePath, workingDir, projectRootDir);
+ }
+
+ private static IEnumerable<string> GetDebugArgs(out int port) {
+ port = GetFreePort();
+
+ // TODO: Need to use --inspect-brk on Node.js 8 or later
+ return new[] {
+ "--debug-brk=" + port.ToString()
+ };
}
private NodejsProjectSettings LoadProjectSettings(string projectFile) {
@@ -272,51 +342,95 @@ private NodejsProjectSettings LoadProjectSettings(string projectFile) {
};
}
- private static void RecordEnd(IFrameworkHandle frameworkHandle, TestCase test, TestResult result, string stdout, string stderr, TestOutcome outcome) {
+ private void RecordEnd(IFrameworkHandle frameworkHandle, TestCase test, TestResult result, ResultObject resultObject) {
+ String[] standardOutputLines = resultObject.stdout.Split('\n');
+ String[] standardErrorLines = resultObject.stderr.Split('\n');
result.EndTime = DateTimeOffset.Now;
result.Duration = result.EndTime - result.StartTime;
- result.Outcome = outcome;
- result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, stdout));
- result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, stderr));
- result.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, stderr));
-
+ result.Outcome = resultObject.passed ? TestOutcome.Passed : TestOutcome.Failed;
+ result.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, String.Join(Environment.NewLine, standardOutputLines)));
+ result.Messages.Add(new TestResultMessage(TestResultMessage.StandardErrorCategory, String.Join(Environment.NewLine, standardErrorLines)));
+ result.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, String.Join(Environment.NewLine, standardErrorLines)));
frameworkHandle.RecordResult(result);
- frameworkHandle.RecordEnd(test, outcome);
+ frameworkHandle.RecordEnd(test, result.Outcome);
+ _currentTests.Remove(test);
}
+ }
+}
- class DataReceiver {
- public readonly StringBuilder Data = new StringBuilder();
+class DataReceiver {
+ public readonly StringBuilder Data = new StringBuilder();
- public void DataReceived(object sender, DataReceivedEventArgs e) {
- if (e.Data != null) {
- Data.AppendLine(e.Data);
- }
- }
+ public void DataReceived(object sender, DataReceivedEventArgs e) {
+ if (e.Data != null) {
+ Data.AppendLine(e.Data);
}
+ }
+}
- class TestReceiver : ITestCaseDiscoverySink {
- public List Tests { get; private set; }
+class TestReceiver : ITestCaseDiscoverySink {
+ public List<TestCase> Tests { get; private set; }
- public TestReceiver() {
- Tests = new List();
- }
+ public TestReceiver() {
+ Tests = new List<TestCase>();
+ }
- public void SendTestCase(TestCase discoveredTest) {
- Tests.Add(discoveredTest);
- }
- }
+ public void SendTestCase(TestCase discoveredTest) {
+ Tests.Add(discoveredTest);
+ }
+}
- class NodejsProjectSettings {
- public NodejsProjectSettings() {
- NodeExePath = String.Empty;
- SearchPath = String.Empty;
- WorkingDir = String.Empty;
- }
+class NodejsProjectSettings {
+ public NodejsProjectSettings() {
+ NodeExePath = String.Empty;
+ SearchPath = String.Empty;
+ WorkingDir = String.Empty;
+ }
- public string NodeExePath { get; set; }
- public string SearchPath { get; set; }
- public string WorkingDir { get; set; }
- public string ProjectRootDir { get; set; }
- }
+ public string NodeExePath { get; set; }
+ public string SearchPath { get; set; }
+ public string WorkingDir { get; set; }
+ public string ProjectRootDir { get; set; }
+}
+
+class ResultObject {
+ public ResultObject() {
+ title = String.Empty;
+ passed = false;
+ stdout = String.Empty;
+ stderr = String.Empty;
}
+ public string title { get; set; }
+ public bool passed { get; set; }
+ public string stdout { get; set; }
+ public string stderr { get; set; }
+}
+
+class TestEvent {
+ public string type { get; set; }
+ public string title { get; set; }
+ public ResultObject result { get; set; }
}
+
+class TestCaseObject {
+ public TestCaseObject() {
+ framework = String.Empty;
+ testName = String.Empty;
+ testFile = String.Empty;
+ workingFolder = String.Empty;
+ projectFolder = String.Empty;
+ }
+
+ public TestCaseObject(string framework, string testName, string testFile, string workingFolder, string projectFolder) {
+ this.framework = framework;
+ this.testName = testName;
+ this.testFile = testFile;
+ this.workingFolder = workingFolder;
+ this.projectFolder = projectFolder;
+ }
+ public string framework { get; set; }
+ public string testName { get; set; }
+ public string testFile { get; set; }
+ public string workingFolder { get; set; }
+ public string projectFolder { get; set; }
+}
\ No newline at end of file