- "use strict";
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.createMergedReport = createMergedReport;
- var _fs = _interopRequireDefault(require("fs"));
- var _path = _interopRequireDefault(require("path"));
- var _teleReceiver = require("../isomorphic/teleReceiver");
- var _stringInternPool = require("../isomorphic/stringInternPool");
- var _reporters = require("../runner/reporters");
- var _multiplexer = require("./multiplexer");
- var _utils = require("playwright-core/lib/utils");
- var _blob = require("./blob");
- var _util = require("../util");
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- /**
- * Copyright (c) Microsoft Corporation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
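- // Reads all blob report shards from `dir`, merges their event streams and
- // replays the result through the reporters configured for the `merge` command.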
- async function createMergedReport(config, dir, reporterDescriptions, rootDirOverride) {
- var _eventData$pathSepara;
- const reporters = await (0, _reporters.createReporters)(config, 'merge', reporterDescriptions);
- const multiplexer = new _multiplexer.Multiplexer(reporters);
- const stringPool = new _stringInternPool.StringInternPool();
- let printStatus = () => {};
- if (!multiplexer.printsToStdio()) {
- printStatus = printStatusToStdout;
- printStatus(`merging reports from ${dir}`);
- }
- const shardFiles = await sortedShardFiles(dir);
- if (shardFiles.length === 0) throw new Error(`No report files found in ${dir}`);
- const eventData = await mergeEvents(dir, shardFiles, stringPool, printStatus, rootDirOverride);
- // If an explicit config is provided, use the platform path separator; otherwise use the one from the report metadata (if any).
- const pathSep = rootDirOverride ? _path.default.sep : (_eventData$pathSepara = eventData.pathSeparatorFromMetadata) !== null && _eventData$pathSepara !== void 0 ? _eventData$pathSepara : _path.default.sep;
- const receiver = new _teleReceiver.TeleReporterReceiver(pathSep, multiplexer, false, config.config);
- printStatus(`processing test events`);
- const dispatchEvents = async events => {
- for (const event of events) {
- if (event.method === 'onEnd') printStatus(`building final report`);
- await receiver.dispatch(event);
- if (event.method === 'onEnd') printStatus(`finished building report`);
- }
- };
- await dispatchEvents(eventData.prologue);
- for (const {
- reportFile,
- eventPatchers
- } of eventData.reports) {
- const reportJsonl = await _fs.default.promises.readFile(reportFile);
- const events = parseTestEvents(reportJsonl);
- new _stringInternPool.JsonStringInternalizer(stringPool).traverse(events);
- eventPatchers.patchers.push(new AttachmentPathPatcher(dir));
- eventPatchers.patchEvents(events);
- await dispatchEvents(events);
- }
- await dispatchEvents(eventData.epilogue);
- }
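- // Events that describe the run as a whole. They are parsed from every shard
- // up front and merged, while per-test events are streamed afterwards.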
- const commonEventNames = ['onBlobReportMetadata', 'onConfigure', 'onProject', 'onBegin', 'onEnd'];
- const commonEvents = new Set(commonEventNames);
- const commonEventRegex = new RegExp(`${commonEventNames.join('|')}`);
- function parseCommonEvents(reportJsonl) {
- return splitBufferLines(reportJsonl)
- .map(line => line.toString('utf8'))
- .filter(line => commonEventRegex.test(line)) // quick filter
- .map(line => JSON.parse(line))
- .filter(event => commonEvents.has(event.method));
- }
- function parseTestEvents(reportJsonl) {
- return splitBufferLines(reportJsonl)
- .map(line => line.toString('utf8'))
- .filter(line => line.length)
- .map(line => JSON.parse(line))
- .filter(event => !commonEvents.has(event.method));
- }
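- // Splits a JSONL buffer into per-line buffers without decoding the whole file at once.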
- function splitBufferLines(buffer) {
- const lines = [];
- let start = 0;
- while (start < buffer.length) {
- // 0x0A is the byte for '\n'
- const end = buffer.indexOf(0x0A, start);
- if (end === -1) {
- lines.push(buffer.slice(start));
- break;
- }
- lines.push(buffer.slice(start, end));
- start = end + 1;
- }
- return lines;
- }
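- // Unzips every shard blob into `dir`, writes out its resources and parses the
- // common events from the contained .jsonl report.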
- async function extractAndParseReports(dir, shardFiles, internalizer, printStatus) {
- const shardEvents = [];
- await _fs.default.promises.mkdir(_path.default.join(dir, 'resources'), {
- recursive: true
- });
- const reportNames = new UniqueFileNameGenerator();
- for (const file of shardFiles) {
- const absolutePath = _path.default.join(dir, file);
- printStatus(`extracting: ${(0, _util.relativeFilePath)(absolutePath)}`);
- const zipFile = new _utils.ZipFile(absolutePath);
- const entryNames = await zipFile.entries();
- for (const entryName of entryNames.sort()) {
- let fileName = _path.default.join(dir, entryName);
- const content = await zipFile.read(entryName);
- if (entryName.endsWith('.jsonl')) {
- fileName = reportNames.makeUnique(fileName);
- const parsedEvents = parseCommonEvents(content);
- // Passing a reviver to JSON.parse doesn't work, as the original strings
- // keep being used. To work around that we traverse the parsed events
- // as a post-processing step.
- internalizer.traverse(parsedEvents);
- shardEvents.push({
- file,
- localPath: fileName,
- metadata: findMetadata(parsedEvents, file),
- parsedEvents
- });
- }
- await _fs.default.promises.writeFile(fileName, content);
- }
- zipFile.close();
- }
- return shardEvents;
- }
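- // The first event of every blob must be onBlobReportMetadata; also rejects blobs
- // produced by a newer Playwright version than the one doing the merge.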
- function findMetadata(events, file) {
- var _events$;
- if (((_events$ = events[0]) === null || _events$ === void 0 ? void 0 : _events$.method) !== 'onBlobReportMetadata') throw new Error(`No metadata event found in ${file}`);
- const metadata = events[0].params;
- if (metadata.version > _blob.currentBlobReportVersion) throw new Error(`Blob report ${file} was created with a newer version of Playwright.`);
- return metadata;
- }
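- // Produces the merged event stream: a prologue (merged configure + project events),
- // a list of per-blob test event files with their patchers, and an epilogue (merged end event).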
- async function mergeEvents(dir, shardReportFiles, stringPool, printStatus, rootDirOverride) {
- var _blobs$;
- const internalizer = new _stringInternPool.JsonStringInternalizer(stringPool);
- const configureEvents = [];
- const projectEvents = [];
- const endEvents = [];
- const blobs = await extractAndParseReports(dir, shardReportFiles, internalizer, printStatus);
- // Sort by (report name; shard; file name), so that salt generation below is deterministic when:
- // - report names are unique;
- // - report names are missing;
- // - report names are clashing between shards.
- blobs.sort((a, b) => {
- var _a$metadata$name, _b$metadata$name, _a$metadata$shard$cur, _a$metadata$shard, _b$metadata$shard$cur, _b$metadata$shard;
- const nameA = (_a$metadata$name = a.metadata.name) !== null && _a$metadata$name !== void 0 ? _a$metadata$name : '';
- const nameB = (_b$metadata$name = b.metadata.name) !== null && _b$metadata$name !== void 0 ? _b$metadata$name : '';
- if (nameA !== nameB) return nameA.localeCompare(nameB);
- const shardA = (_a$metadata$shard$cur = (_a$metadata$shard = a.metadata.shard) === null || _a$metadata$shard === void 0 ? void 0 : _a$metadata$shard.current) !== null && _a$metadata$shard$cur !== void 0 ? _a$metadata$shard$cur : 0;
- const shardB = (_b$metadata$shard$cur = (_b$metadata$shard = b.metadata.shard) === null || _b$metadata$shard === void 0 ? void 0 : _b$metadata$shard.current) !== null && _b$metadata$shard$cur !== void 0 ? _b$metadata$shard$cur : 0;
- if (shardA !== shardB) return shardA - shardB;
- return a.file.localeCompare(b.file);
- });
- const saltSet = new Set();
- printStatus(`merging events`);
- const reports = [];
- for (const {
- file,
- parsedEvents,
- metadata,
- localPath
- } of blobs) {
- // Generate unique salt for each blob.
- const sha1 = (0, _utils.calculateSha1)(metadata.name || _path.default.basename(file)).substring(0, 16);
- let salt = sha1;
- for (let i = 0; saltSet.has(salt); i++) salt = sha1 + '-' + i;
- saltSet.add(salt);
- const eventPatchers = new JsonEventPatchers();
- eventPatchers.patchers.push(new IdsPatcher(stringPool, metadata.name, salt));
- // Only patch path separators if we are merging reports with an explicit config.
- if (rootDirOverride) eventPatchers.patchers.push(new PathSeparatorPatcher(metadata.pathSeparator));
- eventPatchers.patchEvents(parsedEvents);
- for (const event of parsedEvents) {
- if (event.method === 'onConfigure') configureEvents.push(event);
- else if (event.method === 'onProject') projectEvents.push(event);
- else if (event.method === 'onEnd') endEvents.push(event);
- }
- // Save information about the reports to stream their test events later.
- reports.push({
- eventPatchers,
- reportFile: localPath
- });
- }
- return {
- prologue: [mergeConfigureEvents(configureEvents, rootDirOverride), ...projectEvents, {
- method: 'onBegin',
- params: undefined
- }],
- reports,
- epilogue: [mergeEndEvents(endEvents), {
- method: 'onExit',
- params: undefined
- }],
- pathSeparatorFromMetadata: (_blobs$ = blobs[0]) === null || _blobs$ === void 0 ? void 0 : _blobs$.metadata.pathSeparator
- };
- }
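- // Folds the per-shard onConfigure events into a single config; refuses to merge
- // blobs recorded with different rootDirs unless an explicit merge config is given.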
- function mergeConfigureEvents(configureEvents, rootDirOverride) {
- if (!configureEvents.length) throw new Error('No configure events found');
- let config = {
- configFile: undefined,
- globalTimeout: 0,
- maxFailures: 0,
- metadata: {},
- rootDir: '',
- version: '',
- workers: 0,
- listOnly: false
- };
- for (const event of configureEvents) config = mergeConfigs(config, event.params.config);
- if (rootDirOverride) {
- config.rootDir = rootDirOverride;
- } else {
- const rootDirs = new Set(configureEvents.map(e => e.params.config.rootDir));
- if (rootDirs.size > 1) {
- throw new Error([
- `Blob reports being merged were recorded with different test directories, and`,
- `merging cannot proceed. This may happen if you are merging reports from`,
- `machines with different environments, like different operating systems or`,
- `if the tests ran with different playwright configs.`,
- ``,
- `You can force merge by specifying a merge config file with "-c" option. If`,
- `you'd like all test paths to be correct, make sure 'testDir' in the merge config`,
- `file points to the actual tests location.`,
- ``,
- `Found directories:`,
- ...rootDirs
- ].join('\n'));
- }
- }
- return {
- method: 'onConfigure',
- params: {
- config
- }
- };
- }
- function mergeConfigs(to, from) {
- return {
- ...to,
- ...from,
- metadata: {
- ...to.metadata,
- ...from.metadata,
- actualWorkers: (to.metadata.actualWorkers || 0) + (from.metadata.actualWorkers || 0)
- },
- workers: to.workers + from.workers
- };
- }
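- // Combines the per-shard onEnd events: the worst status wins, the earliest start
- // time and the longest duration are kept.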
- function mergeEndEvents(endEvents) {
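- // Start from a far-future sentinel so Math.min picks the earliest shard start time;
- // with no end events, fall back to the current time.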
- let startTime = endEvents.length ? 10000000000000 : Date.now();
- let status = 'passed';
- let duration = 0;
- for (const event of endEvents) {
- const shardResult = event.params.result;
- if (shardResult.status === 'failed') status = 'failed';
- else if (shardResult.status === 'timedout' && status !== 'failed') status = 'timedout';
- else if (shardResult.status === 'interrupted' && status !== 'failed' && status !== 'timedout') status = 'interrupted';
- startTime = Math.min(startTime, shardResult.startTime);
- duration = Math.max(duration, shardResult.duration);
- }
- const result = {
- status,
- startTime,
- duration
- };
- return {
- method: 'onEnd',
- params: {
- result
- }
- };
- }
- async function sortedShardFiles(dir) {
- const files = await _fs.default.promises.readdir(dir);
- return files.filter(file => file.endsWith('.zip')).sort();
- }
- function printStatusToStdout(message) {
- process.stdout.write(`${message}\n`);
- }
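- // Deduplicates extracted report file names by appending an incrementing suffix.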
- class UniqueFileNameGenerator {
- constructor() {
- this._usedNames = new Set();
- }
- makeUnique(name) {
- if (!this._usedNames.has(name)) {
- this._usedNames.add(name);
- return name;
- }
- const extension = _path.default.extname(name);
- name = name.substring(0, name.length - extension.length);
- let index = 0;
- while (true) {
- const candidate = `${name}-${++index}${extension}`;
- if (!this._usedNames.has(candidate)) {
- this._usedNames.add(candidate);
- return candidate;
- }
- }
- }
- }
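- // Rewrites project and test ids with a per-blob salt so that ids coming from
- // different shards never clash, and records the report name in project metadata.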
- class IdsPatcher {
- constructor(_stringPool, _reportName, _salt) {
- this._stringPool = _stringPool;
- this._reportName = _reportName;
- this._salt = _salt;
- }
- patchEvent(event) {
- const {
- method,
- params
- } = event;
- switch (method) {
- case 'onProject':
- this._onProject(params.project);
- return;
- case 'onTestBegin':
- case 'onStepBegin':
- case 'onStepEnd':
- case 'onStdIO':
- params.testId = this._mapTestId(params.testId);
- return;
- case 'onTestEnd':
- params.test.testId = this._mapTestId(params.test.testId);
- return;
- }
- }
- _onProject(project) {
- var _project$metadata;
- project.metadata = (_project$metadata = project.metadata) !== null && _project$metadata !== void 0 ? _project$metadata : {};
- project.metadata.reportName = this._reportName;
- project.id = this._stringPool.internString(project.id + this._salt);
- project.suites.forEach(suite => this._updateTestIds(suite));
- }
- _updateTestIds(suite) {
- suite.tests.forEach(test => test.testId = this._mapTestId(test.testId));
- suite.suites.forEach(suite => this._updateTestIds(suite));
- }
- _mapTestId(testId) {
- return this._stringPool.internString(testId + this._salt);
- }
- }
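- // Resolves attachment paths (stored relative inside the blob) against the
- // directory the blob resources were extracted into.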
- class AttachmentPathPatcher {
- constructor(_resourceDir) {
- this._resourceDir = _resourceDir;
- }
- patchEvent(event) {
- if (event.method !== 'onTestEnd') return;
- for (const attachment of event.params.result.attachments) {
- if (!attachment.path) continue;
- attachment.path = _path.default.join(this._resourceDir, attachment.path);
- }
- }
- }
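- // Rewrites path separators in file paths recorded on another platform to the
- // separator of the platform doing the merge.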
- class PathSeparatorPatcher {
- constructor(from) {
- this._from = void 0;
- this._to = void 0;
- this._from = from !== null && from !== void 0 ? from : _path.default.sep === '/' ? '\\' : '/';
- this._to = _path.default.sep;
- }
- patchEvent(jsonEvent) {
- if (this._from === this._to) return;
- if (jsonEvent.method === 'onProject') {
- this._updateProject(jsonEvent.params.project);
- return;
- }
- if (jsonEvent.method === 'onTestEnd') {
- const testResult = jsonEvent.params.result;
- testResult.errors.forEach(error => this._updateLocation(error.location));
- testResult.attachments.forEach(attachment => {
- if (attachment.path) attachment.path = this._updatePath(attachment.path);
- });
- return;
- }
- if (jsonEvent.method === 'onStepBegin') {
- const step = jsonEvent.params.step;
- this._updateLocation(step.location);
- return;
- }
- }
- _updateProject(project) {
- project.outputDir = this._updatePath(project.outputDir);
- project.testDir = this._updatePath(project.testDir);
- project.snapshotDir = this._updatePath(project.snapshotDir);
- project.suites.forEach(suite => this._updateSuite(suite, true));
- }
- _updateSuite(suite, isFileSuite = false) {
- this._updateLocation(suite.location);
- if (isFileSuite) suite.title = this._updatePath(suite.title);
- for (const child of suite.suites) this._updateSuite(child);
- for (const test of suite.tests) this._updateLocation(test.location);
- }
- _updateLocation(location) {
- if (location) location.file = this._updatePath(location.file);
- }
- _updatePath(text) {
- return text.split(this._from).join(this._to);
- }
- }
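- // Applies every registered patcher to each event in a stream.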
- class JsonEventPatchers {
- constructor() {
- this.patchers = [];
- }
- patchEvents(events) {
- for (const event of events) {
- for (const patcher of this.patchers) patcher.patchEvent(event);
- }
- }
- }
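- // Usage sketch (assumes a resolved Playwright config object and a directory of blob
- // shards; reporter descriptions follow the usual [name, options] form):
- //   await createMergedReport(config, '/tmp/blob-report', [['html', {}]], undefined);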