diff --git a/PrettyPrintDuplexStream.js b/PrettyPrintDuplexStream.js deleted file mode 100644 index fa7a17d..0000000 --- a/PrettyPrintDuplexStream.js +++ /dev/null @@ -1,154 +0,0 @@ -const Transform = require('stream').Transform; -const fs = require('fs'); -const path = require('path'); -const chalk = require('chalk'); //library for colorizing Strings -// color palette for messages -- can also do rgb; e.g. chalk.rgb(123, 45, 67) -const originalErrorColor = chalk.bold.redBright; -const errorDetailColor = chalk.bold.cyan; -const errorCodeColor = chalk.bold.greenBright; -const noErrorCode = '-1'; - -//https://stackoverflow.com/questions/48507828/pipe-issue-with-node-js-duplex-stream-example - -// make a class that implements a Duplex stream; this means you can use it to pipe into and out of -class PrettyPrintDuplexStream extends Transform { - - constructor(name, options) { - super(options); - this.name = name; - this.solutionMap = {}; - const csvFilePath = path.join(__dirname, 'errorMessages.txt' ); - const allLines = fs.readFileSync(csvFilePath).toString().split('\n'); - this.solutionMap[ noErrorCode ] = 'Error message has no error code'; // put in a value for error messages that don't have an ERROR_CODE - - // populate a map with the key as the ERROR_CODE number and the value is the suggested solution - for( var i in allLines) { - const line = allLines[i].toString(); - const parts= line.split(','); - const key = parts[0].trim(); - const value = parts[2]; - this.solutionMap[key] = value; - } - this.idSet = new Set(); - this.idSet.add( noErrorCode ); - } - - translateNames( inName ) { // translate error Strings to friendlier informative alternatives. - switch(inName) { - case 'shr-expand': return ('Model Expansion'); - case 'shr-fhir-export' : return ('FHIR Export') ; - default: return(inName); - } - } - - getUnqualifiedName ( inName ) { // take a name with '.' delimiters and return the last part - const nameParts = inName.split('.'); - if (nameParts.length > 0) { - return(nameParts[nameParts.length -1 ]); - } - return(inName); - } - - getMatchWithRegexPos(myStr, myRegex, pos) { // apply regex to String; return match at pos if it matches; otherwise the original string - const myMatch = myStr.match(myRegex); - return( myStr.match(myRegex) != null ? 
myMatch[pos] : ''); - } - - parseFromPrefixOrSuffix( myRegex, myPrefix, mySuffix ) { - let myPart = this.getMatchWithRegexPos(mySuffix, myRegex, 1); //parse field from suffix - if (myPart === '') { // if missing, try prefix - myPart = this.getMatchWithRegexPos(myPrefix, myRegex, 1); - } - return(myPart); - } - - // this function processes a single error message and returns a colorized, formatted string - // for the moment, it writes the original input message in red to console.log - processLine(myinline, printAllErrors) { - const myline = myinline.toString(); - const result = myline.match(/ERROR_CODE:([\d]+)\s/); - let preErrCode = ''; - let postErrCode = ''; - let eCode = noErrorCode; - if (printAllErrors) { - console.log( originalErrorColor (myline.trim() )); // print the input message in red (for now) - } - let formattedOutput = '\nERROR '; - - // split myline on ERROR_CODE; preErrCode is everything before ERROR_CODE; postErrCode is everything after - if ( result != null) { - const temp = myline.split('ERROR_CODE'); - preErrCode = temp[0]; - postErrCode = temp[1]; - if ( result[1] == null) { - eCode = noErrorCode; - } - else { - eCode = result[1].trim(); - } - } - - const dateTimeRegex = /\[\d\d:\d\d:\d\d.\d\d\dZ\]\s+/; - let outline = myline.replace(dateTimeRegex,'').toString(); //remove timestamp; format is [hh.mm.ss.xxxZ] - const errShrRegex = /(ERROR[\s]+[\w]+:[\s]+)/; - // split into piece before "ERROR_CODE" and piece after "ERROR_CODE" - preErrCode = preErrCode.replace(errShrRegex, ''); // remove the 'ERROR shr' part - preErrCode = preErrCode.replace(dateTimeRegex, ''); // remove the timestamp - formattedOutput += `${eCode}: ${preErrCode}`; // first part of new message is ERROR xxxxx: <> - - // parse the parts we need - const modulePart = this.parseFromPrefixOrSuffix( /module=([\w.-]+)[,]*/, preErrCode, postErrCode ); //parse module - const shrIdPart = this.parseFromPrefixOrSuffix( /shrId=([\w.-]+)[,]*/, preErrCode, postErrCode ); //parse shrId - //parse MappingRule - const mappingRulePart = this.parseFromPrefixOrSuffix( /mappingRule[=:]+[\s]*(["]*([\w."[\]-]+[\s]*)+["]*)/ ,preErrCode, postErrCode).replace(/[\n]+/g,' '); - let targetPart = this.parseFromPrefixOrSuffix( /target=([\w.-]+)[,]*/, preErrCode, postErrCode ); //parse target part - const targetSpecPart = this.parseFromPrefixOrSuffix(/targetSpec=([\w.-]+)[,]*/,preErrCode, postErrCode); //parse targetSpec - const targetUrlPart = this.getMatchWithRegexPos(postErrCode, /target:[\s]+([\w./:-]+)[,]*/, 1); //parse targetURL - if (targetPart === '') { - targetPart = targetUrlPart; // use targetURL if target is unavailable - } - - // now we have pieces; assemble the pieces into a formatted, colorized, multi-line message - outline = errorCodeColor(formattedOutput) - + errorDetailColor ( '\n During: ' + this.translateNames(modulePart)) - + errorDetailColor( '\n Class: ' + this.getUnqualifiedName(this.translateNames(shrIdPart))); - - // if parts are optional/missing, then only print them if they are found - if (targetSpecPart != '') { - outline += errorDetailColor( '\n Target Spec: ' + this.translateNames(targetSpecPart)); - } - if (targetPart != '') { - outline += errorDetailColor( '\n Target Class: ' + this.translateNames(targetPart)); - } - if (mappingRulePart != '') { - outline += errorDetailColor( '\n Mapping Rule: ' + this.translateNames(mappingRulePart)) ; - } - - // lookup the suggested fix using eCode as the key - const suggestedFixPart = this.solutionMap[eCode].toString().trim().replace(/['"']+/g,''); - if 
(suggestedFixPart !== '' && suggestedFixPart !== 'Unknown') { - outline += errorDetailColor( '\n Suggested Fix: ' + suggestedFixPart ) ; - } - const key = eCode; // if you want a less strict de-duplicator, you can add another element; e.g. let key = eCode + targetPart; - - if (this.idSet.has(key)) { - return(''); - } - else { - this.idSet.add(key); - if (printAllErrors === true) { - console.log( originalErrorColor (myline )); - } - return (outline); - } - } - - - _transform(chunk, encoding, callback) { - const ans = this.processLine(chunk, false); - console.log( ans ); - callback(); - } -} - -module.exports = PrettyPrintDuplexStream; \ No newline at end of file diff --git a/PrettyPrintDuplexStreamJson.js b/PrettyPrintDuplexStreamJson.js index d06a69b..7c2dcda 100644 --- a/PrettyPrintDuplexStreamJson.js +++ b/PrettyPrintDuplexStreamJson.js @@ -1,7 +1,9 @@ +/* eslint no-console: off */ const Transform = require('stream').Transform; const fs = require('fs'); const path = require('path'); const chalk = require('chalk'); //library for colorizing Strings +const { nameFromLevel } = require('bunyan'); // color palette for messages -- can also do rgb; e.g. chalk.rgb(123, 45, 67) const originalErrorColor = chalk.bold.greenBright; const errorDetailColor = chalk.bold.cyan; @@ -20,7 +22,7 @@ class PrettyPrintDuplexStreamJson extends Transform { this.templateStrings = {}; const csvFilePath = path.join(__dirname, 'errorMessages.txt' ); this.solutionMap[ noErrorCode ] = 'Error message has no error code; please add error code'; // -1 means noErrorCode - // build the hashMap resources from the errorMessages csv file; each column has a part + // build the hashMap resources from the errorMessages csv file; each column has a part this.buildMapFromFile( csvFilePath, this.solutionMap, 0, 2); this.buildMapFromFile( csvFilePath, this.ruleMap, 0, 3); this.buildMapFromFile( csvFilePath, this.templateStrings, 0, 1); @@ -31,9 +33,9 @@ class PrettyPrintDuplexStreamJson extends Transform { buildMapFromFile( filePath, map, keyColumnNumber, valueColumnNumber) { - const recArray = fs.readFileSync(filePath).toString().split('\n'); + const recArray = fs.readFileSync(filePath).toString().split('\n'); // populate a map with the key as the ERROR_CODE number and the value is the suggested solution - for( const i in recArray) { + for( const i in recArray) { const line = recArray[i].toString(); if (line.trim().startsWith('//') ){ continue; // skip lines that start with comment delimiter @@ -52,7 +54,7 @@ class PrettyPrintDuplexStreamJson extends Transform { default: return inName ; } } - + getUnqualifiedName ( inName ) { // take a name with '.' delimiters and return the last part if (inName === '') { return inName ; @@ -72,12 +74,20 @@ class PrettyPrintDuplexStreamJson extends Transform { return myECode ; } else { - myECode = result[1].trim(); + myECode = result[1].trim(); } } return myECode ; } + getLevel( level ) { + if (level === null) { + return 'INFO'; + } + const levelStr = nameFromLevel[level]; + return levelStr ? 
levelStr.toUpperCase() : 'INFO'; + } + getAttributeOrEmptyString( myPart) { // guard against undefined or null attributes if (myPart == null) { return '' ; @@ -97,20 +107,20 @@ class PrettyPrintDuplexStreamJson extends Transform { return hashValue ; } const parts = semicolonDelimitedKeyList.split(';'); - + for (let i=0; i < parts.length; i++) { let curKey = parts[i].trim(); // since errorCode not in json attributes we have to special case this if (curKey === 'errorNumber' ) { - hashValue += errorCode + '$'; + hashValue += errorCode + '$'; } else { curKey = jsonObj[ parts[i].trim() ]; if (curKey === undefined) { console.log('undefined JSON atttribute ' + parts[i].trim()); } - hashValue += curKey + '$'; + hashValue += curKey + '$'; } - } + } } return hashValue; } @@ -121,7 +131,7 @@ class PrettyPrintDuplexStreamJson extends Transform { if (myTemplate != null) { const myMatches = myTemplate.match(templateRegex); if (myMatches != null) { - + for (let i=0; i < myMatches.length; i++) { const strToReplace = myMatches[i].toString(); const myKey = strToReplace.replace(/\{/g, '').replace(/\}/g, '').replace(/\$/g, ''); @@ -134,20 +144,20 @@ class PrettyPrintDuplexStreamJson extends Transform { } } } - } + } return template ; } - + // this function processes a single error message and returns a colorized, formatted string // for the moment, it writes the original input message in red to console.log processLine(myinline, printAllErrors) { - + if (printAllErrors !== false) { - console.log( originalErrorColor (myinline )); + console.log( originalErrorColor (myinline )); } const myJson = JSON.parse(myinline); // convert String to object, then grab attributes const jsonKeys = Object.keys( myJson); - const jMsg = this.getAttributeOrEmptyString( myJson.msg ); + const jMsg = this.getAttributeOrEmptyString( myJson.msg ); const modulePart = this.getAttributeOrEmptyString( myJson.module); //grab module const eCode = this.parseErrorCode( /([\d]{5})\s*/, jMsg); //extract ERROR_CODE:ddddd from msg attribute;eCode = noErrorCode if not found; let detailMsg = ''; @@ -157,19 +167,22 @@ class PrettyPrintDuplexStreamJson extends Transform { detailMsg = this.processTemplate(jsonKeys, myTemplate, myJson ) ; } else { - console.log( errorCodeColor(' Message is missing errorCode; no template found. 
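For context on the getLevel helper added above: bunyan records carry numeric levels, and bunyan's exported nameFromLevel table maps them back to level names. A minimal stand-alone sketch of the lookup this helper performs (standard bunyan levels assumed):

const { nameFromLevel } = require('bunyan');

// bunyan's standard levels: 10 trace, 20 debug, 30 info, 40 warn, 50 error, 60 fatal
function getLevelName(level) {
  if (level == null) {
    return 'INFO';                    // default when the record carries no level
  }
  const name = nameFromLevel[level];  // undefined for non-standard numbers
  return name ? name.toUpperCase() : 'INFO';
}

console.log(getLevelName(50)); // 'ERROR'
console.log(getLevelName(30)); // 'INFO'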
Default error code '+ eCode + ':')); + console.log( errorCodeColor('Message is missing errorCode; no template found: ' + myinline)); return ''; - } - + } + + const level = this.getLevel( myJson.level ); //grab level const shrIdPart = this.getAttributeOrEmptyString( myJson.shrId ); //grab shrId const mappingRulePart = this.getAttributeOrEmptyString( myJson.mappingRule ); //grab mappingRule const targetPart = this.getAttributeOrEmptyString( myJson.target ); //grab targetPart const targetSpecPart = this.getAttributeOrEmptyString( myJson.targetSpec ); //grab targetSpec // now we have pieces; assemble the pieces into a formatted, colorized, multi-line message - let outline = errorCodeColor('\nERROR ' + eCode + ': ' + detailMsg ); // first part of new message is ERROR xxxxx: <> - outline += errorDetailColor ( '\n During: ' + this.translateNames( modulePart)) - + errorDetailColor( '\n Class: ' + this.getUnqualifiedName(this.translateNames( shrIdPart))); + let outline = errorCodeColor('\n' + level + ' ' + eCode + ': ' + detailMsg ); // first part of new message is ERROR xxxxx: <> + outline += errorDetailColor ( '\n During: ' + this.translateNames( modulePart)); // if parts are optional/missing, then only print them if they are found + if (shrIdPart !== '') { + outline += errorDetailColor( '\n Class: ' + this.getUnqualifiedName(this.translateNames( shrIdPart))); + } if ( targetSpecPart != '') { outline += errorDetailColor( '\n Target Spec: ' + this.translateNames(this.targetSpecPart)); } @@ -184,12 +197,12 @@ class PrettyPrintDuplexStreamJson extends Transform { if ( suggestedFix != null ) { suggestedFix = suggestedFix.replace(/'/g, '').trim() ; // only print suggested fix if it's available from the resource file (i.e. in the solutionMap) - if (suggestedFix !== 'Unknown' && suggestedFix !== '') { + if (suggestedFix !== 'Unknown' && suggestedFix !== '') { outline += errorDetailColor( '\n Suggested Fix: ' + suggestedFix.trim() + '\n' ); } } const myDedupHashKey = this.buildHashKey( eCode, this.ruleMap, myJson ) ; - + if (myDedupHashKey === '') { // if you have no keys for deduplication in errorMessages.txt for thisd, print everything return outline ; } @@ -205,7 +218,9 @@ class PrettyPrintDuplexStreamJson extends Transform { _transform(chunk, encoding, callback) { const ans = this.processLine(chunk, false); - console.log( ans ); + if (ans.length > 0) { + console.log( ans ); + } callback(); } } diff --git a/app.js b/app.js index dd6b392..fd3b14a 100644 --- a/app.js +++ b/app.js @@ -3,7 +3,6 @@ const path = require('path'); const mkdirp = require('mkdirp'); const bunyan = require('bunyan'); const program = require('commander'); -const bps = require('@ojolabs/bunyan-prettystream'); const { sanityCheckModules } = require('shr-models'); const shrTI = require('shr-text-import'); const shrEx = require('shr-expand'); @@ -31,7 +30,7 @@ let input; program .usage(' [options]') .option('-l, --log-level ', 'the console log level ', /^(fatal|error|warn|info|debug|trace)$/i, 'info') - .option('-m, --log-mode ', 'the console log mode ', /^(short|long|json|off)$/i, 'short') + .option('-m, --log-mode ', 'the console log mode ', /^(normal|json|off)$/i, 'normal') .option('-s, --skip ', 'skip an export feature ', collect, []) .option('-o, --out ', `the path to the output folder`, path.join('.', 'out')) .option('-c, --config ', 'the name of the config file', 'config.json') @@ -94,26 +93,14 @@ mkdirp.sync(program.out); const PrettyPrintDuplexStreamJson = require('./PrettyPrintDuplexStreamJson'); const mdpStream = new 
PrettyPrintDuplexStreamJson(); -//invoke the regex stream processor -const PrettyPrintDuplexStream = require('./PrettyPrintDuplexStream'); -const mdpStreamTxt = new PrettyPrintDuplexStream(); - // Set up the logger streams const [ll, lm] = [program.logLevel.toLowerCase(), program.logMode.toLowerCase()]; const streams = []; -if (lm == 'short' || lm == 'long') { - const prettyStdOut = new bps({mode: lm}); - // use the regex stream processor on the text stream - prettyStdOut.pipe(mdpStreamTxt); - mdpStreamTxt.pipe(process.stdout); - streams.push({ level: ll, type: 'raw', stream: prettyStdOut}); -} else if (lm == 'json') { - const printRawJson = false; - if (printRawJson) { - streams.push({ level: ll, stream: process.stdout }); - } +if (lm == 'normal') { streams.push({ level: ll, stream: mdpStream }); mdpStream.pipe(process.stdout); +} else if (lm == 'json') { + streams.push({ level: ll, stream: process.stdout }); } // Setup a ringbuffer for counting the number of errors at the end const logCounter = new LogCounter(); @@ -122,6 +109,7 @@ streams.push({ level: 'warn', type: 'raw', stream: logCounter}); streams.push({ level: 'trace', path: path.join(program.out, 'out.log') }); const logger = bunyan.createLogger({ name: 'shr', + module: 'shr-cli', streams: streams }); @@ -144,7 +132,9 @@ if (doDD) { } // Go! -logger.info('Starting CLI Import/Export'); +// 05001, 'Starting CLI Import/Export',, +logger.info('05001'); + let configSpecifications = shrTI.importConfigFromFilePath(input, program.config); if (!configSpecifications) { process.exit(1); @@ -224,7 +214,7 @@ if (doCIMCORE) { mkdirp.sync(path.dirname(hierarchyPath)); fs.writeFileSync(hierarchyPath, JSON.stringify(out, null, ' ')); } catch (error) { - //logger.error('Unable to successfully serialize namespace meta information %s into CIMCORE, failing with error "%s". ERROR_CODE:15004', namespace, error); \ + // 15004, 'Unable to successfully serialize ${nameSpace} meta information ${} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' logger.error({nameSpace: namespace, errorText: error }, '15004' ); } } @@ -241,7 +231,7 @@ if (doCIMCORE) { mkdirp.sync(path.dirname(hierarchyPath)); fs.writeFileSync(hierarchyPath, JSON.stringify(out, null, ' ')); } catch (error) { - //logger.error('Unable to successfully serialize element %s into CIMCORE, failing with error "%s". ERROR_CODE:15001', de.identifier.fqn, error); + // 15001, 'Unable to successfully serialize element ${identifierName} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' logger.error({identifierName: de.identifier.fqn, errorText: error }, '15001'); } } @@ -258,7 +248,7 @@ if (doCIMCORE) { mkdirp.sync(path.dirname(hierarchyPath)); fs.writeFileSync(hierarchyPath, JSON.stringify(out, null, ' ')); } catch (error) { - //logger.error('Unable to successfully serialize value set %s into CIMCORE, failing with error "%s". ERROR_CODE:15002', vs.identifier.fqn, error); + // 15002, 'Unable to successfully serialize value set ${valueSet} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' logger.error({valueSet:vs.identifier.fqn, errorText: error}, '15002'); } } @@ -276,18 +266,18 @@ if (doCIMCORE) { mkdirp.sync(path.dirname(hierarchyPath)); fs.writeFileSync(hierarchyPath, JSON.stringify(out, null, ' ')); } catch (error) { - //logger.error('Unable to successfully serialize mapping %s into CIMCORE, failing with error "%s". 
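To summarize how the reworked app.js wires the JSON pretty-printer into bunyan, here is a condensed sketch of the hunk above (option handling trimmed; names follow the diff). In 'normal' mode bunyan serializes each record as JSON into the Transform stream, which formats it and pipes the colorized text to stdout; 'json' mode sends raw bunyan JSON to stdout; 'off' adds no console stream.

const bunyan = require('bunyan');
const PrettyPrintDuplexStreamJson = require('./PrettyPrintDuplexStreamJson');

const mdpStream = new PrettyPrintDuplexStreamJson();
const streams = [];
const logMode = 'normal';   // from --log-mode (normal|json|off)
const logLevel = 'info';    // from --log-level

if (logMode === 'normal') {
  streams.push({ level: logLevel, stream: mdpStream });      // bunyan -> pretty-printer
  mdpStream.pipe(process.stdout);                             // pretty-printer -> console
} else if (logMode === 'json') {
  streams.push({ level: logLevel, stream: process.stdout });  // raw JSON records
}

const logger = bunyan.createLogger({ name: 'shr', module: 'shr-cli', streams: streams });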
ERROR_CODE:15003', mapping.identifier.fqn, error); + // 15003, 'Unable to successfully serialize mapping ${mappingIdentifier} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' logger.error({mappingIdentifier:mapping.identifier.fqn, errorText:error },'15003'); } } } } catch (error) { - //logger.fatal('Failure in CIMCORE export. Aborting with error message: %s', error); - logger.fatal({errorText: JSON.stringify(error) },'15100'); + // 15005, 'Failure in CIMCORE export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' + logger.fatal({errorText: JSON.stringify(error) },'15005'); failedExports.push('CIMCORE'); } } else { - logger.info('Skipping CIMCORE export'); + logger.info('05003'); } if (doDD) { @@ -295,11 +285,13 @@ if (doDD) { const hierarchyPath = path.join(program.out, 'data-dictionary'); shrDD.generateDDtoPath(expSpecifications, configSpecifications, hierarchyPath); } catch (error) { - logger.fatal('Failure in Data Dictionary export. Aborting with error message: %s', error); + // 15006, 'Failure in data dictionary export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' + logger.fatal({ errorText: JSON.stringify(error) }, '15006'); failedExports.push('shr-data-dict-export'); } } else { - logger.info('Skipping Data Dictionary export'); + // 05004, 'Skipping Data Dictionary export',, + logger.info('05004'); } let fhirResults = null; @@ -323,11 +315,12 @@ if (doES6) { }; handleNS(es6Results, es6Path); } catch (error) { - logger.fatal('Failure in ES6 export. Aborting with error message: %s', error); + // 15007, 'Failure in ES6 export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' + logger.fatal({ errorText: JSON.stringify(error) }, '15007'); failedExports.push('shr-es6-export'); } } else { - logger.info('Skipping ES6 export'); + logger.info('05005'); } @@ -362,11 +355,13 @@ if (doFHIR) { fs.writeFileSync(path.join(baseFHIRPath, `shr_qa.html`), fhirResults.qaHTML); shrFE.exportIG(expSpecifications, fhirResults, path.join(baseFHIRPath, 'guide'), configSpecifications, input); } catch (error) { - logger.fatal('Failure in FHIR export. Aborting with error message: %s', error); + // 15008, 'Failure in FHIR export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' + logger.fatal({ errorText: JSON.stringify(error) }, '15008'); failedExports.push('shr-fhir-export'); } } else { - logger.info('Skipping FHIR export'); + // 05006, 'Skipping FHIR export',, + logger.info('05006'); } if (doJSONSchema) { @@ -398,11 +393,13 @@ if (doJSONSchema) { // } } catch (error) { - logger.fatal('Failure in JSON Schema export. Aborting with error message: %s', error); + // 15009, 'Failure in JSON Schema export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' + logger.fatal({ errorText: JSON.stringify(error) }, '15009'); failedExports.push('shr-json-schema-export'); } } else { - logger.info('Skipping JSON Schema export'); + // 05007, 'Skipping JSON Schema export',, + logger.info('05007'); } if (doModelDoc) { @@ -417,25 +414,27 @@ if (doModelDoc) { shrJDE.exportToPath(igJavadocResults, fhirPath); } } catch (error) { - logger.fatal('Failure in Model Doc export. Aborting with error message: %s', error); + // 15010, 'Failure in Model Doc export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' + logger.fatal({ errorText: JSON.stringify(error) }, '15010'); failedExports.push('shr-model-doc'); } } else { - logger.fatal('CIMCORE is required for generating Model Doc. 
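The converted logger calls above follow the new logging convention: the message argument is just a five-digit error number, and everything the message needs is passed as named fields. PrettyPrintDuplexStreamJson looks the number up in errorMessages.txt and fills the ${field} placeholders from those fields. A minimal stand-alone sketch of that substitution (the real processTemplate differs in detail; example values are invented):

// Call site in app.js, as in the hunk above:
//   logger.error({ nameSpace: namespace, errorText: error }, '15004');
// Template modeled on the 15004 row of errorMessages.txt:
const template = 'Unable to successfully serialize ${nameSpace} meta information into CIMCORE, failing with error ${errorText}';

function fillTemplate(tpl, fields) {
  // replace each ${name} placeholder with the matching record field, if present
  return tpl.replace(/\$\{(\w*)\}/g, (match, key) => (key && fields[key] != null ? String(fields[key]) : ''));
}

console.log(fillTemplate(template, { nameSpace: 'shr.core', errorText: 'TypeError: x is undefined' }));
// -> Unable to successfully serialize shr.core meta information into CIMCORE, failing with error TypeError: x is undefined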
Skipping Model Docs export.'); + // 15011, 'CIMCORE is required for generating Model Doc. Skipping Model Docs export.', 'Do not skip CIMCORE if Model Doc should be generated', 'errorNumber' + logger.fatal('15011'); failedExports.push('shr-model-doc'); } } else { - logger.info('Skipping Model Docs export'); + // 05008, 'Skipping Model Docs export',, + logger.info('05008'); } -logger.info('Finished CLI Import/Export'); +logger.info('05002'); const ftlCounter = logCounter.fatal; const errCounter = logCounter.error; const wrnCounter = logCounter.warn; let [errColor, errLabel, wrnColor, wrnLabel, resetColor, ftlColor, ftlLabel] = ['\x1b[32m', 'errors', '\x1b[32m', 'warnings', '\x1b[0m', '\x1b[31m', 'fatal errors']; if (ftlCounter.count > 0) { - // logger.fatal(''); ftlLabel = `fatal errors (${failedExports.join(', ')})`; } if (errCounter.count > 0) { diff --git a/errorMessages.txt b/errorMessages.txt index cad1794..bf0ab55 100644 --- a/errorMessages.txt +++ b/errorMessages.txt @@ -1,16 +1,75 @@ Number, Message, Solution, deduplicationKeys -// test 01001, 'No project configuration file found - currently using default EXAMPLE identifiers. Auto-generating a proper config.json in your specifications folder', 'Open the config.json file and customize it for your project.', 'errorNumber' -01002, 'Config file missing key: ${key} using default key: ${defaultValue} instead.', ' Open the config.json file and add your project specific details for that key.', 'errorNumber' -02001, 'Potentially mismatched targets: ${class} maps to ${item} but based on class (${class}) maps to ${item} and ${item} is not based on ${element} in ${class}. ', 'You're overwriting an inherited mapping. This is not necessarily an issue but is definitely something to be cautious of.', 'errorNumber' -03001, 'Trying to map ${profile} to ${code} but ${profile} was previously mapped to it', 'Unknown', 'errorNumber' - // test +01002, 'Config file missing key: ${key} using default value: ${defaultValue} instead.', ' Open the config.json file and add your project specific details for that key.', 'errorNumber' +01003, 'Unknown Filetype: ${fileType}',, +01004, 'Start importing content profile file',, +01005, 'Done importing content profile file',, +01006, 'Entered content profile file',, +01007, 'Exiting content profile file',, +01008, 'Start importing data elements file',, +01009, 'Done importing data elements file',, +01010, 'Start importing data element',, +01011, 'Done importing data element',, +01012, 'Using config file ${configFile}',, +01013, 'Start importing mapping file',, +01014, 'Done importing mapping file',, +01015, 'Start importing namespace mapping',, +01016, 'Done importing namespace mapping',, +01017, 'Start importing element mapping',, +01018, 'Done importing element mapping',, +01019, 'Configuration file '${oldPropertyName}' field will be deprecated. Use 'implementationGuide.${igPropertyName}' instead.', 'Replace old propery path with new property path', +01020, 'Start preprocessing data elements file',, +01021, 'Done preprocessing data elements file',, +01022, 'Start importing value set file',, +01023, 'Done importing value set file',, +02001, 'Potentially mismatched targets: ${class} maps to ${item} but based on class (${baseClass}) maps to ${baseItem} and ${item} is not based on ${baseItem} in ${class}. ', 'You're overwriting an inherited mapping. 
This is not necessarily an issue but is definitely something to be cautious of.', 'errorNumber' +02002, 'Ignoring TBD parent ${tbdText} for child element ${child}.', 'Unknown', 'errorNumber' +02003, 'Start expanding element',, +02004, 'Done expanding element',, +02005, 'Start expanding mapping',, +02006, 'Done expanding mapping',, +03001, 'Trying to map ${profile} to ${code} but ${otherProfile} was previously mapped to it', 'Unknown', 'errorNumber' 03002, 'Choice has equivalent types so choice options may overwrite or override each other when mapped to FHIR.', 'Unknown', 'errorNumber' 03003, 'Overriding extensible value set constraint from ${vs1} to ${vs2}. Only allowed when new codes do not overlap meaning of old codes.', 'Unknown', 'errorNumber' 03004, 'Element profiled on Basic. Consider a more specific mapping.', 'The Basic profile should not be used in most cases. Consider a more specific profile mapping that categorizes the Element being mapped.', 'errorNumber' 03005, 'No mapping to '${elementPath}'. This property is core to the target resource and usually should be mapped.', 'Unknown', 'errorNumber' 03006, 'The ${property} property is not bound to a value set fixed to a code or fixed to a quantity unit. This property is core to the target resource and usually should be constrained ', 'Unknown', 'errorNumber' +03007, 'Exporting FHIR using target: ${target}',, +03008, 'Unmapped fields in [ ${elements} ]:\n${fields}', 'Map fields to FHIR properties or to extensions', 'errorNumber' +03009, 'Profile Indicators JSON: ${profileIndicators}',, +03010, 'Start mapping element',, +03011, 'Done mapping element',, +03012, 'Start mapping rule',, +03013, 'Done mapping rule',, +03014, 'Nested include types are currently not supported when applied to extensions', 'Unknown', 'errorNumber' +03015, 'Start mapping extension',, +03016, 'Done mapping extension',, +03017, 'Deep path mapped to extension URL, but extension placed at root level.',, +03018, 'Abstract Class: The \'${property}\' property is not bound to a value set or fixed to a code. This property is core to the target resource and usually should be constrained.', 'Constrain the property if possible', 'errorNumber' +03019, 'Start creating extension',, +03020, 'Done creating extension',, +03021, 'HL7 IGs require a package-list.json file. A starter version has been written to ${packageListFile}. 
For more information on package files, see: http://wiki.hl7.org/index.php?title=FHIR_IG_PackageList_doco', 'Edit the packageListFile', 'errorNumber' +03022, 'WARNING: Overwriting generated profile with patch: ${patchFile}',, +03023, 'Start exporting element logical model',, +03024, 'Done exporting element logical model',, 04001, 'Unsupported code system: ${codeSystem}', 'Unknown', 'errorNumber' +05001, 'Starting CLI Import/Export',, +05002, 'Finished CLI Import/Export',, +05003, 'Skipping CIMCORE export',, +05004, 'Skipping Data Dictionary export',, +05005, 'Skipping ES6 export',, +05006, 'Skipping FHIR export',, +05007, 'Skipping JSON Schema export',, +05008, 'Skipping Model Docs export',, +06001, 'Start generating class',, +06002, 'Done generating class',, +07001, 'Unknown constraint type ${constraintType}.', 'Unknown', 'errorNumber' +07002, 'Compiling Documentation for ${count} namespaces...',, +07003, 'Building documentation pages for ${count} elements...',, +08001, 'Exporting namespace.',, +08002, 'Finished exporting namespace.',, +08003, 'Exporting element',, +08004, 'Cannot enforce constraint ${constraint} on Element ${elementFqn} since ${path} refers to a type introduced by an "includesType" constraint',, 'errorNumber' 11001, 'Element name '${name}' should begin with a capital letter', 'Rename the specified Element', 'errorNumber' 11002, 'Entry Element name '${name}' should begin with a capital letter', 'Rename the specified EntryElement', 'errorNumber' 11003, 'Unable to resolve value set reference: ${valueSet}', 'Invalid value set reference double check the name and the path', 'errorNumber' @@ -36,7 +95,6 @@ Number, Message, Solution, deduplicationKeys 11023, 'extraneous input ${input} expecting ${listOfKeywords}', 'This is usually a typo issue. Investigate spelling and keywords used around the specificied text input.', 'errorNumber' 11024, 'Elements cannot use ${value} modifier and specify ${value} field at same time.', 'Unknown', 'errorNumber' 11025, 'Fields cannot be constrained to type ${value} ', 'Unknown', 'errorNumber' -11026, 'ref( ${value} ) is an unsupported construct; treating as ${value} without the reference.', 'Unknown', 'errorNumber' 11027, 'Unable to import property ${fqn1} unknown value type: ${valueType1}', 'The type either does not exist or the import tool needs to be updated.', 'errorNumber' 11028, 'Unable to import unknown constraint type: ${constraintType1} ', 'The type either does not exist or the import tool needs to be updated.', 'errorNumber' 11029, 'Unable to import mapping. 
Unknown rule type: ${ruleType}', 'The type either does not exist or the import tool needs to be updated.', 'errorNumber' @@ -49,6 +107,17 @@ Number, Message, Solution, deduplicationKeys 11036, 'Path not found for ${identifier}: ${path}', 'Unknown', 'errorNumber' 11037, 'Definition not found for data element in content profile path: ${cpProfilePath}', 'Unknown', 'errorNumber' 11038, 'Could not find content profile file: ${cpFile}', 'Unknown', 'errorNumber' +11039, 'Grammar declaration not found', 'Add Grammar declaration at top of file', 'errorNumber' +11040, 'Property "${name}" already exists.', 'Remove or rename redundant property', 'errorNumber' +11041, 'Choice value constrained without specifying the specific choice', 'Specify the choice to constrain using []', 'errorNumber' +11042, 'Constraint refers to previous identifier', 'Unknown', 'errorNumber' +11043, 'Value should not declare cardinality', 'Remove cardinality from value declaration', 'errorNumber' +11044, 'Missing a value element', 'Unknown', 'errorNumber' +11045, 'Invalid file ${fileName1} ', 'Unknown', 'errorNumber' +11046, 'Invalid config file: ${configFilename1} ', 'Unknown', 'errorNumber' +11047, 'Resolution error ${errorText} ', 'Unknown', 'errorNumber' +11048, 'Cannot resolve element definition for ${elementFqn}', 'Unknown', 'errorNumber' +11049, 'Namespace declaration not found', 'Add Namespace declaration to file', 'errorNumber' 12001, 'Cannot resolve element definition.', 'Element doesn't exist. Double check spelling and inheritance', 'errorNumber' 12002, 'Reference to non-existing base: ${elementName1}', 'Base doesn't exist. Double check spelling and inheritance.', 'errorNumber' 12003, 'No cardinality found for value: ${value1}', 'Explicitly define cardinality for that value.', 'errorNumber' @@ -69,10 +138,10 @@ Number, Message, Solution, deduplicationKeys 12020, 'Cardinality of ${name1} not found. 
Please explicitly define the cardinality.', 'Unknown', 'errorNumber' 12021, 'Cannot include cardinality on ${name1} cardinality of ${card1} doesnt fit within ${card2}', 'The cardinality of included parameters must be as narrow or narrower than the property it contains.', 'errorNumber' 12022, 'Cannot constrain valueset of ${name1} since it has no identifier', 'Unknown', 'errorNumber' -12023, 'Cannot constrain valueset of ${name1} since neither it nor its value is a code Coding or CodeableConcept', 'Unknown', 'errorNumber' +12023, 'Cannot constrain valueset of ${name1} since neither it nor its value is a concept', 'Unknown', 'errorNumber' 12024, 'Cannot constrain valueset of ${name1} since it is already constrained to a single code', 'Unknown', 'errorNumber' -12025, 'Cannot constrain code of ${name1} since neither it nor its value is a code based on a Coding or based on CodeableConcept', 'Unknown', 'errorNumber' -12026, 'Cannot constrain included code of ${name1} since neither it nor its value is a code based on a Coding or based on CodeableConcept', 'Unknown', 'errorNumber' +12025, 'Cannot constrain code of ${name1} since neither it nor its value is a concept', 'Unknown', 'errorNumber' +12026, 'Cannot constrain included code of ${name1} since neither it nor its value is a concept', 'Unknown', 'errorNumber' 12027, 'Cannot constrain boolean value of ${name1} since neither it nor its value is a boolean' , 'Unknown', 'errorNumber' 12028, 'Cannot constrain boolean value of ${name1} to ${value1} since a previous constraint constrains it to ${value2}' , 'Unknown', 'errorNumber' 12029, 'Cannot resolve element definition for ${name1}', 'This is due to a incomplete definition for an element. Please refer to the document for proper definition syntax.', 'errorNumber' @@ -148,9 +217,9 @@ Number, Message, Solution, deduplicationKeys 13059, 'Cannot fix ${target1} to ${value1} since it is already fixed to ${otherValue1}', 'Unknown', 'errorNumber' 13060, 'Could not determine how to map nested value (${elementPath}) to FHIR profile.', 'Occurs on the FHIR profile export when there are multiple levels of reference specified. e.g.: LaboratoryObservationTopic.Specimen.CollectionSite within LaboratoryObservationTopic. Resolved by creating a reference in the cimi entity to FHIR map in cimi_entity_map.txt.', 'errorNumber' 13061, 'Mapping ${pathName1} sub-fields is currently not supported.', 'Unknown', 'errorNumber' -13063, 'Could not find FHIR element with ${path1} %s for content profile rule with ${path2}', 'Unknown', 'errorNumber' -13064, 'Could not find FHIR element for content profile rule with path ${path}', 'Unknown', 'errorNumber' -13065, 'Could not find FHIR element subextension for content profile rule with path ${path}', 'Unknown', 'errorNumber' +13063, 'Could not find FHIR element with ${path1} %s for content profile rule with ${path2}', 'Unknown', 'errorNumber' +13064, 'Could not find FHIR element for content profile rule with path ${path}', 'Unknown', 'errorNumber' +13065, 'Could not find FHIR element subextension for content profile rule with path ${path}', 'Unknown', 'errorNumber' 13070, 'Cannot make choice element explicit at ${id1}. Invalid identifier: ${id2}. ', 'Unknown', 'errorNumber' 13071, 'Cannot make choice element explicit at ${element1}. Could not find compatible type match for: ${element2}.', 'Unknown', 'errorNumber' 13072, 'Target path ${targetPath1} and slice path ${slicePath1} are not compatible. 
', 'Unknown', 'errorNumber' @@ -158,11 +227,6 @@ Number, Message, Solution, deduplicationKeys 13074, 'Could not find element to slice at ${slicePath1}', 'Unknown', 'errorNumber' 13075, 'Cannot create slice since there is no slice-on command.', 'Unknown', 'errorNumber' 13076, 'Unsupported choices in fields', 'Unknown', 'errorNumber' -13077, 'Invalid file ${fileName1} ', 'Unknown', 'errorNumber' -13078, 'Invalid config file: ${configFilename1} ', 'Unknown', 'errorNumber' -13079, 'Resolution error ${errorText} ', 'Unknown', 'errorNumber' -13080, 'Error rendering model doc: ${errorText}', 'Unknown', 'errorNumber' -13081, 'Error copying files for export of model doc: ${errorText}', 'Unknown', 'errorNumber' 13082, 'Unexpected error exporting element to FHIR Logical Model. ${errorText} ', 'Unknown', 'errorNumber' 13083, 'Could not find based on element ${element1} for child element ${element2}. ', 'Unknown', 'errorNumber' 13084, 'Could not find expanded definition of shr.base.Entry. Inheritance calculations for ${identifier1} will be incomplete.', 'Unknown', 'errorNumber' @@ -171,42 +235,68 @@ Number, Message, Solution, deduplicationKeys 13087, 'Cannot resolve target of card constraint on ${target1} ', 'Unknown', 'errorNumber' 13088, 'Invalid constraint path: ${target1}', 'Unknown', 'errorNumber' 13089, 'Cannot determine target item of mapping for ${identifier1}', 'Unknown', 'errorNumber' -13090, 'Clashing property names: ${name1} and ${name2} ', 'Unknown', 'errorNumber' -13091, 'Ignoring field defined as a choice: ${field1}', 'Unknown', 'errorNumber' -13092, 'Ignoring name-less field: ${field1} ', 'Unknown', 'errorNumber' -13093, 'Ignoring restricted field name: Value : ${field1} ', 'Unknown', 'errorNumber' -13094, 'Choices with options with cardinalities that are not exactly one are illegal ${value1}. Ignoring option ${option1}', 'Unknown', 'errorNumber -13095, 'Unsupported Incomplete', 'Unknown', 'errorNumber' -13096, 'Unknown type for value ${value1} ', 'Unknown', 'errorNumber' -13097, 'Could not find definition for ${supertype1} which is a supertype of ${def1}', 'Unknown', 'errorNumber' -13098, 'Internal error unexpected constraint target: ${target1} for constraint ${constraint1}'A, 'Unknown', 'errorNumber' -13099, 'Multiple valueset constraints found on a single element ${element1}', 'Unknown', 'errorNumber' -13100, 'Multiple code constraints found on a single element ${element1} ', 'Unknown', 'errorNumber' -13101, 'Internal error: unhandled constraint ${constraint1} ', 'Unknown', 'errorNumber' -13102, 'Encountered a constraint path containing a primitive ${pathId1} at index ${index1} that was not the leaf: ${constraint1} ', 'Unknown', 'errorNumber' -13103, 'Encountered a constraint path with a primitive leaf ${pathId1} on an element that lacked a value: ${constraint1} ', 'Unknown', 'errorNumber' -13104, 'Encountered a constraint path with a primitive leaf ${pathId1} on an element with a mismatched value: ${constraint1} on valueDef ${valueDef1} ', 'Unknown', 'errorNumber' -13105, 'Encountered a constraint path with a primitive leaf ${pathId1} on an element with a mismatched value: ${constraint1} on valueDef ${valueDef1} ', 'Unknown', 'errorNumber' -13106, 'Cannot resolve element definition for ${pathId1} on constraint ${constraint1}. 
', 'Unknown', 'errorNumber' -13107, 'Element ${element1} lacked any fields or a value that matched ${pathId1} as part of constraint ${constraint1} ', 'Unknown', 'errorNumber' -13108, 'Element ${element1} lacked a field or a value that matched ${pathId1} as part of constraint ${constraint1} ', 'Unknown', 'errorNumber' -13109, 'Encountered an unnormalized constraint path containing a primitive ${pathId1} at index ${index1}: ${constraint1} ', 'Unknown', 'errorNumber' -13110, 'Element ${element1} lacked any fields or a value that matched ${pathId1} as part of constraint ${constraint1} ', 'Unknown', 'errorNumber' -13111, 'Target of an unnormalized constraint: ${target1} does not have a value. Constraint is: ${constraint1} ', 'Unknown', 'errorNumber' -13112, 'Constraint should not be on the value (except for choices): ${target1} in an expanded object model ${constraint1}. Ignoring constraint.', 'Unknown', 'errorNumber' -13113, 'Target of an unnormalized constraint: ${constraint1} was a choice value that did not have a valid option: ${identifier1} ', 'Unknown', 'errorNumber' +13114, 'Cannot create extension with value[x] type: ${type}', 'Check value to ensure it is a primitive, maps to a FHIR datatype, or is an Entry (which allows references)', 'errorNumber' +13115, 'Couldn\'t identify appropriate Extension.value[x] value for ${elementId}. Is it an attempt to reference a non-Entry?', 'Check value to ensure it is a primitive, maps to a FHIR datatype, or is an Entry (which allows references)', 'errorNumber' +13116, 'Package-id '${packageId}' (found in package-list.json) does not match npm-name '${npmName}' from config file', 'Fix packageId in package-list.json file', 'errorNumber' +13117, 'Canonical URL '${packageUrl}' (found in package-list.json) does not match canonical URL '${canonicalUrl}' from config file', 'Fix canonical URL in package-list.json file', 'errorNumber' +13118, 'The package-list.json file must have at least one listed publication object', 'Add at least one publication object.', 'errorNumber' +13119, 'The first publication object in the package-list.json list must be the CI build (with "version":"curent", "status":"ci-build", and "current":true)', 'Add the CI Build entry to the publication objects in package-list.json', 'errorNumber' +13120, 'The package-list.json contains the invalid (placeholder) path for the CI build: ${ciPath}', 'Update the path to the CI build with the real CI build URL', 'errorNumber' +13121, 'Namespace strategy requires config.implementationGuide.primarySelectionStrategy.primary to be an array', 'Update the config file so that config.implementationGuide.primarySelectionStrategy.primary is an array', 'errorNumber' +13122, 'Hybrid strategy requires config.implementationGuide.primarySelectionStrategy.primary to be an array', 'Update the config file so that config.implementationGuide.primarySelectionStrategy.primary is an array', 'errorNumber' +13123, 'Specified extraResources path is not valid: ${path}', 'Fix or remove the implementationGuide.extraResources value in the config to be a valid path', 'errorNumber' +13124, 'Invalid extra resource. Extra resource JSON must include id and resourceType properties: ${resourcePath}.', 'Add id and/or resourceType to the resource', 'errorNumber' +13125, 'Invalid extra resource. IG is for FHIR ${igVersion}, but resource is for FHIR ${resourceVersion}: ${resourcePath}', 'Replace resource with resource using same FHIR version as the IG', 'errorNumber' +13126, 'Invalid extra resource. 
Only the following resource types are currently supported: StructureDefinition, ValueSet, CodeSystem, SearchParameter, OperationDefinition, CapabilityStatement, Conformance. Found: ${resourceType}.', 'Remove unsupported resource.', 'errorNumber' +13127, 'Invalid extra resource. Resource must be valid JSON: ${resourcePath}.', 'Remove invalid JSON resource.', 'errorNumber' +13128, 'Specified examples path is not valid: ${path}', 'Fix implementationGuide.examples config value to point to valid path', 'errorNumber' 14001, 'Unsupported value set rule type: ${vsRuleType}', 'Unknown', 'errorNumber' 14002, 'Unknown type for value ${value1}', 'Unknown', 'errorNumber' 14003, 'Unknown type for constraint ${constraint1}, 'Unknown', 'errorNumber' 14004, 'Unknown type for constraint ${constraint}, 'Unknown', 'errorNumber' -14005, 'Could not identify fixed value for: ${element1}', 'Unknown, 'errorNumber' -14006, 'Unable to find field with identifer ${elementId1} on element ${currDefId1}; Original Element: ${defId1}; full mapping: ${mapping1}; FHR Path: ${elementPath1} ID: ${elementId2}', 'Unknown, 'errorNumber' -14007, 'Value referenced in mapping but none exist on this element ${element1}', 'Unknown, 'errorNumber' -14008, 'Value referenced in mapping but none exist on this element ${element1}', 'Unknown, 'errorNumber' -14009, 'Cannot create proper inheritance tree w/ multiple based on elements. Using first element.', 'Unknown, 'errorNumber' 15001, 'Unable to successfully serialize element ${identifierName} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' 15002, 'Unable to successfully serialize value set ${valueSet} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' 15003, 'Unable to successfully serialize mapping ${mappingIdentifier} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' 15004, 'Unable to successfully serialize ${nameSpace} meta information ${} into CIMCORE, failing with error ${errorText}', 'Unknown, 'errorNumber' -15100, 'Failure in ES6 export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' +15005, 'Failure in CIMCORE export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' +15006, 'Failure in data dictionary export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' +15007, 'Failure in ES6 export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' +15008, 'Failure in FHIR export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' +15009, 'Failure in JSON Schema export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' +15010, 'Failure in Model Doc export. Aborting with error message: ${errorText}', 'Unknown, 'errorNumber' +15011, 'CIMCORE is required for generating Model Doc. 
Skipping Model Docs export.', 'Do not skip CIMCORE if Model Doc should be generated', 'errorNumber' 15101, 'Cumulative slice min cardinalities ${totalMinimum} exceed the max cardinality (${maxCard}) for the containing array at ${baseArray}.', 'Unknown, 'errorNumber' +16001, 'Could not identify fixed value for: ${element1}', 'Unknown, 'errorNumber' +16002, 'Unable to find field with identifer ${elementId1} on element ${currDefId1}; Original Element: ${defId1}; full mapping: ${mapping1}; FHR Path: ${elementPath1} ID: ${elementId2}', 'Unknown, 'errorNumber' +16003, 'Value referenced in mapping but none exist on this element ${element1}', 'Unknown, 'errorNumber' +16004, 'Value referenced in mapping but none exist on this element ${element1}', 'Unknown, 'errorNumber' +16005, 'Cannot create proper inheritance tree w/ multiple based on elements. Using first element.', 'Unknown, 'errorNumber' +16006, 'No profile to match on, for extension ${extensionId} element: ${elementIdOrPath}', 'Unknown', 'errorNumber' +16007, 'Unable to find matching extension with url ${profileUrl}', 'Unknown', 'errorNumber' +16008, 'Cannot resolve element definition for ${elementFqn}', 'Unknown', 'errorNumber' +17001, 'Error rendering model doc: ${errorText}', 'Unknown', 'errorNumber' +17002, 'Error copying files for export of model doc: ${errorText}', 'Unknown', 'errorNumber' +18001, 'Clashing property names: ${name1} and ${name2} ', 'Unknown', 'errorNumber' +18002, 'Ignoring field defined as a choice: ${field1}', 'Unknown', 'errorNumber' +18003, 'Ignoring name-less field: ${field1} ', 'Unknown', 'errorNumber' +18004, 'Ignoring restricted field name: Value : ${field1} ', 'Unknown', 'errorNumber' +18005, 'Choices with options with cardinalities that are not exactly one are illegal ${value1}. Ignoring option ${option1}', 'Unknown', 'errorNumber +18006, 'Unsupported Incomplete', 'Unknown', 'errorNumber' +18007, 'Unknown type for value ${value1} ', 'Unknown', 'errorNumber' +18008, 'Could not find definition for ${supertype1} which is a supertype of ${def1}', 'Unknown', 'errorNumber' +18009, 'Internal error unexpected constraint target: ${target1} for constraint ${constraint1}'A, 'Unknown', 'errorNumber' +18010, 'Multiple valueset constraints found on a single element ${element1}', 'Unknown', 'errorNumber' +18011, 'Multiple code constraints found on a single element ${element1} ', 'Unknown', 'errorNumber' +18012, 'Internal error: unhandled constraint ${constraint1} ', 'Unknown', 'errorNumber' +18013, 'Encountered a constraint path containing a primitive ${pathId1} at index ${index1} that was not the leaf: ${constraint1} ', 'Unknown', 'errorNumber' +18014, 'Encountered a constraint path with a primitive leaf ${pathId1} on an element that lacked a value: ${constraint1} ', 'Unknown', 'errorNumber' +18015, 'Encountered a constraint path with a primitive leaf ${pathId1} on an element with a mismatched value: ${constraint1} on valueDef ${valueDef1} ', 'Unknown', 'errorNumber' +18016, 'Encountered a constraint path with a primitive leaf ${pathId1} on an element with a mismatched value: ${constraint1} on valueDef ${valueDef1} ', 'Unknown', 'errorNumber' +18017, 'Cannot resolve element definition for ${pathId1} on constraint ${constraint1}. 
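For reference, each row of errorMessages.txt has four comma-separated columns: Number, Message (with ${field} placeholders filled from the bunyan record), Solution, and deduplicationKeys. The last column names the record attributes that PrettyPrintDuplexStreamJson combines into a key so repeated occurrences of the same error are printed only once; rows that leave it empty are never de-duplicated. A simplified sketch of that key building (modeled on buildHashKey; values are illustrative):

// keyList comes from the deduplicationKeys column, e.g. 'errorNumber' or 'errorNumber;target'
function buildDedupKey(errorCode, keyList, record) {
  return keyList.split(';')
    .map(k => (k.trim() === 'errorNumber' ? errorCode : record[k.trim()]))
    .join('$');
}

const seen = new Set();
const key = buildDedupKey('12029', 'errorNumber', { module: 'shr-expand' });
if (!seen.has(key)) {
  seen.add(key);   // first occurrence: print the formatted message
}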
', 'Unknown', 'errorNumber' +18018, 'Element ${element1} lacked any fields or a value that matched ${pathId1} as part of constraint ${constraint1} ', 'Unknown', 'errorNumber' +18019, 'Element ${element1} lacked a field or a value that matched ${pathId1} as part of constraint ${constraint1} ', 'Unknown', 'errorNumber' +18020, 'Encountered an unnormalized constraint path containing a primitive ${pathId1} at index ${index1}: ${constraint1} ', 'Unknown', 'errorNumber' +18021, 'Element ${element1} lacked any fields or a value that matched ${pathId1} as part of constraint ${constraint1} ', 'Unknown', 'errorNumber' +18022, 'Target of an unnormalized constraint: ${target1} does not have a value. Constraint is: ${constraint1} ', 'Unknown', 'errorNumber' +18023, 'Constraint should not be on the value (except for choices): ${target1} in an expanded object model ${constraint1}. Ignoring constraint.', 'Unknown', 'errorNumber' +18024, 'Target of an unnormalized constraint: ${constraint1} was a choice value that did not have a valid option: ${identifier1} ', 'Unknown', 'errorNumber' diff --git a/node_modules/shr-adl-bmm-export/lib/bmm/bmm-constructor.js b/node_modules/shr-adl-bmm-export/lib/bmm/bmm-constructor.js deleted file mode 100644 index 6d5f83a..0000000 --- a/node_modules/shr-adl-bmm-export/lib/bmm/bmm-constructor.js +++ /dev/null @@ -1,236 +0,0 @@ -/* -// /$$$$$$ /$$ /$$ -// /$$__ $$ | $$ | $$ -// | $$ \__/ /$$$$$$ /$$$$$$$ /$$$$$$$ /$$$$$$ /$$$$$$ /$$ /$$ /$$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$ -// | $$ /$$__ $$| $$__ $$ /$$_____/|_ $$_/ /$$__ $$| $$ | $$ /$$_____/|_ $$_/ /$$__ $$ /$$__ $$ -// | $$ | $$ \ $$| $$ \ $$| $$$$$$ | $$ | $$ \__/| $$ | $$| $$ | $$ | $$ \ $$| $$ \__/ -// | $$ $$| $$ | $$| $$ | $$ \____ $$ | $$ /$$| $$ | $$ | $$| $$ | $$ /$$| $$ | $$| $$ -// | $$$$$$/| $$$$$$/| $$ | $$ /$$$$$$$/ | $$$$/| $$ | $$$$$$/| $$$$$$$ | $$$$/| $$$$$$/| $$ -// \______/ \______/ |__/ |__/|_______/ \___/ |__/ \______/ \_______/ \___/ \______/ |__/ -// -// Formatter - BMM -// Abhijay Bhatnagar -// 05/01/18 -*/ -const bunyan = require('bunyan'); - -var rootLogger = bunyan.createLogger({name: 'shr-adl-export'}); -var logger = rootLogger; -function setLogger(bunyanLogger) { - rootLogger = logger = bunyanLogger; -} - -const formatId = (idArray, type) => { - return `[${type}${idArray.join('.')}]`; -}; - -const makeCamlCased = (string) => { - return string.charAt(0).toLowerCase() + string.slice(1); -}; - -function reformatNamespace(ns) { - return ns.split('.').map(partial=>partial.charAt(0).toUpperCase() + partial.slice(1)).join(''); -} - - -class BmmSpecs { - constructor(specs, config) { - this._specs = specs; - this._config = config; - this.bmmSpecs = { - packages: this.constructPackages(), - definitions: this.constructDefinitions() - }; - } - - get specs() { return this._specs; } - get config() { return this._config; } - - constructBmmSpecs() { - this.constructPackages(); - } - - constructPackages() { - const packages = {}; - for (const ns of this.specs.namespaces.all) { - const namespace = reformatNamespace(ns.namespace); - const elements = this.specs.dataElements.byNamespace(ns.namespace); - packages[namespace] = elements; - } - return packages; - } - - constructDefinitions() { - const definitions = {}; - for (const de of this.specs.dataElements.all) { - const name = de.identifier.name; - const properties = this.constructProperties(de); - definitions[name] = { - name: name, - documentation: de.description, - ancestors: de.basedOn, - }; - if (Object.keys(properties).length > 0) { - 
definitions[name].properties = properties; - } - } - return definitions; - } - - constructProperties(de) { - const properties = {}; - for (const f of de.fields.filter(v=>v.inheritance == null)) { - if (f.identifier !== null) { - - //TBD: currently skipping includes constraint... - if (f.constraintsFilter.includesType.hasConstraints || f.constraintsFilter.includesCode.hasConstraints) { - continue; - } - - const fDef = this.specs.dataElements.findByIdentifier(f.identifier); - - const documentation = fDef.description; - const name = makeCamlCased(f.identifier.name); - const type = f.identifier.name; - const p_bmm_type = 'P_BMM_SINGLE_PROPERTY'; - - if (f.identifier.namespace == 'primitive') { - const a = 'b'; - } - properties[name] = { - p_bmm_type: p_bmm_type, - name: name, - type: type, - documentation: documentation - }; - - if (f.effectiveCard.toString().charAt(0) == 1) { - const is_mandatory = 'True'; - properties[name].is_mandatory = is_mandatory; - } - - if (f.effectiveCard.toString() !== '0..1') { - const cardinality = f.effectiveCard; - properties[name].cardinality = cardinality; - } - } else if (f.constructor.name == 'ChoiceValue') { - //13076, 'Unsupported choices in fields' , 'Unknown' , 'errorNumber' - logger.error('13076'); - } - } - - if (de.value && de.value.inheritance == null) { - if (de.value.identifier) { - const v = de.value; - - const name = 'value'; - const p_bmm_type = 'P_BMM_SINGLE_PROPERTY'; - - properties[name] = { - p_bmm_type: p_bmm_type, - name: name, - }; - - if (v.identifier.namespace == 'primitive') { - let documentation = `PrimitiveValue (original type: ${v.identifier.name})`; - let type = 'Any'; - - type = v.identifier.name.toUpperCase(); - // const conversionTable = { - // code: 'CodedText', - // string: 'String', - // dateTime: 'DateTime', - // decimal: 'Quantity', - // uri: 'URI', - // boolean: 'Boolean', - // time: 'Time' - // }; - // if (v.identifier.name in conversionTable) { - // type = conversionTable[v.identifier.name]; - // } else { - // console.log('unhandled prmitive %s', v.identifier.name); - // documentation = `Unsupported Primitive ${v.identifier.name}`; - // type = 'CodedText'; - // } - properties[name].documentation = documentation; - properties[name].type = type; - } else { - const vDef = this.specs.dataElements.findByIdentifier(v.identifier); - const documentation = vDef.description; - const type = v.identifier.name; - - properties[name].documentation = documentation; - properties[name].type = type; - } - - if (v.effectiveCard.toString().charAt(0) == 1) { - const is_mandatory = 'True'; - properties[name].is_mandatory = is_mandatory; - } - - if (v.effectiveCard.toString() !== '0..1') { - const cardinality = v.effectiveCard; - properties[name].cardinality = cardinality; - } - } else if (de.value.constructor.name == 'ChoiceValue') { - for (const opt of de.value.options) { - const name = `valueChoice${opt.identifier.name}`; - const p_bmm_type = 'P_BMM_SINGLE_PROPERTY'; - - properties[name] = { - p_bmm_type: p_bmm_type, - name: name, - }; - - if (opt.identifier.namespace == 'primitive') { - let documentation = `PrimitiveValue (original type: ${opt.identifier.name})`; - let type = 'Any'; - - type = opt.identifier.name.toUpperCase(); - // const conversionTable = { - // code: 'CodedText', - // string: 'String', - // dateTime: 'DateTime', - // decimal: 'Quantity', - // uri: 'URI', - // boolean: 'Boolean', - // time: 'Time' - // }; - // if (opt.identifier.name in conversionTable) { - // type = conversionTable[opt.identifier.name]; - // } else { - // 
console.log('unhandled prmitive %s', opt.identifier.name); - // documentation = `Unsupported Primitive ${opt.identifier.name}`; - // type = 'CodedText'; - // } - - properties[name].documentation = documentation; - properties[name].type = type; - - } else { - const vDef = this.specs.dataElements.findByIdentifier(opt.identifier); - const documentation = vDef.description; - const type = opt.identifier.name; - - properties[name].documentation = documentation; - properties[name].type = type; - } - - if (opt.effectiveCard.toString().charAt(0) == 1) { - const is_mandatory = 'True'; - properties[name].is_mandatory = is_mandatory; - } - - if (opt.effectiveCard.toString() !== '0..1') { - const cardinality = opt.effectiveCard; - properties[name].cardinality = cardinality; - } - } - } - } - - return properties; - } -} - -module.exports = { BmmSpecs, setLogger }; \ No newline at end of file diff --git a/node_modules/shr-json-export/lib/export.js b/node_modules/shr-json-export/lib/export.js deleted file mode 100644 index 93ed6e7..0000000 --- a/node_modules/shr-json-export/lib/export.js +++ /dev/null @@ -1,609 +0,0 @@ -// export SHR specification content as a hierarchy in JSON format -// Author: Greg Quinn -// The exportToJSON function produces a JSON object representing all the SHR content provided as input in a hierarchal form. Each node -// has a label, type, and children attribute. If label is omitted, the node is anonymous. The type attribute must always exist though. -// If children is omitted, the node is a leaf (no children). -// Some types of nodes can have additional attributes as appropriate to their type. -// In the representation below, the children attribute is shown by indented node(s) on the next numbered line(s). Note that each numbered line -// represents a type of node and can be repeated 0 to many times (except the root SHR node which there is only 1 of). -// If a node (numbered line) has 2 or more indented nodes under it, then it can have multiple types of nodes as children. Note that each of those -// may have children too so its all the numbered lines under it that are one indent level in that represent the possible types of children of a node. - -// Attribute values like mean that x is the label of the attribute on the JSON object in models.js that is the value of the attribute. The type -// attribute corresponds to the JSON object in models.js. -// More complicated value expressions like: -// ":" -// This means the value of the namespace attribute of identifier followed by a colon then the value of the name attribute of identifier. - -// When an attribute value is *Type, that's a reference to one of the Types at the end of the hierarchy which specifies the format of that JSON -// object. Often it will be "*Type1 or Type2" which means the value of that attribute can be Type1 or Type2 so see the definition of *Type1 and -// *Type2. -// Each line starts with a number representing its level in the hierarchy. Long node specifications are continued on the next line indented without a number - -// 1{ label: SHR, type: SHR} -// 2{ label: Namespaces, type: Namespaces} -// 3{ label: , type: Namespace, description: , grammarVersion: *Version or [ *Version, ... 
] } -// 4{ label: , type: DataElement, isEntry: , isAbstract: , concepts: *Concepts, description: , -// basedOn: *Identifiers, value: *Value } -// 5 *Value -// 2{ label: Value Sets, type:ValueSets} -// 3{ label: , type: ValueSet, namespace: , description: , url: , -// concepts: *Concepts, grammarVersion: *Version } -// 4{ label: if exists or if exists or , type: , code: *Code, system: } -// 2{ label: Code Systems, type:CodeSystems} -// 3{ label: , type: CodeSystem, namespace: , description: , url: , -// grammarVersion: *Version } -// 4 *Concept - -// *Value = *ChoiceValue or *RefValue or *IdentifiableValue or *TBD or IncompleteValue -// *ChoiceValue = { type:"ChoiceValue", min: , max: but if max=* then this attribute left out, -// constraints:*Constraints, value: [ *Value ] } -// *RefValue = { type:"RefValue", min: , max: but if max=* then this attribute left out, -// constraints:*Constraints, label:"reference to "":", -// identifier: *Identifier } -// *IdentifiableValue = { type:"IdentifiableValue", min: , max: but if max=* then this attribute left out, -// constraints:*Constraints, -// label:":", identifier: *Identifier } -// *TBD = { type:"TBD", min: , max: but if max=* then this attribute left out, -// constraints:*Constraints, -// text: } -// *IncompleteValue = { type:"Incomplete", min: , max: but if max=* then this attribute left out, -// constraints:*Constraints, -// label:":", identifier: *Identifier } -// *Constraints = [ *Constraint ] -// *Constraint = *ValueSetConstraint or *CodeConstraint or *TypeConstraint or *CardConstraint -// *ValueSetConstraint = { type="ValueSetConstraint", valueset=, path= as string with : separator } -// *CodeConstraint = { type="CodeConstraint", code=, path= as string with : separator } -// *IncludesCodeConstraint= { type="IncludesCodeConstraint", code=, path= as string with : separator } -// *TypeConstraint = { type="TypeConstraint", isA=, onValue=, path= as string with : separator } -// *IncludesTypeConstraint = { type="IncludesTypeConstraint", isA=, min=, max= unless unbounded, -// path = as string with : separator } -// *BooleanConstraint = { type="BooleanConstraint", value=, path= as string with : separator } -// *CardConstraint = { type="CardConstraint", min=, max= unless unbounded, -// path= as string with : separator } -// *Identifiers = [ *Identifier ] -// *Identifier = { label: , type: "Identifier", namespace: } -// *Concepts = [ *Concept ] -// *Concept = { label: " "(":") else ":", type: "Concept", system: , -// code: , display:, url: } -// *Version = { major:, minor:, patch: } -// *Code = - -const bunyan = require('bunyan'); -const {IdentifiableValue, RefValue, ChoiceValue, TBD, IncompleteValue, ValueSetConstraint, IncludesCodeConstraint, CodeConstraint, CardConstraint, TypeConstraint, MODELS_INFO} = require('shr-models'); - -var rootLogger = bunyan.createLogger({name: 'shr-json-export'}); -var logger = rootLogger; -function setLogger(bunyanLogger) { - rootLogger = logger = bunyanLogger; -} - -var config; - -// *SHR -function exportToJSON(specifications, configuration=[]) { -// 1{ label: SHR, type: SHR} -// 2{ label: Namespaces, type: Namespaces} -// 3{ label: , type: "Namespace", description: , grammarVersion: } - - config = configuration; - - const namespaceResults = [], valuesetResults = [], codeSystemResults = []; - - - for (const ns of specifications.namespaces.all) { - namespaceResults.push( - namespaceToHierarchyJSON( ns, - specifications.dataElements.byNamespace(ns.namespace), - specifications.dataElements.grammarVersions )); // 
*Namespace - } - - var valueSetsForNamespace; - for (const ns of specifications.namespaces.all) { - - valueSetsForNamespace = valueSetsToHierarchyJSON( ns, - specifications.valueSets.byNamespace(ns.namespace), - specifications.valueSets.grammarVersions ); - if (valueSetsForNamespace.length > 0) { - for (const item of valueSetsForNamespace) { - valuesetResults.push(item); - } - } - } - - var codeSystemsForNamespace; - for (const ns of specifications.namespaces.all) { - - codeSystemsForNamespace = codeSystemsToHierarchyJSON( ns, - specifications.codeSystems.byNamespace(ns.namespace), - specifications.codeSystems.grammarVersions ); - if (codeSystemsForNamespace.length > 0) { - for (const item of codeSystemsForNamespace) { - codeSystemResults.push(item); - } - } - } - - const shr = { - label: configuration.projectShorthand, - type: configuration.projectShorthand, - children: [ { label: 'Namespaces', - type: 'Namespaces', - children: namespaceResults }, - { label: 'Value Sets', - type: 'ValueSets', - children: valuesetResults }, - { label: 'Code Systems', - type: 'CodeSystems', - children: codeSystemResults }] - }; - return shr; -} - -// { major:, minor:, patch: } -function versionToHierarchyJSON(v) { - return { - major: v.major, - minor: v.minor, - patch: v.patch - }; -} - -// *CodeSystems -function codeSystemsToHierarchyJSON(ns, codeSystems, grammarVersions) { - const result = []; - - for (const cs of codeSystems) { - result.push(codeSystemToHierarchyJSON(cs)); //*CodeSystem - } - return result; -} - -// *CodeSystem -function codeSystemToHierarchyJSON(cs) { - // Setup a child logger to associate logs with the current code system - logger = rootLogger.child({ shrId: cs.identifier.fqn }); - logger.debug('Start exporting code system'); - try { - var result = {}; - result['label'] = cs.identifier.name; - result['namespace'] = cs.identifier.namespace; - result['description'] = cs.description; - result['type'] = 'CodeSystem'; - result['url'] = cs.url; - if (cs.grammarVersion) result['grammarVersion'] = versionToHierarchyJSON(cs.grammarVersion); - var codes = conceptsToHierarchyJSON(cs.codes); // *Concepts - if (codes.length > 0) { - result['children'] = codes; - } - - return result; - } finally { - logger.debug('Done exporting code system'); - this.logger = rootLogger; - } -} - - -// *ValueSets -function valueSetsToHierarchyJSON(ns, valueSets, grammarVersions) { - const result = []; - - for (const vs of valueSets) { - result.push(valueSetToHierarchyJSON(vs)); //*ValueSet - } - return result; -} - -//*ValueSet -function valueSetToHierarchyJSON(vs) { - // Setup a child logger to associate logs with the current value set - logger = rootLogger.child({ shrId: vs.identifier.fqn }); - logger.debug('Start exporting value set'); - try { - var result = {}; - result['label'] = vs.identifier.name; - result['namespace'] = vs.identifier.namespace; - result['description'] = vs.description; - result['type'] = 'ValueSet'; - result['url'] = vs.url; - result['concepts'] = conceptsToHierarchyJSON(vs.concepts); // *Concepts - if (vs.grammarVersion) result['grammarVersion'] = versionToHierarchyJSON(vs.grammarVersion); - var rules = valueSetRulesToHierarchyJSON(vs.rules); - if (rules.length > 0) { - result['children'] = rules; - } - - return result; - } finally { - logger.debug('Done exporting value set'); - this.logger = rootLogger; - } -} - -// *ValueSetRules -function valueSetRulesToHierarchyJSON(rules) { - var result = []; - for (const rule of rules) { - result.push( valueSetRuleToHierarchyJSON(rule)); - } - return 
result; -} - -// *ValueSetRule -function valueSetRuleToHierarchyJSON(rule) { - var result = {}; - logger.debug('Rule: %s', rule.code ? rule.code : rule.system); - if (rule.code) { - if (rule.code.display) { - result["label"] = rule.code.display; - } else { - result["label"] = rule.code.code; - } - result["code"] = codeToHierarchyJSON(rule.code); - } else if (rule.system) { - result["label"] = rule.system; - result["system"] = rule.system; - } else { - //14001 , 'Unsupported value set rule type: ${vsRuleType}' , 'Unknown' , 'errorNumber' - logger.error({ vsRuleType : rule.constructor.name }, '14001' ); - } - result["type"] = rule.constructor.name; - /* - if (rule.constructor.name === "ValueSetIncludesCodeRule") { - } else if (rule.constructor.name === "ValueSetIncludesDescendentsRule") { - } else if (rule.constructor.name === "ValueSetExcludesDescendentsRule") { - } else if (rule.constructor.name === "ValueSetIncludesFromCodeRule") { - }*/ - return result; -} - -// *Namespace -function namespaceToHierarchyJSON(ns, dataElements, grammarVersions) { -// 3{ label: , type: "Namespace", description: , grammarVersion: } -// 4 *DataElement - - // Setup a child logger to associate logs with the current namespace - logger = rootLogger.child({ shrId: ns.namespace }); - logger.debug('Start exporting namespace'); - try { - const definitions = []; - - let defs = dataElements.sort(function(l,r) {return l.identifier.name.localeCompare(r.identifier.name);}); - for (const def of defs) { - definitions.push(definitionToHierarchyJSON(def)); // *DataElement - } - - var result = {}; - result['label'] = ns.namespace; - result['type'] = 'Namespace'; - if (ns.description) { - result['description'] = ns.description; - } - - if (grammarVersions.length > 0) { - result['grammarVersion'] = grammarVersionsToHierarchyJSON(grammarVersions); - } - result['children'] = definitions; - return result; - } finally { - logger.debug('Done exporting namespace'); - this.logger = rootLogger; - } -} - -function grammarVersionsToHierarchyJSON(grammarVersions) { - var versions = undefined; - if (grammarVersions.length > 0) { - if (grammarVersions.length === 1) { - versions = versionToHierarchyJSON(grammarVersions[0]); - } else { - versions = []; - for (const v of grammarVersions) { - versions.push(versionToHierarchyJSON(v)); - } - } - } - return versions; -} - -// *DataElement -function definitionToHierarchyJSON(def) { -// 4{ label: , type: "DataElement", isEntry: , isAbstract: , concepts: *Concepts, description: , -// basedOn: *Identifiers, value: *Value } -// 5 *Value - - // Setup a child logger to associate logs with the current namespace - const lastLogger = logger; - logger = rootLogger.child({ shrId: def.identifier.fqn }); - logger.debug('Start exporting data element'); - try { - var result = {}; - result['type'] = 'DataElement'; - result['label'] = def.identifier.name; - //result['identifier'] = identifierToHierarchyJSON(def.identifier); - result['isEntry'] = def.isEntry; - result['isAbstract'] = def.isAbstract; - result['concepts'] = conceptsToHierarchyJSON(def.concepts); // *Concepts - result['description'] = def.description; - //result['grammarVersion'] = def.grammarVersion; - if (def.grammarVersion) result['grammarVersion'] = versionToHierarchyJSON(def.grammarVersion); - if (def.basedOn.length > 0) { - //if (def.identifier.name === 'ViolentBehaviorRisk') { - // logger.info(def.basedOn); - // logger.info(def); - //} - result['basedOn'] = identifiersToHierarchyJSON(def.basedOn); // *Identifiers - } - if (def.value) { - 
result['value'] = valueToHierarchyJSON(def.value); // *Value - } - const children = []; - for (const el of def.fields) { - children.push(valueToHierarchyJSON(el)); // *Value - } - result['children'] = children; - return result; - } finally { - logger.debug('Done exporting data element'); - this.logger = lastLogger; - } -} - -// *Identifiers -function identifiersToHierarchyJSON(identifiers) { -// [ *Identifier ] - const result = []; - for (const identifier of identifiers) { - result.push(identifierToHierarchyJSON(identifier)); // *Identifier - } - return result; -} - -// *Concepts -function conceptsToHierarchyJSON(concepts) { -// [ *Concept ] - const result = []; - - if (concepts.length > 0) { - for (const concept of concepts) { - result.push(conceptToHierarchyJSON(concept)); - } - } - - return result; -} - -// *Value -function valueToHierarchyJSON(value) { - // 5 *Value - // *Value = *ChoiceValue or *RefValue or *IdentifiableValue or *TBD or IncompleteValue - // *ChoiceValue = { type:"ChoiceValue", min: , max: but if max=* then this attribute left out, - // constraints:*Constraints, value: [ *Value ] } - // *RefValue = { type:"RefValue", min: , max: but if max=* then this attribute left out, - // constraints:*Constraints, label:"reference to "":", - // identifier: *Identifier } - // *IdentifiableValue = { type:"IdentifiableValue", min: , max: but if max=* then this attribute left out, - // constraints:*Constraints, - // label:":", identifier: *Identifier } - // *TBD = { type:"TBD", min: , max: but if max=* then this attribute left out, - // constraints:*Constraints, - // text: } - // *IncompleteValue = { type:"Incomplete", min: , max: but if max=* then this attribute left out, - // constraints:*Constraints, - // label:":", identifier: *Identifier } - - const result = {}; - const card = value.card; - if (card) { - result['min'] = card.min; - if (!card.isMaxUnbounded) { - result['max'] = card.max; - } - } - // constraints - result['constraints'] = constraintsToHierarchyJSON(value); // *Constraints - - logger.debug('Value type: %s', value.constructor.name); - if (value.constructor.name === "ChoiceValue") { - result['type'] = 'ChoiceValue'; - result['value'] = choiceValuesToHierarchyJSON(value); - } else if (value.constructor.name === "RefValue") { - result['type'] = 'RefValue'; - result['label'] = 'reference to ' + identifierToString(value.identifier); - result['identifier'] = identifierToHierarchyJSON(value.identifier); - } else if (value.constructor.name === "IdentifiableValue") { - result['type'] = 'IdentifiableValue'; - result['label'] = identifierToString(value.identifier); - result['identifier'] = identifierToHierarchyJSON(value.identifier); - } else if (value.constructor.name === "TBD") { - result['type'] = 'TBD'; - result['text'] = value.text; - } else if (value.constructor.name === "IncompleteValue") { - result['type'] = 'Incomplete'; - result['label'] = identifierToString(value.identifier); - result['identifier'] = identifierToHierarchyJSON(value.identifier); - } else { - //14002 , 'Unknown type for value ${value1}' , 'Unknown' , 'errorNumber' - logger.error({value1 : value.constructor.name }, '14002'); - result['type'] = value.constructor.name; - } - return result; -} - -// *Constraints -function constraintsToHierarchyJSON(value) { - // [ *Constraint ] - const result = []; - for (const constraint of value.constraints) { - result.push(constraintToHierarchyJSON(constraint)); - } - - return result; -} - -// *Constraint -function constraintToHierarchyJSON(constraint) { - // 
*ValueSetConstraint or *CodeConstraint or *TypeConstraint or *CardConstraint - // *ValueSetConstraint = { type="ValueSetConstraint", valueset=, path= as string with : separator } - // *CodeConstraint = { type="CodeConstraint", code=, path= as string with : separator } - // *IncludesCodeConstraint= { type="IncludesCodeConstraint", code=, path= as string with : separator } - // *TypeConstraint = { type="TypeConstraint", isA=, onValue=, path= as string with : separator } - // *BooleanConstraint = { type="BooleanConstraint", value=, path= as string with : separator } - // *CardConstraint = { type="CardConstraint", min=, max= unless unbounded, - // path= as string with : separator } - const result = {}; - result['type'] = constraint.constructor.name; - if (constraint.constructor.name === "ValueSetConstraint") { - result['valueset'] = constraint.valueSet; - result['bindingStrength'] = constraint.bindingStrength; - } else if (constraint.constructor.name === "CodeConstraint") { - result['code'] = conceptToHierarchyJSON(constraint.code); // *Concept - } else if (constraint.constructor.name === "IncludesCodeConstraint") { - result['code'] = conceptToHierarchyJSON(constraint.code); // *Concept - } else if (constraint.constructor.name === "TypeConstraint") { - result['isA'] = identifierToHierarchyJSON(constraint.isA); - result['onValue'] = constraint.onValue; - } else if (constraint.constructor.name === "IncludesTypeConstraint") { - result['isA'] = identifierToHierarchyJSON(constraint.isA); - const card = constraint.card; - if (card) { - result['min'] = card.min; - if (!card.isMaxUnbounded) { - result['max'] = card.max; - } - } - } else if (constraint.constructor.name === "BooleanConstraint") { - result['value'] = constraint.value; - } else if (constraint.constructor.name === "CardConstraint") { - const card = constraint.card; - if (card) { - result['min'] = card.min; - if (!card.isMaxUnbounded) { - result['max'] = card.max; - } - } - } else { - //14003 , 'Unknown type for constraint ${constraint1} , 'Unknown' , 'errorNumber' - logger.error({constraint1 : constraint.constructor.name }, '14003' ); - } - result['path'] = constraint.path.map(p => p.toString()).join(':'); - //if (constraint.path.length > 0) { - // logger.debug(result['path']); - //} - return result; -} - -// *ChoiceValues -function choiceValuesToHierarchyJSON(value) { -// [ *Value ] - const valuesResult = []; - for (const v of value.options) { - valuesResult.push(valueToHierarchyJSON(v)); // *Value - } - return valuesResult; -} - -// *Code -function codeToHierarchyJSON(code) { - var result = {}; - if (code.display) { - result["label"] = code.display; - } else { - result["label"] = code.code; - } - result["type"] = 'code'; - result["code"] = code.code; - result["system"] = code.system; - result["url"] = systemAndCodeToUrl(code.system, code.code); - return result; -} - -function conceptToString(concept) { - if (concept.display) { - return `${concept.display} (${concept.system}:${concept.code})`; - } else { - return `${concept.system}:${concept.code}`; - } -} - -// *Concept -function conceptToHierarchyJSON(concept) { -// *Concept = { label: " "(":") else ":", type: "Concept", system: , code: , display:, url: } - const result = {}; - result['label'] = conceptToString(concept); - result['type'] = 'Concept'; - result['system'] = concept.system; - result['code'] = concept.code; - result['display'] = concept.display; - - result['url'] = systemAndCodeToUrl(concept.system, concept.code); - return result; -} - -function systemAndCodeToUrl(system, 
code) { - var url; - if (system == null) return ''; - switch (system) { - case 'http://uts.nlm.nih.gov/metathesaurus': - url = `https://uts.nlm.nih.gov/metathesaurus.html?cui=${code}`; - break; - case 'http://snomed.info/sct': - url = `https://uts.nlm.nih.gov/snomedctBrowser.html?conceptId=${code}`; - break; - case 'http://loinc.org': - url = `http://s.details.loinc.org/LOINC/${code}.html`; - break; - case 'http://unitsofmeasure.org': - url = 'http://unitsofmeasure.org/ucum.html#section-Alphabetic-Index-By-Symbol'; - break; - case 'http://ncimeta.nci.nih.gov': - url = `https://uts.nlm.nih.gov/metathesaurus.html#${code};0;1;CUI;2016AB;EXACT_MATCH;CUI;*;`; - break; - case 'http://www.genenames.org': - url = `http://www.genenames.org`; - break; - case 'https://evs.nci.nih.gov/ftp1/CDISC/SDTM/': - url = system; - break; - case 'urn:iso:std:iso:4217': - url = 'https://www.iso.org/iso-4217-currency-codes.html'; - break; - case 'https://sdt.cap.org': - // NOTE: https://sdt.cap.org goes to a completely unconfigured Microsoft IIS page, so direct to www.cap.org instead - url = 'http://www.cap.org/'; - break; - case 'urn:tbd': - url = ''; - break; - default: - if (system.startsWith('http://hl7.org/fhir/')) { - url = `${system}#definition`; - } else if (system.startsWith(config.projectURL)) { - url = system; - } else { - logger.warn('Unsupported code system: \'%s\'. ERROR_CODE:04001', system); - url = `${system}/${code}`; - } - } - return url; -} - -function identifierToString(identifier) { - return `${identifier.namespace}:${identifier.name}`; -} - -// *Identifier -function identifierToHierarchyJSON(identifier) { -// { label: , type: "Identifier", namespace: } - if (identifier.name) { - return { label: identifier.name, - type: 'Identifier', - namespace: identifier.namespace }; - } else { - return { label: identifier._text, - type: 'TBD'}; - } -} - -module.exports = {exportToJSON, setLogger, MODELS_INFO}; \ No newline at end of file diff --git a/package.json b/package.json index cb8414d..1a5c58c 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,6 @@ "lint:fix": "./node_modules/.bin/eslint . 
--fix" }, "dependencies": { - "@ojolabs/bunyan-prettystream": "^0.1.6", "bunyan": "^1.8.12", "commander": "^2.9.0", "fs-extra": "^7.0.0", diff --git a/yarn.lock b/yarn.lock index fb54a14..3c2d771 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,6 +2,7 @@ # yarn lockfile v1 +<<<<<<< HEAD "@ojolabs/bunyan-prettystream@^0.1.6": version "0.1.6" resolved "https://registry.yarnpkg.com/@ojolabs/bunyan-prettystream/-/bunyan-prettystream-0.1.6.tgz#5b254259bf6b13af4e34e653413978da20995452" @@ -10,6 +11,8 @@ version "12.7.2" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44" +======= +>>>>>>> Remove regex-based logger and default to new logger acorn-jsx@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" @@ -156,6 +159,7 @@ bl@^1.0.0: bluebird@^3.5.1: version "3.5.5" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.5.tgz#a8d0afd73251effbbd5fe384a77d73003c17a71f" + integrity sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w== bluebird@~3.4.1: version "3.4.7" @@ -228,6 +232,7 @@ callsites@^0.2.0: camelcase@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" + integrity sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0= chainsaw@~0.1.0: version "0.1.0" @@ -274,6 +279,7 @@ cli-width@^2.0.0: cliui@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" + integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== dependencies: string-width "^2.1.1" strip-ansi "^4.0.0" @@ -286,6 +292,7 @@ co@^4.6.0: code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= color-convert@^1.9.0: version "1.9.3" @@ -369,6 +376,7 @@ debug@^3.1.0: decamelize@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= declare.js@~0.0.4: version "0.0.8" @@ -397,8 +405,9 @@ duplexer2@~0.1.4: readable-stream "^2.0.2" ejs@^2.5.7: - version "2.6.1" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.6.1.tgz#498ec0d495655abc6f23cd61868d926464071aa0" + version "2.6.2" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.6.2.tgz#3a32c63d1cd16d11266cd4703b14fec4e74ab4f6" + integrity sha512-PcW2a0tyTuPHz3tWyYqtK6r1fZ3gp+3Sop8Ph+ZYN81Ob5rwmbHEzaqs10N3BEsaGTkh/ooniXK+WwszGlc2+Q== end-of-stream@^1.0.0: version "1.4.1" @@ -520,6 +529,7 @@ exceljs@1.9.0: execa@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" + integrity sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c= dependencies: cross-spawn "^5.0.1" get-stream "^3.0.0" @@ -587,6 +597,7 @@ file-entry-cache@^2.0.0: find-up@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= dependencies: locate-path "^2.0.0" @@ -638,10 +649,12 @@ functional-red-black-tree@^1.0.1: get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" + integrity 
sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-stream@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" + integrity sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ= glob@^6.0.1: version "6.0.4" @@ -741,6 +754,7 @@ inquirer@^3.0.6: invert-kv@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + integrity sha1-EEqOSqym09jNFXqO+L+rLXo//bY= is-extended@0.0.10, is-extended@~0.0.3, is-extended@~0.0.8: version "0.0.10" @@ -751,6 +765,7 @@ is-extended@0.0.10, is-extended@~0.0.3, is-extended@~0.0.8: is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= dependencies: number-is-nan "^1.0.0" @@ -769,6 +784,7 @@ is-resolvable@^1.0.0: is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= isarray@~1.0.0: version "1.0.0" @@ -828,6 +844,7 @@ lazystream@^1.0.0: lcid@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + integrity sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU= dependencies: invert-kv "^1.0.0" @@ -851,6 +868,7 @@ listenercount@~1.0.1: locate-path@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= dependencies: p-locate "^2.0.0" path-exists "^3.0.0" @@ -901,6 +919,7 @@ lru-cache@^4.0.1: mem@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" + integrity sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y= dependencies: mimic-fn "^1.0.0" @@ -969,12 +988,14 @@ normalize-path@^3.0.0: npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= dependencies: path-key "^2.0.0" number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= object-assign@^4.0.1: version "4.1.1" @@ -1018,6 +1039,7 @@ optionator@^0.8.2: os-locale@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2" + integrity sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA== dependencies: execa "^0.7.0" lcid "^1.0.0" @@ -1030,22 +1052,26 @@ os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= p-limit@^1.1.0: version "1.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== dependencies: p-try "^1.0.0" p-locate@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= dependencies: p-limit "^1.1.0" p-try@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= pako@~1.0.2: version "1.0.10" @@ -1054,6 +1080,7 @@ pako@~1.0.2: path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= path-is-absolute@^1.0.0: version "1.0.1" @@ -1066,6 +1093,7 @@ path-is-inside@^1.0.2: path-key@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= pluralize@^7.0.0: version "7.0.0" @@ -1131,10 +1159,12 @@ remove-trailing-separator@^1.0.1: require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= require-main-filename@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= require-uncached@^1.0.3: version "1.0.3" @@ -1213,6 +1243,7 @@ semver@^5.3.0: set-blocking@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= setimmediate@~1.0.4: version "1.0.5" @@ -1231,6 +1262,7 @@ shebang-regex@^1.0.0: showdown@^1.8.6: version "1.9.0" resolved "https://registry.yarnpkg.com/showdown/-/showdown-1.9.0.tgz#d49d2a0b6db21b7c2e96ef855f7b3b2a28ef46f4" + integrity sha512-x7xDCRIaOlicbC57nMhGfKamu+ghwsdVkHMttyn+DelwzuHOx4OHCVL/UW/2QOLH7BxfCcCCVVUix3boKXJKXQ== dependencies: yargs "^10.0.3" @@ -1313,6 +1345,7 @@ string-extended@0.0.8: string-width@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= dependencies: code-point-at "^1.0.0" is-fullwidth-code-point "^1.0.0" @@ -1350,6 +1383,7 @@ strip-ansi@^4.0.0: strip-eof@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= strip-json-comments@~2.0.1: version "2.0.1" @@ -1452,6 +1486,7 @@ util-deprecate@~1.0.1: which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= which@^1.2.9: version "1.3.1" @@ -1466,6 +1501,7 @@ wordwrap@~1.0.0: wrap-ansi@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= dependencies: string-width "^1.0.1" strip-ansi "^3.0.1" @@ -1487,6 +1523,7 @@ xtend@^4.0.0: y18n@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" + integrity sha1-bRX7qITAhnnA136I53WegR4H+kE= yallist@^2.1.2: version "2.1.2" @@ -1495,12 +1532,14 @@ yallist@^2.1.2: yargs-parser@^8.1.0: 
version "8.1.0" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-8.1.0.tgz#f1376a33b6629a5d063782944da732631e966950" + integrity sha512-yP+6QqN8BmrgW2ggLtTbdrOyBNSI7zBa4IykmiV5R1wl1JWNxQvWhMfMdmzIYtKU7oP3OOInY/tl2ov3BDjnJQ== dependencies: camelcase "^4.1.0" yargs@^10.0.3: version "10.1.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-10.1.2.tgz#454d074c2b16a51a43e2fb7807e4f9de69ccb5c5" + integrity sha512-ivSoxqBGYOqQVruxD35+EyCFDYNEFL/Uo6FcOnz+9xZdZzK0Zzw4r4KhbrME1Oo2gOggwJod2MnsdamSG7H9ig== dependencies: cliui "^4.0.0" decamelize "^1.1.1"