(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i { // for each document if (utils.isObject(document)) { // if the data at the key is a document, then we retrieve the subHeading starting with an empty string heading and the doc return deepKeys(document, options); } return []; }); } function generateDeepKeysList(heading, data, options) { let keys = Object.keys(data).map((currentKey) => { // If the given heading is empty, then we set the heading to be the subKey, otherwise set it as a nested heading w/ a dot let keyName = buildKeyName(heading, currentKey); // If we have another nested document, recur on the sub-document to retrieve the full key name if (isDocumentToRecurOn(data[currentKey])) { return generateDeepKeysList(keyName, data[currentKey], options); } else if (options.expandArrayObjects && isArrayToRecurOn(data[currentKey])) { // If we have a nested array that we need to recur on return processArrayKeys(data[currentKey], keyName, options); } // Otherwise return this key name since we don't have a sub document return keyName; }); return utils.flatten(keys); } /** * Helper function to handle the processing of arrays when the expandArrayObjects * option is specified. * @param subArray * @param currentKeyPath * @param options * @returns {*} */ function processArrayKeys(subArray, currentKeyPath, options) { let subArrayKeys = deepKeysFromList(subArray); if (!subArray.length) { return options.ignoreEmptyArraysWhenExpanding ? 
            [] : [currentKeyPath];
    } else if (subArray.length && utils.flatten(subArrayKeys).length === 0) {
        // Has items in the array, but no objects
        return [currentKeyPath];
    } else {
        // Array of objects: prefix every sub-document key with the array's key path
        subArrayKeys = subArrayKeys.map((schemaKeys) => {
            if (isEmptyArray(schemaKeys)) {
                // Sub-document produced no keys; fall back to the array's own key path
                return [currentKeyPath];
            }
            return schemaKeys.map((subKey) => buildKeyName(currentKeyPath, subKey));
        });

        return utils.unique(utils.flatten(subArrayKeys));
    }
}

/**
 * Function used to generate the key path
 * @param upperKeyName String accumulated key path
 * @param currentKeyName String current key name
 * @returns String dot-joined path ('upper.current'), or just currentKeyName when no accumulated path exists
 */
function buildKeyName(upperKeyName, currentKeyName) {
    if (upperKeyName) {
        return upperKeyName + '.' + currentKeyName;
    }
    return currentKeyName;
}

/**
 * Returns whether this value is a document to recur on or not.
 * Must be a non-null, non-array object with at least one own key
 * (utils.isObject alone is true for null and arrays, hence the extra checks).
 * @param val Any item whose type will be evaluated
 * @returns {boolean}
 */
function isDocumentToRecurOn(val) {
    return utils.isObject(val) && !utils.isNull(val) && !Array.isArray(val) && Object.keys(val).length;
}

/**
 * Returns whether this value is an array to recur on or not
 * @param val Any item whose type will be evaluated
 * @returns {boolean}
 */
function isArrayToRecurOn(val) {
    return Array.isArray(val);
}

/**
 * Helper function that determines whether or not a value is an empty array
 * @param val
 * @returns {boolean}
 */
function isEmptyArray(val) {
    return Array.isArray(val) && !val.length;
}

/**
 * Merges user-provided options over the deeks defaults (shallow merge).
 * @param options {Object|undefined} user options
 * @returns {Object} merged options
 */
function mergeOptions(options) {
    return {
        expandArrayObjects: false,
        ignoreEmptyArraysWhenExpanding: false,
        ...options || {}
    };
}

},{"./utils.js":2}],2:[function(require,module,exports){
'use strict';

module.exports = {
    // underscore replacements:
    isString,
    isNull,
    isError,
    isDate,
    isFunction,
    isUndefined,
    isObject,
    unique,
    flatten
};

/*
 * Helper functions which were created to remove underscorejs from this package.
 */

function isString(value) { return typeof value === 'string'; }

// NOTE(review): typeof null === 'object' and typeof [] === 'object', so this also
// returns true for null and arrays — callers guard with isNull/Array.isArray where needed.
function isObject(value) { return typeof value === 'object'; }

function isFunction(value) { return typeof value === 'function'; }

function isNull(value) { return value === null; }

function isDate(value) { return value instanceof Date; }

function isUndefined(value) { return typeof value === 'undefined'; }

// Object.prototype.toString duck-typing works across realms, unlike instanceof Error.
function isError(value) { return Object.prototype.toString.call(value) === '[object Error]'; }

// De-duplicates an array, preserving first-seen order.
function unique(array) { return [...new Set(array)]; }

// Flattens an array one level deep.
function flatten(array) { return [].concat(...array); }

},{}],3:[function(require,module,exports){
/**
 * @license MIT
 * doc-path
 * Copyright (c) 2015-present, Michael Rodrigues.
 */
"use strict";
// Minified vendor code — left byte-identical below; single-letter names are:
// t = document/object, r = key path, e/a/i/s = destructured {dotIndex, key, remaining}.

// Reads the value at dotted key path `r` from `t`. When an array is met along
// the path, evaluation fans out element-by-element (returns an array of results).
function evaluatePath(t,r){if(!t)return null;let{dotIndex:e,key:a,remaining:i}=state(r);return e>=0&&!t[r]?Array.isArray(t[a])?t[a].map(t=>evaluatePath(t,i)):evaluatePath(t[a],i):Array.isArray(t)?t.map(t=>evaluatePath(t,r)):t[r]}
// Writes `e` at dotted key path `r` in `t`. Paths starting with __proto__,
// constructor, or prototype are rejected (prototype-pollution guard) — the
// object is returned unchanged in that case.
function setPath(t,r,e){if(!t)throw new Error("No object was provided.");if(!r)throw new Error("No keyPath was provided.");return r.startsWith("__proto__")||r.startsWith("constructor")||r.startsWith("prototype")?t:_sp(t,r,e)}
// Internal recursive setter: creates intermediate objects as needed and fans
// out over arrays encountered along the path.
function _sp(t,r,e){let{dotIndex:a,key:i,remaining:s}=state(r);if(a>=0){if(!t[i]&&Array.isArray(t))return t.forEach(t=>_sp(t,r,e));t[i]||(t[i]={}),_sp(t[i],s,e)}else{if(Array.isArray(t))return t.forEach(t=>_sp(t,s,e));t[r]=e}return t}
// Splits a key path at its first '.', returning the dot index, the head key,
// and the remaining path (the whole string when there is no dot).
function state(t){let r=t.indexOf(".");return{dotIndex:r,key:t.slice(0,r>=0?r:void 0),remaining:t.slice(r+1)}}
module.exports={evaluatePath:evaluatePath,setPath:setPath};

},{}],4:[function(require,module,exports){
// Shared constants: error messages and the default option values merged by buildOptions.
module.exports={
    "errors" : {
        "callbackRequired": "A callback is required!",
        "optionsRequired": "Options were not passed and are required.",
        "json2csv": {
            "cannotCallOn": "Cannot call json2csv on ",
            "dataCheckFailure": "Data provided was not an array of documents.",
            "notSameSchema": "Not all documents have the same schema."
}, "csv2json": { "cannotCallOn": "Cannot call csv2json on ", "dataCheckFailure": "CSV is not a string." } }, "defaultOptions" : { "delimiter" : { "field" : ",", "wrap" : "\"", "eol" : "\n" }, "excelBOM": false, "prependHeader" : true, "trimHeaderFields": false, "trimFieldValues" : false, "sortHeader" : false, "parseCsvNumbers" : false, "keys" : null, "checkSchemaDifferences": false, "expandArrayObjects": false, "unwindArrays": false, "useDateIso8601Format": false, "useLocaleFormat": false }, "values" : { "excelBOM": "\ufeff" } } },{}],5:[function(require,module,exports){ 'use strict'; let path = require('doc-path'), deeks = require('deeks'), constants = require('./constants.json'), utils = require('./utils'); const Json2Csv = function(options) { const wrapDelimiterCheckRegex = new RegExp(options.delimiter.wrap, 'g'), crlfSearchRegex = /\r?\n|\r/, expandingWithoutUnwinding = options.expandArrayObjects && !options.unwindArrays, deeksOptions = { expandArrayObjects: expandingWithoutUnwinding, ignoreEmptyArraysWhenExpanding: expandingWithoutUnwinding }; /** HEADER FIELD FUNCTIONS **/ /** * Returns the list of data field names of all documents in the provided list * @param data {Array} Data to be converted * @returns {Promise.} */ function getFieldNameList(data) { // If keys weren't specified, then we'll use the list of keys generated by the deeks module return Promise.resolve(deeks.deepKeysFromList(data, deeksOptions)); } /** * Processes the schemas by checking for schema differences, if so desired. * If schema differences are not to be checked, then it resolves the unique * list of field names. 
* @param documentSchemas * @returns {Promise.} */ function processSchemas(documentSchemas) { // If the user wants to check for the same schema (regardless of schema ordering) if (options.checkSchemaDifferences) { return checkSchemaDifferences(documentSchemas); } else { // Otherwise, we do not care if the schemas are different, so we should get the unique list of keys let uniqueFieldNames = utils.unique(utils.flatten(documentSchemas)); return Promise.resolve(uniqueFieldNames); } } /** * This function performs the schema difference check, if the user specifies that it should be checked. * If there are no field names, then there are no differences. * Otherwise, we get the first schema and the remaining list of schemas * @param documentSchemas * @returns {*} */ function checkSchemaDifferences(documentSchemas) { // have multiple documents - ensure only one schema (regardless of field ordering) let firstDocSchema = documentSchemas[0], restOfDocumentSchemas = documentSchemas.slice(1), schemaDifferences = computeNumberOfSchemaDifferences(firstDocSchema, restOfDocumentSchemas); // If there are schema inconsistencies, throw a schema not the same error if (schemaDifferences) { return Promise.reject(new Error(constants.errors.json2csv.notSameSchema)); } return Promise.resolve(firstDocSchema); } /** * Computes the number of schema differences * @param firstDocSchema * @param restOfDocumentSchemas * @returns {*} */ function computeNumberOfSchemaDifferences(firstDocSchema, restOfDocumentSchemas) { return restOfDocumentSchemas.reduce((schemaDifferences, documentSchema) => { // If there is a difference between the schemas, increment the counter of schema inconsistencies let numberOfDifferences = utils.computeSchemaDifferences(firstDocSchema, documentSchema).length; return numberOfDifferences > 0 ? 
schemaDifferences + 1 : schemaDifferences; }, 0); } /** * If so specified, this sorts the header field names alphabetically * @param fieldNames {Array} * @returns {Array} sorted field names, or unsorted if sorting not specified */ function sortHeaderFields(fieldNames) { if (options.sortHeader) { return fieldNames.sort(); } return fieldNames; } /** * Trims the header fields, if the user desires them to be trimmed. * @param params * @returns {*} */ function trimHeaderFields(params) { if (options.trimHeaderFields) { params.headerFields = params.headerFields.map((field) => field.split('.') .map((component) => component.trim()) .join('.') ); } return params; } /** * Wrap the headings, if desired by the user. * @param params * @returns {*} */ function wrapHeaderFields(params) { // only perform this if we are actually prepending the header if (options.prependHeader) { params.headerFields = params.headerFields.map(function(headingKey) { return wrapFieldValueIfNecessary(headingKey); }); } return params; } /** * Generates the CSV header string by joining the headerFields by the field delimiter * @param params * @returns {*} */ function generateCsvHeader(params) { params.header = params.headerFields.join(options.delimiter.field); return params; } /** * Retrieve the headings for all documents and return it. * This checks that all documents have the same schema. * @param data * @returns {Promise} */ function retrieveHeaderFields(data) { if (options.keys && !options.unwindArrays) { return Promise.resolve(options.keys) .then(sortHeaderFields); } return getFieldNameList(data) .then(processSchemas) .then(sortHeaderFields); } /** RECORD FIELD FUNCTIONS **/ /** * Unwinds objects in arrays within record objects if the user specifies the * expandArrayObjects option. If not specified, this passes the params * argument through to the next function in the promise chain. 
* @param params {Object} * @returns {Promise} */ function unwindRecordsIfNecessary(params, finalPass = false) { if (options.unwindArrays) { const originalRecordsLength = params.records.length; // Unwind each of the documents at the given headerField params.headerFields.forEach((headerField) => { params.records = utils.unwind(params.records, headerField); }); return retrieveHeaderFields(params.records) .then((headerFields) => { params.headerFields = headerFields; // If we were able to unwind more arrays, then try unwinding again... if (originalRecordsLength !== params.records.length) { return unwindRecordsIfNecessary(params); } // Otherwise, we didn't unwind any additional arrays, so continue... // Run a final time in case the earlier unwinding exposed additional // arrays to unwind... if (!finalPass) { return unwindRecordsIfNecessary(params, true); } // If keys were provided, set the headerFields to the provided keys: if (options.keys) { params.headerFields = options.keys; } return params; }); } return params; } /** * Main function which handles the processing of a record, or document to be converted to CSV format * This function specifies and performs the necessary operations in the necessary order * in order to obtain the data and convert it to CSV form while maintaining RFC 4180 compliance. * * Order of operations: * - Get fields from provided key list (as array of actual values) * - Convert the values to csv/string representation [possible option here for custom converters?] 
* - Trim fields * - Determine if they need to be wrapped (& wrap if necessary) * - Combine values for each line (by joining by field delimiter) * @param params * @returns {*} */ function processRecords(params) { params.records = params.records.map((record) => { // Retrieve data for each of the headerFields from this record let recordFieldData = retrieveRecordFieldData(record, params.headerFields), // Process the data in this record and return the processedRecordData = recordFieldData.map((fieldValue) => { fieldValue = trimRecordFieldValue(fieldValue); fieldValue = recordFieldValueToString(fieldValue); fieldValue = wrapFieldValueIfNecessary(fieldValue); return fieldValue; }); // Join the record data by the field delimiter return generateCsvRowFromRecord(processedRecordData); }).join(options.delimiter.eol); return params; } /** * Helper function intended to process *just* array values when the expandArrayObjects setting is set to true * @param recordFieldValue * @returns {*} processed array value */ function processRecordFieldDataForExpandedArrayObject(recordFieldValue) { let filteredRecordFieldValue = utils.removeEmptyFields(recordFieldValue); // If we have an array and it's either empty of full of empty values, then use an empty value representation if (!recordFieldValue.length || !filteredRecordFieldValue.length) { return options.emptyFieldValue || ''; } else if (filteredRecordFieldValue.length === 1) { // Otherwise, we have an array of actual values... // Since we are expanding array objects, we will want to key in on values of objects. 
return filteredRecordFieldValue[0]; // Extract the single value in the array } return recordFieldValue; } /** * Gets all field values from a particular record for the given list of fields * @param record * @param fields * @returns {Array} */ function retrieveRecordFieldData(record, fields) { let recordValues = []; fields.forEach((field) => { let recordFieldValue = path.evaluatePath(record, field); if (!utils.isUndefined(options.emptyFieldValue) && utils.isEmptyField(recordFieldValue)) { recordFieldValue = options.emptyFieldValue; } else if (options.expandArrayObjects && Array.isArray(recordFieldValue)) { recordFieldValue = processRecordFieldDataForExpandedArrayObject(recordFieldValue); } recordValues.push(recordFieldValue); }); return recordValues; } /** * Converts a record field value to its string representation * @param fieldValue * @returns {*} */ function recordFieldValueToString(fieldValue) { const isDate = utils.isDate(fieldValue); // store to avoid checking twice if (utils.isNull(fieldValue) || Array.isArray(fieldValue) || utils.isObject(fieldValue) && !isDate) { return JSON.stringify(fieldValue); } else if (utils.isUndefined(fieldValue)) { return 'undefined'; } else if (isDate && options.useDateIso8601Format) { return fieldValue.toISOString(); } else { return !options.useLocaleFormat ? 
fieldValue.toString() : fieldValue.toLocaleString(); } } /** * Trims the record field value, if specified by the user's provided options * @param fieldValue * @returns {*} */ function trimRecordFieldValue(fieldValue) { if (options.trimFieldValues) { if (Array.isArray(fieldValue)) { return fieldValue.map(trimRecordFieldValue); } else if (utils.isString(fieldValue)) { return fieldValue.trim(); } return fieldValue; } return fieldValue; } /** * Escapes quotation marks in the field value, if necessary, and appropriately * wraps the record field value if it contains a comma (field delimiter), * quotation mark (wrap delimiter), or a line break (CRLF) * @param fieldValue * @returns {*} */ function wrapFieldValueIfNecessary(fieldValue) { const wrapDelimiter = options.delimiter.wrap; // eg. includes quotation marks (default delimiter) if (fieldValue.includes(options.delimiter.wrap)) { // add an additional quotation mark before each quotation mark appearing in the field value fieldValue = fieldValue.replace(wrapDelimiterCheckRegex, wrapDelimiter + wrapDelimiter); } // if the field contains a comma (field delimiter), quotation mark (wrap delimiter), line break, or CRLF // then enclose it in quotation marks (wrap delimiter) if (fieldValue.includes(options.delimiter.field) || fieldValue.includes(options.delimiter.wrap) || fieldValue.match(crlfSearchRegex)) { // wrap the field's value in a wrap delimiter (quotation marks by default) fieldValue = wrapDelimiter + fieldValue + wrapDelimiter; } return fieldValue; } /** * Generates the CSV record string by joining the field values together by the field delimiter * @param recordFieldValues */ function generateCsvRowFromRecord(recordFieldValues) { return recordFieldValues.join(options.delimiter.field); } /** CSV COMPONENT COMBINER/FINAL PROCESSOR **/ /** * Performs the final CSV construction by combining the fields in the appropriate * order depending on the provided options values and sends the generated CSV * back to the user * @param 
params */ function generateCsvFromComponents(params) { let header = params.header, records = params.records, // If we are prepending the header, then add an EOL, otherwise just return the records csv = (options.excelBOM ? constants.values.excelBOM : '') + (options.prependHeader ? header + options.delimiter.eol : '') + records; return params.callback(null, csv); } /** MAIN CONVERTER FUNCTION **/ /** * Internally exported json2csv function * Takes data as either a document or array of documents and a callback that will be used to report the results * @param data {Object|Array} documents to be converted to csv * @param callback {Function} callback function */ function convert(data, callback) { // Single document, not an array if (utils.isObject(data) && !data.length) { data = [data]; // Convert to an array of the given document } // Retrieve the heading and then generate the CSV with the keys that are identified retrieveHeaderFields(data) .then((headerFields) => ({ headerFields, callback, records: data })) .then(unwindRecordsIfNecessary) .then(processRecords) .then(wrapHeaderFields) .then(trimHeaderFields) .then(generateCsvHeader) .then(generateCsvFromComponents) .catch(callback); } return { convert, validationFn: utils.isObject, validationMessages: constants.errors.json2csv }; }; module.exports = { Json2Csv }; },{"./constants.json":4,"./utils":6,"deeks":1,"doc-path":3}],6:[function(require,module,exports){ 'use strict'; let path = require('doc-path'), constants = require('./constants.json'); const dateStringRegex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/, MAX_ARRAY_LENGTH = 100000; module.exports = { isStringRepresentation, isDateRepresentation, computeSchemaDifferences, deepCopy, convert, isEmptyField, removeEmptyFields, getNCharacters, unwind, isInvalid, // underscore replacements: isString, isNull, isError, isDate, isUndefined, isObject, unique, flatten }; /** * Build the options to be passed to the appropriate function * If a user does not provide custom 
options, then we use our default * If options are provided, then we set each valid key that was passed * @param opts {Object} options object * @return {Object} options object */ function buildOptions(opts) { opts = {...constants.defaultOptions, ...opts || {}}; // Note: Object.assign does a shallow default, we need to deep copy the delimiter object opts.delimiter = {...constants.defaultOptions.delimiter, ...opts.delimiter}; // Otherwise, send the options back return opts; } /** * When promisified, the callback and options argument ordering is swapped, so * this function is intended to determine which argument is which and return * them in the correct order * @param arg1 {Object|Function} options or callback * @param arg2 {Object|Function} options or callback */ function parseArguments(arg1, arg2) { // If this was promisified (callback and opts are swapped) then fix the argument order. if (isObject(arg1) && !isFunction(arg1)) { return { options: arg1, callback: arg2 }; } // Regular ordering where the callback is provided before the options object return { options: arg2, callback: arg1 }; } /** * Validates the parameters passed in to json2csv and csv2json * @param config {Object} of the form: { data: {Any}, callback: {Function}, dataCheckFn: Function, errorMessages: {Object} } */ function validateParameters(config) { // If a callback wasn't provided, throw an error if (!config.callback) { throw new Error(constants.errors.callbackRequired); } // If we don't receive data, report an error if (!config.data) { config.callback(new Error(config.errorMessages.cannotCallOn + config.data + '.')); return false; } // The data provided data does not meet the type check requirement if (!config.dataCheckFn(config.data)) { config.callback(new Error(config.errorMessages.dataCheckFailure)); return false; } // If we didn't hit any known error conditions, then the data is so far determined to be valid // Note: json2csv/csv2json may perform additional validity checks on the data return 
true; } /** * Abstracted function to perform the conversion of json-->csv or csv-->json * depending on the converter class that is passed via the params object * @param params {Object} */ function convert(params) { let {options, callback} = parseArguments(params.callback, params.options); options = buildOptions(options); let converter = new params.converter(options), // Validate the parameters before calling the converter's convert function valid = validateParameters({ data: params.data, callback, errorMessages: converter.validationMessages, dataCheckFn: converter.validationFn }); if (valid) converter.convert(params.data, callback); } /** * Utility function to deep copy an object, used by the module tests * @param obj * @returns {any} */ function deepCopy(obj) { return JSON.parse(JSON.stringify(obj)); } /** * Helper function that determines whether the provided value is a representation * of a string. Given the RFC4180 requirements, that means that the value is * wrapped in value wrap delimiters (usually a quotation mark on each side). * @param fieldValue * @param options * @returns {boolean} */ function isStringRepresentation(fieldValue, options) { const firstChar = fieldValue[0], lastIndex = fieldValue.length - 1, lastChar = fieldValue[lastIndex]; // If the field starts and ends with a wrap delimiter return firstChar === options.delimiter.wrap && lastChar === options.delimiter.wrap; } /** * Helper function that determines whether the provided value is a representation * of a date. * @param fieldValue * @returns {boolean} */ function isDateRepresentation(fieldValue) { return dateStringRegex.test(fieldValue); } /** * Helper function that determines the schema differences between two objects. 
* @param schemaA * @param schemaB * @returns {*} */ function computeSchemaDifferences(schemaA, schemaB) { return arrayDifference(schemaA, schemaB) .concat(arrayDifference(schemaB, schemaA)); } /** * Utility function to check if a field is considered empty so that the emptyFieldValue can be used instead * @param fieldValue * @returns {boolean} */ function isEmptyField(fieldValue) { return isUndefined(fieldValue) || isNull(fieldValue) || fieldValue === ''; } /** * Helper function that removes empty field values from an array. * @param fields * @returns {Array} */ function removeEmptyFields(fields) { return fields.filter((field) => !isEmptyField(field)); } /** * Helper function that retrieves the next n characters from the start index in * the string including the character at the start index. This is used to * check if are currently at an EOL value, since it could be multiple * characters in length (eg. '\r\n') * @param str * @param start * @param n * @returns {string} */ function getNCharacters(str, start, n) { return str.substring(start, start + n); } /** * The following unwind functionality is a heavily modified version of @edwincen's * unwind extension for lodash. Since lodash is a large package to require in, * and all of the required functionality was already being imported, either * natively or with doc-path, I decided to rewrite the majority of the logic * so that an additional dependency would not be required. 
The original code * with the lodash dependency can be found here: * * https://github.com/edwincen/unwind/blob/master/index.js */ /** * Core function that unwinds an item at the provided path * @param accumulator {Array} * @param item {any} * @param fieldPath {String} */ function unwindItem(accumulator, item, fieldPath) { const valueToUnwind = path.evaluatePath(item, fieldPath); let cloned = deepCopy(item); if (Array.isArray(valueToUnwind) && valueToUnwind.length) { valueToUnwind.forEach((val) => { cloned = deepCopy(item); accumulator.push(path.setPath(cloned, fieldPath, val)); }); } else if (Array.isArray(valueToUnwind) && valueToUnwind.length === 0) { // Push an empty string so the value is empty since there are no values path.setPath(cloned, fieldPath, ''); accumulator.push(cloned); } else { accumulator.push(cloned); } } /** * Main unwind function which takes an array and a field to unwind. * @param array {Array} * @param field {String} * @returns {Array} */ function unwind(array, field) { const result = []; array.forEach((item) => { unwindItem(result, item, field); }); return result; } /* * Helper functions which were created to remove underscorejs from this package. 
*/ function isString(value) { return typeof value === 'string'; } function isObject(value) { return typeof value === 'object'; } function isFunction(value) { return typeof value === 'function'; } function isNull(value) { return value === null; } function isDate(value) { return value instanceof Date; } function isUndefined(value) { return typeof value === 'undefined'; } function isError(value) { return Object.prototype.toString.call(value) === '[object Error]'; } function arrayDifference(a, b) { return a.filter((x) => !b.includes(x)); } function unique(array) { return [...new Set(array)]; } function flatten(array) { // Node 11+ - use the native array flattening function if (array.flat) { return array.flat(); } // #167 - allow browsers to flatten very long 200k+ element arrays if (array.length > MAX_ARRAY_LENGTH) { let safeArray = []; for (let a = 0; a < array.length; a += MAX_ARRAY_LENGTH) { safeArray = safeArray.concat(...array.slice(a, a + MAX_ARRAY_LENGTH)); } return safeArray; } return [].concat(...array); } /** * Used to help avoid incorrect values returned by JSON.parse when converting * CSV back to JSON, such as '39e1804' which JSON.parse converts to Infinity */ function isInvalid(parsedJson) { return parsedJson === Infinity || parsedJson === -Infinity; } },{"./constants.json":4,"doc-path":3}]},{},[5]);