    })
  )
}

// Merge the user-entered form values (`value`, keyed by field refCode) into
// the field definitions (`initData`) and the previously saved records
// (`res`), producing update payloads for fields that already have a record
// and create payloads for fields that do not.
export function UpdateworksDataBytool(initData, res, value, linkList) {
  const arr = []; // update payloads (record already exists)
  const newArr = []; // create payloads (no record yet)
  // Copy each field's refCode onto the matching saved record, matched by
  // sequenceNum (loose equality tolerates string/number mismatches).
  for (let i = 0; i < initData.length; i++) {
    const ele = initData[i];
    for (let j = 0; j < res.length; j++) {
      const item = res[j];
      if (item.sequenceNum == ele.sequenceNum) {
        item.refCode = ele.typeField.refCode;
      }
    }
  }
  initData.forEach((citem) => {
    const updateOldData = res.find((f) => f.sequenceNum == citem.sequenceNum);
    if (updateOldData) {
      // The field already has a saved record: build an update payload.
      const obj = {
        baseType: citem.typeField.baseType,
        order: 0,
        id: updateOldData.id,
        typeFieldId: citem.typeField.id,
        sequenceNum: citem.sequenceNum,
        setDataAndFileLinkListRequest: [],
      };
      for (const k in value) {
        if (citem.typeField.refCode === k) {
          if (typeof value[k] == "object" && k != "region") {
            // File/link fields: persist the attached link list.
            obj.strValue = JSON.stringify(linkList);
            obj.setDataAndFileLinkListRequest = linkList;
          } else if (typeof value[k] == "object" && k == "region") {
            // Region cascader: persist the selected path as "a/b/c".
            obj.strValue = value[k]?.join("/");
            obj.setDataAndFileLinkListRequest = [{ area: value[k] }];
          } else {
            obj.strValue = value[k].toString();
          }
        }
      }
      if (obj.strValue) {
        arr.push(obj);
      }
    } else {
      // No saved record yet: build a create payload (no id).
      const newObj = {
        baseType: citem.typeField.baseType,
        order: 0,
        typeFieldId: citem.typeField.id,
        sequenceNum: citem.sequenceNum,
        setDataAndFileLinkListRequest: [],
      };
      for (const k in value) {
        if (citem.typeField.refCode === k) {
          if (typeof value[k] == "object") {
            newObj.strValue = JSON.stringify(linkList);
            newObj.setDataAndFileLinkListRequest = linkList;
          } else {
            newObj.strValue = value[k].toString();
          }
        }
      }
      if (newObj.strValue) {
        newArr.push(newObj);
      }
    }
  });
  return {
    updateData: arr,
    newData: newArr,
  };
}
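
// Usage sketch (hypothetical field/record shapes, inferred from the code above):
//   const { updateData, newData } = UpdateworksDataBytool(
//     [{ sequenceNum: 1, typeField: { id: 10, baseType: "String", refCode: "title" } },
//      { sequenceNum: 2, typeField: { id: 11, baseType: "String", refCode: "author" } }],
//     [{ id: 99, sequenceNum: 1 }],        // only "title" has a saved record
//     { title: "Hello", author: "Anon" },  // form values keyed by refCode
//     []
//   );
//   // updateData -> one payload carrying id: 99; newData -> one payload for "author"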

// Minimal sketch, assuming the intent is to trigger a browser download of
// `url` via a temporary anchor element; adjust if the backend already sets
// Content-Disposition and a plain navigation is enough.
export function download(url) {
  const a = document.createElement("a");
  a.href = url;
  a.download = "";
  document.body.appendChild(a);
  a.click();
  document.body.removeChild(a);
}
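// Usage sketch (hypothetical URL): download("/files/export?id=123");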

// Compute a file's MD5 with spark-md5, reading the file chunk by chunk so
// large files never have to be loaded into memory at once. Requires the
// spark-md5 package (SparkMD5 imported at the top of this file).
export function getFileMd5(file, chunkSize = 2 * 1024 * 1024 /* assumed 2 MB default */) {
  return new Promise((resolve, reject) => {
    const blobSlice =
      File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
    const chunks = Math.ceil(file.size / chunkSize);
    let currentChunk = 0;
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader = new FileReader();
    fileReader.onload = function (e) {
      // Feed each chunk into the incremental hash, then read the next one.
      spark.append(e.target.result);
      currentChunk++;
      if (currentChunk < chunks) {
        loadNext();
      } else {
        resolve(spark.end()); // hex digest of the whole file
      }
    };
    fileReader.onerror = function (e) {
      reject(e);
    };
    function loadNext() {
      const start = currentChunk * chunkSize;
      const end = Math.min(start + chunkSize, file.size);
      fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }
    loadNext();
  });
}
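// Usage sketch: hash a picked file (e.g. in an <input type="file"> change handler):
//   getFileMd5(fileInput.files[0]).then((md5) => console.log("md5:", md5));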