feat(multi load): implement libds matching and parsing of multiple sheets
commit efcdc694dd (parent eb7c44333c)
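The "matching libds" part of the change boils down to trimming each pasted LIBRARY.TABLE row, checking it against a format regex, verifying it against the known libraries/tables, and de-duplicating it into `matchedDatasets`. A minimal standalone sketch of that format rule follows — illustrative only, not part of the commit: `DATASET_FORMAT` and `matchDatasets` are made-up names, the committed regex uses the `gmi` flags, and the component additionally checks the reference against `libsAndTables`.

// Illustrative sketch (not committed code): the LIBRARY.TABLE format rule —
// at most 8 word characters before the dot, at most 32 after; duplicates skipped.
const DATASET_FORMAT = /^\w{1,8}\.\w{1,32}$/i

function matchDatasets(userInput: string): string[] {
  const matched: string[] = []
  for (const row of userInput.split('\n')) {
    const trimmed = row.trim()
    if (DATASET_FORMAT.test(trimmed) && !matched.includes(trimmed)) {
      matched.push(trimmed)
    }
  }
  return matched
}

console.log(matchDatasets('LIB123.TABLE_123\nnot a dataset\nLIB123.TABLE_123'))
// -> [ 'LIB123.TABLE_123' ]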
@@ -215,6 +215,7 @@ hot-table {
   padding: 10px;
   background: #dbdbdb;
   border-radius: 5px;
+  color: black;
 }
 }
 
@@ -14,17 +14,6 @@ import Handsontable from 'handsontable'
 import { Subject, Subscription } from 'rxjs'
 import { SasStoreService } from '../services/sas-store.service'
 
-import * as XLSX from '@sheet/crypto'
-
-/**
- * Used in combination with buffer
- */
-const iconv = require('iconv-lite')
-/**
- * In combination with `iconv` is used for encoding json data captured with sheet js from excel file into a file again
- * Which will be send to backend
- */
-const Buffer = require('buffer/').Buffer
 type AOA = any[][]
 
 import { HotTableRegisterer } from '@handsontable/angular'
@@ -42,7 +31,6 @@ import {
   Version
 } from '../models/sas/editors-getdata.model'
 import { DataFormat } from '../models/sas/common/DateFormat'
-import SheetInfo from '../models/SheetInfo'
 import { Approver, ExcelRule } from '../models/TableData'
 import { QueryComponent } from '../query/query.component'
 import { EventService } from '../services/event.service'
@@ -485,6 +473,7 @@ export class EditorComponent implements OnInit, AfterViewInit {
 
     this.spreadsheetService.parseExcelFile({
       file: file,
+      uploader: this.uploader,
       dcValidator: this.dcValidator!,
       headerPks: this.headerPks,
       headerArray: this.headerArray,
@@ -496,14 +485,23 @@ export class EditorComponent implements OnInit, AfterViewInit {
       encoding: this.encoding
     }, (uploadState: string) => {
       this.appendUploadState(uploadState)
+    }, (tableFoundInfo: string) => {
+      this.eventService.showInfoModal('Table Found', tableFoundInfo)
     }).then((parseResult: ParseResult | undefined) => {
       if (parseResult) {
         this.excelFileReady = true
 
-        this.data = parseResult.data
         this.uploader = parseResult.uploader
 
-        this.getPendingExcelPreview()
+        if (parseResult.data && parseResult.headerShow) {
+          // If data is returned it means we parsed excel file
+          this.data = parseResult.data
+          this.headerShow = parseResult.headerShow
+          this.getPendingExcelPreview()
+        } else {
+          // otherwise it's csv file, and we send them directly
+          this.uploadParsedFiles()
+        }
       }
     }).catch((error: string) => {
       this.eventService.showInfoModal('Error', error)
@@ -7,4 +7,5 @@ export default interface SheetInfo {
   missingHeaders: string[]
   rangeStartRow: number
   rangeStartCol: number
+  rangeAddress?: string
 }
@@ -15,18 +15,19 @@
   />
 </div>
 
-<ng-container *ngIf="datasets.length > 0">
+<ng-container *ngIf="parsedDatasets.length > 0">
   <p cds-text="caption" class="ml-10">Found tables:</p>
   <clr-tree>
-    <clr-tree-node *ngFor="let dataset of datasets">
+    <clr-tree-node *ngFor="let dataset of parsedDatasets">
       <button
+        (click)="onParsedDatasetClick(dataset)"
         class="clr-treenode-link"
         [class.active]="dataset.active"
       >
         <cds-icon *ngIf="dataset.status === 'error'" status="danger" shape="exclamation-circle"></cds-icon>
         <cds-icon *ngIf="dataset.status === 'success'" status="success" shape="check-circle"></cds-icon>
         <cds-icon shape="file"></cds-icon>
-        {{ dataset.name }}
+        {{ dataset.libds }}
       </button>
     </clr-tree-node>
   </clr-tree>
@@ -61,29 +62,61 @@
     </p>
   </div>
 
-  <div class="d-flex clr-justify-content-center mt-15">
-    <div class="dataset-input-wrapper">
-      <p cds-text="caption" class="mb-20">Selected file: <strong>{{ selectedFile?.name }}</strong></p>
-      <p cds-text="caption">Paste or type the list of datasets to upload:</p>
+  <ng-container *ngIf="!parsedDatasets.length">
+    <div class="d-flex clr-justify-content-center mt-15">
+      <div class="dataset-input-wrapper">
+        <p cds-text="secondary regular" class="mb-20">Selected file: <strong>{{ selectedFile?.name }}</strong></p>
+        <p cds-text="secondary regular">Paste or type the list of datasets to upload:</p>
 
         <button (click)="onAutoDetectColumns()" class="mt-15 btn btn-primary-outline btn-sm">Auto detect</button>
 
         <clr-textarea-container class="m-0">
           <textarea clrTextarea [(ngModel)]="userInputDatasets" (input)="onUserInputDatasetsChange()" class="w-100-i"></textarea>
           <clr-control-helper>Every row is one dataset. Format: LIBRARY.TABLE</clr-control-helper>
         </clr-textarea-container>
 
         <div class="text-right mt-10">
-          <button (click)="onDiscardFile()" class="btn btn-danger btn-sm">Discard file</button>
-          <button (click)="onUploadFile()" class="btn btn-primary btn-sm">Upload</button>
+          <button (click)="onDiscardFile()" class="btn btn-danger btn-sm" [disabled]="uploadLoading">Discard file</button>
+          <button (click)="onUploadFile()" class="btn btn-primary btn-sm" [disabled]="!matchedDatasets.length" [clrLoading]="uploadLoading">Continue</button>
         </div>
 
         <div *ngIf="matchedDatasets.length">
          <p><strong>Matched datasets:</strong></p>
          <p *ngFor="let matchedDataset of matchedDatasets" class="m-0 ml-5-i">{{ matchedDataset }}</p>
+        </div>
       </div>
     </div>
-  </div>
+  </ng-container>
+
+  <ng-container *ngIf="parsedDatasets.length">
+    <div
+      *ngIf="!activeParsedDataset"
+      class="no-table-selected pointer-events-none"
+    >
+      <clr-icon
+        shape="warning-standard"
+        size="40"
+        class="is-info icon-dc-fill"
+      ></clr-icon>
+      <p class="text-center color-gray mt-10" cds-text="section">
+        Please select a dataset on the left
+      </p>
+    </div>
+
+    <div class="d-flex clr-justify-content-between p-10">
+      <div>
+        <p cds-text="secondary regular" class="mb-10">Found in range: <strong>"{{ activeParsedDataset?.parseResult?.rangeSheetRes?.sheetName }}"!{{ activeParsedDataset?.parseResult?.rangeSheetRes?.rangeAddress }}</strong></p>
+        <p cds-text="secondary regular">Matched with dataset: <strong>LIB1.MPE_X_DATA</strong></p>
+      </div>
+
+      <div>
+        <clr-toggle-wrapper>
+          <input type="checkbox" clrToggle name="options" required value="option1"/>
+          <label>Include in submission</label>
+        </clr-toggle-wrapper>
+      </div>
+    </div>
+  </ng-container>
 
 <!--
 <div *ngIf="!noData && !noDataReqErr && table" class="clr-flex-1">
@@ -14,6 +14,12 @@ import {
 } from '../services'
 import * as XLSX from '@sheet/crypto'
 import { globals } from '../_globals'
+import { EditorsGetDataServiceResponse } from '../models/sas/editors-getdata.model'
+import { DcValidator } from '../shared/dc-validator/dc-validator'
+import { ExcelRule } from '../models/TableData'
+import { HotTableInterface } from '../models/HotTable.interface'
+import { Col } from '../shared/dc-validator/models/col.model'
+import { ParseResult, SpreadsheetService } from '../services/spreadsheet.service'
 
 @Component({
   selector: 'app-multi-dataset',
@@ -30,25 +36,15 @@ export class MultiDatasetComponent implements OnInit {
     this.licenceState.value.viewer_rows_allowed || Infinity
 
   public selectedFile: File | null = null
-  public datasets: any[] = [
-    // {
-    //   name: 'LIB1.TABLE21',
-    //   status: 'error'
-    // },
-    // {
-    //   name: 'LIB1.BLEJA',
-    //   status: 'success'
-    // },
-    // {
-    //   name: 'LIB1.NIDZA',
-    //   status: 'success'
-    // }
-  ]
+  public parsedDatasets: ParsedDataset[] = []
   public datasetsLoading: boolean = false
 
   public matchedDatasets: string[] = []
   public userInputDatasets: string = ''
 
+  public uploadLoading: boolean = false
+
   public libsAndTables: {
     [key: string]: string[]
   } = {}
@@ -57,18 +53,10 @@ export class MultiDatasetComponent implements OnInit {
     private eventService: EventService,
     private licenceService: LicenceService,
     private helperService: HelperService,
-    private loggerService: LoggerService,
-    private route: ActivatedRoute,
-    private router: Router,
     private sasStoreService: SasStoreService,
-    private sasService: SasService
+    private spreadsheetService: SpreadsheetService
   ) {}
 
-  public afterGetColHeader(column: number, th: any) {
-    // Dark mode
-    th.classList.add('darkTH')
-  }
-
   ngOnInit() {
     this.licenceService.hot_license_key.subscribe(
       (hot_license_key: string | undefined) => {
@@ -92,22 +80,22 @@ export class MultiDatasetComponent implements OnInit {
   }
 
   onFileChange(event: any) {
-    // if (!event?.target?.files[0]) {
-    //   this.eventService.showAbortModal(null, 'No file found.', null, 'File Upload')
-    //   return
-    // }
+    if (!event?.target?.files[0]) {
+      this.eventService.showAbortModal(null, 'No file found.', null, 'File Upload')
+      return
+    }
 
-    // const file = event.target.files[0];
-    // const fileTitle = file.name;
-    // const fileExtension = fileTitle.split('.').pop()
+    const file = event.target.files[0];
+    const fileTitle = file.name;
+    const fileExtension = fileTitle.split('.').pop()
 
-    // if (!['xlsx', 'xlsm', 'xlm'].includes(fileExtension)) {
-    //   this.eventService.showAbortModal(null, 'Only excel extensions are allowed. (xlsx)', null, 'Extension Error')
-    //   return
-    // }
+    if (!['xlsx', 'xlsm', 'xlm'].includes(fileExtension)) {
+      this.eventService.showAbortModal(null, 'Only excel extensions are allowed. (xlsx)', null, 'Extension Error')
+      return
+    }
 
-    // this.selectedFile = event.target.files[0]
-    // event.target.value = '' // Reset the upload input
+    this.selectedFile = event.target.files[0]
+    event.target.value = '' // Reset the upload input
   }
 
   onDiscardFile() {
@@ -115,8 +103,53 @@ export class MultiDatasetComponent implements OnInit {
     this.userInputDatasets = ''
   }
 
-  onUploadFile() {
+  async onUploadFile() {
+    this.uploadLoading = true
+
+    const datasetFetchingPromises: Promise<EditorsGetDataServiceResponse | undefined>[] = []
+
+    let datasets: EditorsGetDataServiceResponse[] = []
+
+    for (let datasetLibds of this.matchedDatasets) {
+      const promise = this.fetchDataset(datasetLibds)
+
+      datasetFetchingPromises.push(promise)
+    }
+
+    Promise.allSettled(datasetFetchingPromises).then((res) => {
+      res.forEach((promise) => {
+        if (promise.status === 'fulfilled' && promise.value) datasets.push(promise.value)
+      })
+
+      this.uploadLoading = false
+
+      const datasetObjects = this.buildDatasetsObjects(datasets)
+
+      for (let datasetObject of datasetObjects) {
+        this.spreadsheetService.parseExcelFile({
+          file: this.selectedFile!,
+          dcValidator: datasetObject.dcValidator!,
+          headerPks: datasetObject.headerPks,
+          headerArray: datasetObject.headerArray,
+          headerShow: [],
+          timeHeaders: datasetObject.timeHeaders,
+          dateHeaders: datasetObject.dateHeaders,
+          dateTimeHeaders: datasetObject.dateTimeHeaders,
+          xlRules: datasetObject.xlRules
+        }).then((parseResult: ParseResult | undefined) => {
+          console.log('parseResult', parseResult)
+
+          if (parseResult && parseResult.data) {
+            this.parsedDatasets.push({
+              libds: datasetObject.libds,
+              parseResult: parseResult
+            })
+          }
+        }).catch((error: string) => {
+          this.eventService.showInfoModal('Error', error)
+        })
+      }
+    })
   }
 
   onUserInputDatasetsChange() {
@@ -129,7 +162,7 @@ export class MultiDatasetComponent implements OnInit {
     inputDatasets.forEach((dataset: string) => {
       const trimmedDataset = dataset.trim()
 
-      if (this.isValidDatasetFormat(trimmedDataset) && this.isValidDatasetReference(trimmedDataset)) {
+      if (this.isValidDatasetFormat(trimmedDataset) && this.isValidDatasetReference(trimmedDataset) && !this.matchedDatasets.includes(trimmedDataset)) {
         this.matchedDatasets.push(trimmedDataset)
       } else {
         console.warn(`Sheet name: ${trimmedDataset} is not an actual dataset reference.`)
@@ -165,39 +198,42 @@ export class MultiDatasetComponent implements OnInit {
     this.userInputDatasets = this.matchedDatasets.join('\n')
   }
 
-  /**
-   * Valid dataset format includes:
-   * - Name must contain a single period (.)
-   * - First part (before period) can be no more than 8 chars
-   * - Second part (after period) can be no more than 32 chars
-   * - (start with letter or underscore, and contain only letters / underscores / numbers)
-   * - can't start with a number
-   * - both left and right parts must be valid variable names
-   *
-   * example: LIB123.TABLE_123
-   */
-  isValidDatasetFormat(sheetName: string) {
-    const regex = /^\w{1,8}\.\w{1,32}$/gmi
-    const correctFormat = regex.test(sheetName)
-
-    return correctFormat
+  onParsedDatasetClick(parsedDataset: ParsedDataset) {
+    this.deselectAllParsedDatasets()
+    parsedDataset.active = true
+  }
+
+  public get activeParsedDataset(): ParsedDataset | undefined {
+    return this.parsedDatasets.find(dataset => dataset.active)
   }
 
   /**
-   * Checks if @param datasetRef is valid variable which references library and table
+   * Fetches the table for given datasets params LIBRARY.TABLE
    */
-  isValidDatasetReference(datasetRef: string) {
-    const library = datasetRef.split('.')[0]
-    const table = datasetRef.split('.')[1]
+  async fetchDataset(libds: string): Promise<EditorsGetDataServiceResponse | undefined> {
+    let myParams: any = {
+      LIBDS: libds,
+      OUTDEST: 'WEB'
+    }
 
-    const libTable = this.libsAndTables[library]?.includes(table)
+    if (libds) {
+      // this.getdataError = false
+      return this.sasStoreService
+        .callService(myParams, 'SASControlTable', 'editors/getdata', libds)
+        .then((res: EditorsGetDataServiceResponse) => {
+          return res
+        })
+        .catch((err: any) => {
+          console.warn(`Error fetching ${libds}`, err)
+          return undefined
+        })
+    }
 
-    if (libTable) return true
-
-    return false
+    return undefined
   }
 
-  parseExcelSheetNames(): Promise<string[]> {
+  private parseExcelSheetNames(): Promise<string[]> {
     return new Promise((resolve, reject) => {
       const reader = new FileReader();
 
@@ -234,4 +270,137 @@
       reader.readAsBinaryString(this.selectedFile);
     })
   }
+
+  /**
+   * Valid dataset format includes:
+   * - Name must contain a single period (.)
+   * - First part (before period) can be no more than 8 chars
+   * - Second part (after period) can be no more than 32 chars
+   * - (start with letter or underscore, and contain only letters / underscores / numbers)
+   * - can't start with a number
+   * - both left and right parts must be valid variable names
+   *
+   * example: LIB123.TABLE_123
+   */
+  private isValidDatasetFormat(sheetName: string) {
+    const regex = /^\w{1,8}\.\w{1,32}$/gmi
+    const correctFormat = regex.test(sheetName)
+
+    return correctFormat
+  }
+
+  /**
+   * Checks if @param datasetRef is valid variable which references library and table
+   */
+  private isValidDatasetReference(datasetRef: string) {
+    const library = datasetRef.split('.')[0]
+    const table = datasetRef.split('.')[1]
+
+    const libTable = this.libsAndTables[library]?.includes(table)
+
+    if (libTable) return true
+
+    return false
+  }
+
+  /**
+   * Creates array of objects, containing all `getdata` responses per item
+   * every object will have headers and validations parsed, to be used for
+   * parsing sheet file and uploading it
+   *
+   * @param response
+   * @returns
+   */
+  private buildDatasetsObjects(responses: EditorsGetDataServiceResponse[]) {
+    if (!responses) return []
+
+    const datasetObjects: DatasetsObject[] = []
+
+    for (let response of responses) {
+      if (response.data) {
+        const datasetObject: DatasetsObject = {
+          ...response,
+          hotTable: {
+            data: response.data.sasdata,
+            settings: {}
+          },
+          cols: [],
+          headerColumns: [],
+          headerPks: [],
+          headerArray: [],
+          dateHeaders: [],
+          timeHeaders: [],
+          dateTimeHeaders: [],
+          xlRules: [],
+          columnHeader: []
+        }
+
+        datasetObject.cols = response.data.cols
+        datasetObject.headerColumns = response.data.sasparams[0].COLHEADERS.split(',')
+        datasetObject.headerPks = response.data.sasparams[0].PK.split(' ')
+
+        if (datasetObject.headerColumns.indexOf('_____DELETE__THIS__RECORD_____') !== -1) {
+          datasetObject.headerColumns[
+            datasetObject.headerColumns.indexOf('_____DELETE__THIS__RECORD_____')
+          ] = 'Delete?'
+        }
+
+        datasetObject.headerArray = datasetObject.headerColumns.slice(1)
+
+        if (response.data.sasparams[0].DTVARS !== '') {
+          datasetObject.dateHeaders = response.data.sasparams[0].DTVARS.split(' ')
+        }
+        if (response.data.sasparams[0].TMVARS !== '') {
+          datasetObject.timeHeaders = response.data.sasparams[0].TMVARS.split(' ')
+        }
+        if (response.data.sasparams[0].DTTMVARS !== '') {
+          datasetObject.dateTimeHeaders = response.data.sasparams[0].DTTMVARS.split(' ')
+        }
+        if (response.data.xl_rules.length > 0) {
+          datasetObject.xlRules = this.helperService.deepClone(response.data.xl_rules)
+        }
+
+        datasetObject.dcValidator = new DcValidator(
+          response.data.sasparams[0],
+          response.data.$sasdata,
+          response.data.cols,
+          response.data.dqrules,
+          response.data.dqdata
+        )
+
+        datasetObject.columnHeader = response.data.sasparams[0].COLHEADERS.split(',')
+
+        datasetObjects.push(datasetObject)
+      }
+    }
+
+    return datasetObjects
+  }
+
+  private deselectAllParsedDatasets() {
+    for (let parsedDataset of this.parsedDatasets) {
+      parsedDataset.active = false
+    }
+  }
+}
+
+export interface DatasetsObject extends EditorsGetDataServiceResponse {
+  hotTable: HotTableInterface
+  cols: Col[]
+  headerColumns: string[]
+  headerPks: string[]
+  headerArray: string[]
+  dateHeaders: string[]
+  timeHeaders: string[]
+  dateTimeHeaders: string[]
+  xlRules: ExcelRule[]
+  dcValidator?: DcValidator
+  columnHeader: string[]
+}
+
+export interface ParsedDataset {
+  libds: string
+  status?: 'success' | 'error'
+  active?: boolean
+  parseResult: ParseResult
 }
@@ -2,7 +2,7 @@ import { Injectable } from '@angular/core';
 import * as XLSX from '@sheet/crypto'
 import { ExcelPasswordModalService, Result } from '../shared/excel-password-modal/excel-password-modal.service';
 import { EventService } from './event.service';
-import { isSpecialMissing } from '@sasjs/utils';
+import { isSpecialMissing } from '@sasjs/utils/input/validators';
 import { dateFormat, dateToUtcTime, dateToTime } from '../editor/utils/date.utils';
 import { excelDateToJSDate, getMissingHeaders } from '../editor/utils/grid.utils';
 import { isStringNumber, isStringDecimal } from '../editor/utils/types.utils';
@@ -28,6 +28,11 @@ type AOA = any[][]
 export interface ParseParams {
   file: File,
   dcValidator: DcValidator
+  /**
+   * Parse function will manipulate and return the uploader array which can be provided with files already in the queue
+   * Otherwise new empty instance will be created.
+   */
+  uploader?: FileUploader
   headerPks: string[]
   headerArray: string[]
   headerShow: string[]
@@ -35,12 +40,20 @@ export interface ParseParams {
   dateHeaders: string[]
   dateTimeHeaders: string[]
   xlRules: ExcelRule[]
-  encoding: FileUploadEncoding
+  encoding?: FileUploadEncoding
 }
 
 export interface ParseResult {
-  data: any[]
-  uploader: FileUploader
+  /**
+   * In case of CSV file, won't be returned
+   */
+  data?: any[]
+  /**
+   * In case of CSV file, won't be returned
+   */
+  headerShow?: string[]
+  rangeSheetRes?: SheetInfo
+  uploader: FileUploader,
 }
 
 @Injectable({
@@ -62,18 +75,25 @@ export class SpreadsheetService {
    * @param parseParams params required for parsing the file
    * @param onParseStateChange callback used to inform about parsing state
    * so the user of the function can update the UI with latest info
+   * @param onTableFoundEvent callback fired when table range is found in the file
    *
    * @returns parsed list of files to upload and JSON data ready for HOT usage
    */
-  public parseExcelFile(parseParams: ParseParams, onParseStateChange: (uploadState: string) => void): Promise<ParseResult | undefined> {
+  public parseExcelFile(
+    parseParams: ParseParams,
+    onParseStateChange?: (uploadState: string) => void,
+    onTableFoundEvent?: (info: string) => void
+  ): Promise<ParseResult | undefined> {
     return new Promise((resolve, reject) => {
       let data: any[] = []
-      let uploader: FileUploader = new FileUploader()
+      let uploader: FileUploader = parseParams.uploader || new FileUploader()
 
       let file: File = parseParams.file
      let filename = file.name
 
-      onParseStateChange(`Loading ${filename} into the browser`)
+      if (!parseParams.encoding) parseParams.encoding = 'UTF-8'
+
+      if (onParseStateChange) onParseStateChange(`Loading ${filename} into the browser`)
 
      let foundData = {
        sheet: ''
@@ -140,7 +160,6 @@ export class SpreadsheetService {
       }
 
       if (!wb) {
-        // RETURN ERROR: NO WB FOUND
         return reject('No workbook found.')
       }
 
@@ -170,7 +189,7 @@ export class SpreadsheetService {
           csvArrayHeadersMap = rangeSheetRes.csvArrayHeadersMap
           const ws: XLSX.WorkSheet = wb.Sheets[rangeSheetRes.sheetName]
 
-          onParseStateChange(`Table found on sheet ${rangeSheetRes.sheetName} on row ${rangeSheetRes.startRow}`)
+          if (onParseStateChange) onParseStateChange(`Table found on sheet ${rangeSheetRes.sheetName} on row ${rangeSheetRes.startRow}`)
 
           let startAddress = ''
           let endAddress = ''
@@ -189,9 +208,6 @@ export class SpreadsheetService {
              c: rangeSheetRes.rangeStartCol + col
            })
 
-            if (startAddress === '') startAddress = addr
-            endAddress = addr
-
            let cell
 
            if (!ws[addr]) {
@@ -199,6 +215,10 @@ export class SpreadsheetService {
            } else {
              cell = ws[addr]
            }
+
+            if (startAddress === '' && ws[addr]) startAddress = addr
+            endAddress = addr
+
            arr.push(cell)
          })
 
@@ -209,10 +229,9 @@ export class SpreadsheetService {
            if (arrNonEmptyValue) csvArrayData.push(arr)
          }
 
-          this.eventService.showInfoModal(
-            'Table Found',
-            `Sheet: ${rangeSheetRes.sheetName}\nRange: ${startAddress}:${endAddress}`
-          )
+          rangeSheetRes.rangeAddress = `${startAddress}:${endAddress}`
+
+          if (onTableFoundEvent) onTableFoundEvent(`Sheet: ${rangeSheetRes.sheetName}\nRange: ${rangeSheetRes.rangeAddress}`)
        } else {
          missingHeaders = rangeSheetRes.missingHeaders
        }
@@ -341,16 +360,21 @@ export class SpreadsheetService {
          // Prepend headers
          csvContentClean = csvArrayHeaders.join(',') + '\n' + csvContentClean
 
+          // Blob from which CSV file will be created depending of the selected
+          // encoding
+          let blob: Blob
+
          if (parseParams.encoding === 'WLATIN1') {
+            // WLATIN1
            let encoded = iconv.decode(Buffer.from(csvContentClean), 'CP-1252')
-            let blob = new Blob([encoded], { type: 'application/csv' })
-            let newCSVFile: File = blobToFile(blob, filename + '.csv')
-            uploader.addToQueue([newCSVFile])
+            blob = new Blob([encoded], { type: 'application/csv' })
          } else {
-            let blob = new Blob([csvContentClean], { type: 'application/csv' })
-            let newCSVFile: File = blobToFile(blob, filename + '.csv')
-            uploader.addToQueue([newCSVFile])
+            // UTF-8
+            blob = new Blob([csvContentClean], { type: 'application/csv' })
          }
+
+          let newCSVFile: File = blobToFile(blob, filename + '.csv')
+          uploader.addToQueue([newCSVFile])
        }
 
        if (data.length === 0) {
@@ -359,7 +383,9 @@ export class SpreadsheetService {
 
        return resolve({
          uploader,
-          data
+          data,
+          rangeSheetRes,
+          headerShow: parseParams.headerShow
        })
      }
      reader.readAsArrayBuffer(file)
@@ -384,16 +410,14 @@ export class SpreadsheetService {
          uploader.addToQueue([encodedFile])
 
          resolve({
-            uploader,
-            data
+            uploader
          })
        }
 
        reader.readAsArrayBuffer(file)
      } else {
        return resolve({
-          uploader,
-          data
+          uploader
        })
      }
    } else {
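For reference, the request fan-out that `onUploadFile` introduces is a standard Promise.allSettled pattern: fire one `getdata` request per matched dataset, wait for all of them to settle, and keep only the fulfilled responses before building the dataset objects. A self-contained sketch of that pattern, assuming made-up names (`fetchAll`, `DatasetResponse` stands in for `EditorsGetDataServiceResponse`), not the component code:

// Illustrative sketch (not committed code): settle all fetches, keep fulfilled values.
interface DatasetResponse { libds: string }

async function fetchAll(
  libds: string[],
  fetchOne: (ds: string) => Promise<DatasetResponse | undefined>
): Promise<DatasetResponse[]> {
  const settled = await Promise.allSettled(libds.map((ds) => fetchOne(ds)))
  const results: DatasetResponse[] = []
  for (const res of settled) {
    if (res.status === 'fulfilled' && res.value) results.push(res.value)
  }
  return results
}

// Usage with a stand-in fetcher that never rejects:
fetchAll(['LIB1.TABLE1', 'LIB1.TABLE2'], async (ds) => ({ libds: ds }))
  .then((datasets) => console.log(datasets.length)) // 2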