Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
async validateData(descriptor, absPath) {
// TODO: handle inlined data resources
let options = {schema: descriptor.schema}
if (descriptor.dialect) {
Object.assign(options, descriptor.dialect)
}
const table = await Table.load(absPath, options)
try {
await table.read()
} catch (err) {
err.resource = descriptor.name
err.path = absPath
throw err
}
return true
}
/**
 * Validate a data file against a table schema.
 *
 * Loads the file at `absPath` with the supplied schema and performs a
 * full read; any row that violates the schema makes read() throw, which
 * propagates to the caller. Resolves to true on a clean read.
 *
 * @param {Object} schema - table schema the data must conform to.
 * @param {string} absPath - absolute path of the data file.
 * @returns {Promise<boolean>} true when validation succeeds.
 */
async function validateData(schema, absPath) {
// TODO: handle inlined data resources
const opts = {schema}
const tbl = await Table.load(absPath, opts)
await tbl.read()
return true
}
// NOTE(review): truncated paste — the two lines below are the tail of a
// payload/state object whose opening lines are not visible in this chunk.
newReference: this.state.newReference,
newRefReference: this.state.newRefReference
}
// TODO: hardcoded
// Special case: validate a new 'currencies' row against its
// exchange_rates reference table before accepting it.
if (resourceDescriptor.name === 'currencies' && !this.state.newReference) {
// Build a two-row table: header row (object keys) + one value row.
// `keys`/`values` are presumably lodash helpers — TODO confirm import.
const source = [keys(payload.newResource.data[0])].concat([values(payload.newResource.data[0])]);
// Foreign-key relations resolved against the preloaded reference tables.
const relations = {
exchange_rates: referenceTables['exchange_rates']
};
let table;
const prefixedValues = {
"modified_currency": this.state.newResource['modified_currency'].value
}
try {
table = await Table.load(source, {schema});
// forceCast keeps reading past bad rows and attaches the failures to
// each row (filtered via row.errors below) instead of throwing.
const rows = await table.read({forceCast: true, relations});
const errors = rows.filter((row) => row.errors);
if (errors.length) {
this.setState({
prefixedValues,
resourceValid: {
valid: false,
// NOTE(review): triple-nested errors[0].errors[0].errors[0] — verify
// this matches the error shape that read({forceCast}) produces.
message: errors[0].errors[0].errors[0].message
}
});
} else {
this.setState({
resourceValid: {valid: true}
})
this.props.onAddNew(payload)
}
// NOTE(review): snippet truncated — the catch for this try and the
// closing braces of the surrounding if are not visible below.
// NOTE(review): this appears to be a separate redux-thunk snippet spliced
// in by the paste; its enclosing function and matching catch are not
// visible. It validates `source` in DEFAULT_CHUNK_SIZE slices so a
// progress action can be dispatched between chunks.
try {
const tableLength = source.length;
const chunk = DEFAULT_CHUNK_SIZE;
let i = 0
let errors = []
for(i; i < tableLength; i += chunk) {
// Report progress before validating the next chunk.
dispatch({
type: VALIDATE_TABLE_REQUEST,
payload: {
status: 'loading',
loader: `validating ${i} rows`
}
})
const offset = i / chunk
// Prepend the header row (source[0]) so Table.load still sees column
// names for every chunk.
// NOTE(review): the slice bounds `i+1-offset, i+chunk-offset` look
// fragile — confirm this off-by-offset arithmetic against the intended
// row coverage (rows may be skipped or duplicated between chunks).
const chunkTable = [source[0]].concat(source.slice(i+1-offset, i+chunk-offset))
const table = await Table.load(chunkTable, {schema});
const rows = await table.read({forceCast: true, relations});
const chunkErrors = rows.filter((row) => row.errors)
if (chunkErrors.length) {
// Map chunk-relative row numbers back to absolute table positions.
chunkErrors.forEach((error) => {
error.rowNumber = error.rowNumber + chunk * offset - offset
});
errors = errors.concat(chunkErrors)
}
}
// const table = await Table.load(source, {schema});
// const rows = await table.read({forceCast: true});
// const errors = rows.filter((row) => row.errors)
if (errors.length) {
dispatch({
type: VALIDATE_TABLE_FAILURE,
payload: {
// NOTE(review): snippet truncated here — the failure payload body and
// the catch block for the try above are not visible.
// Compiled (apparently Babel) ES5 output of the async validateData
// helper: _asyncToGenerator wraps the generator so each `yield` behaves
// like `await`. Loads the file with the schema, reads it fully (throwing
// on schema violations), and resolves to true on success.
var _ref2 = (0, _asyncToGenerator3.default)(function* (schema, absPath) {
// TODO: handle inlined data resources
const table = yield Table.load(absPath, { schema });
yield table.read();
return true;
});
// Header-validation thunk: load only the first two rows of `source`
// (header + one data row) and read a single row so header checks run
// without scanning the whole table.
dispatch(async() => {
let table;
try {
table = await Table.load(source.slice(0,2), {schema});
await table.read({limit: 1});
// Header parsed cleanly — publish the detected headers.
dispatch({
type: VALIDATE_HEADER_SUCCESS,
payload: {
status: 'done',
valid: true,
headers: table.headers
}
})
} catch (error) {
console.error(error)
// Only ERROR_HEADER means the header itself is bad; any other failure
// (e.g. a data cast problem in the sample row) still reports header
// success below.
if (error.type !== 'ERROR_HEADER') {
dispatch({
type: VALIDATE_HEADER_SUCCESS,
payload: {
status: 'done',
// NOTE(review): snippet truncated here — the rest of this payload and
// the ERROR_HEADER branch are not visible.
// Row-revalidation thunk: re-read the full table (forceCast collects
// per-row errors; relations checks foreign keys) and dispatch failure
// details if any row still carries errors.
dispatch(async() => {
let table;
try {
table = await Table.load(source, {schema});
const rows = await table.read({forceCast: true, relations});
const errors = rows.filter((row) => row.errors);
if (errors.length) {
dispatch({
type: REVALIDATE_ROWS_FAILURE,
payload: {
status: 'done',
valid: false,
// rowNumbers/originalValue/fixedValues come from the enclosing scope
// (not visible in this chunk).
rowNumbers,
originalValue,
fixedValues
}
})
} else {
dispatch({
// NOTE(review): truncated — the success action and the catch block for
// this try are not visible.
// Load the resource from its file path and push a bounded preview
// (at most config.EDITOR_UPLOAD_ROWS_LIMIT rows) plus the detected
// headers into the store for the editor.
dispatch(async () => {
const table = await Table.load(descriptor.path)
const rows = await table.read({limit: config.EDITOR_UPLOAD_ROWS_LIMIT})
const headers = table.headers
dispatch({
type: 'UPLOAD_DATA',
rows,
headers,
// resourceIndex comes from the enclosing scope (not visible here).
resourceIndex,
})
})
// NOTE(review): the matching `if (...)` header for this else is outside
// the visible chunk.
} else {
// Browser-upload path: read the chosen file's text, wrap it in a
// Readable stream (push(null) terminates the stream), and load it as a
// table to produce the same bounded preview as the path-based branch.
dispatch(async () => {
const text = await readFile(ev.target.files[0])
const stream = new Readable()
stream.push(text)
stream.push(null)
const table = await Table.load(stream)
const rows = await table.read({limit: config.EDITOR_UPLOAD_ROWS_LIMIT})
const headers = table.headers
dispatch({
type: 'UPLOAD_DATA',
rows,
headers,
// resourceIndex comes from the enclosing scope (not visible here).
resourceIndex,
})
})
},