function compareHashMD5({
obj,
clone,
checkObj
}) {
// equal
const hash1 = hash(JSON.stringify(obj));
const hash2 = hash(JSON.stringify(clone));
assert.equal(hash1, hash2);
// not equal
const hash3 = hash(JSON.stringify(obj));
const hash4 = hash(JSON.stringify(checkObj));
assert.notEqual(hash3, hash4);
return;
}
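A minimal usage sketch, assuming `hash` is spark-md5's static `SparkMD5.hash` and `assert` is Node's assert module; the example objects are illustrative only.

const SparkMD5 = require('spark-md5');
const assert = require('assert');
const hash = (str) => SparkMD5.hash(str); // hex MD5 digest of a string

// Structurally identical objects serialize to the same JSON string, so their
// digests match; a differing object produces a different digest.
compareHashMD5({
  obj: { a: 1, b: [2, 3] },
  clone: { a: 1, b: [2, 3] },
  checkObj: { a: 1, b: [2, 4] }
});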
text: "Married"
},
managingOrganization: {
identifier: {
$class: "org.fhir.datatypes.Identifier",
use: "usual",
system: "Blockchain:Provider",
value: "Provider::Provida"
}
}
};
// Hash the object and set the ID to the hash. This creates a unique ID with low chance of collision, which is good enough for our purposes here.
// USE THIS TO CREATE NEW PATIENTS. THIS IS COMMENTED OUT TO REUSE THE SAME DEFAULT PATIENT ID
json.id = "resource:org.fhir.core.Patient#com.instamed.patient.".concat(this.first_name, "_", this.last_name, "_",
Spark.hash(JSON.stringify(json)).toString().substring(0,8)
);
json.identifier[0].value = json.id;
this.patient_id = json.id.toString();
this.$emit("saveFhir", {name: 'fhir_patient', data: json})
return json;
},
savePatient() {
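A standalone sketch of the ID scheme described in the comment above: hash the serialized resource with spark-md5 and keep the first 8 hex characters as a low-collision suffix. The helper name is illustrative.

const SparkMD5 = require('spark-md5');

// Identical payloads always map to the same 8-character suffix, so the
// resulting resource ID is deterministic and content-based.
function patientId(json, firstName, lastName) {
  const digest = SparkMD5.hash(JSON.stringify(json)).substring(0, 8);
  return `resource:org.fhir.core.Patient#com.instamed.patient.${firstName}_${lastName}_${digest}`;
}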
export const hashFile = (file, callback) => {
let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
chunkSize = 2097152, // Read in chunks of 2MB
chunks = Math.ceil(file.size / chunkSize),
currentChunk = 0,
spark = new SparkMD5.ArrayBuffer(),
fileReader = new FileReader();
fileReader.onload = function (e) {
spark.append(e.target.result); // Append array buffer
currentChunk++;
if (currentChunk < chunks) {
loadNext();
} else {
callback(spark.end());
}
};
fileReader.onerror = function () {
console.warn('oops, something went wrong.');
};
function loadNext() {
const start = currentChunk * chunkSize;
const end = start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
loadNext(); // Read the first chunk to start the incremental hash
};
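A hedged usage sketch, assuming a browser context with SparkMD5 available and a file picked from an `<input type="file">`; the selector and wiring are illustrative.

// Hypothetical usage: hash whatever file the user selects and log the digest.
document.querySelector('input[type=file]').addEventListener('change', (event) => {
  hashFile(event.target.files[0], (digest) => {
    console.log('MD5:', digest);
  });
});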
return (dispatch, getState) => {
// If the text box is empty
if (text.trim().length <= 0) return;
const id = md5.hash(text);
// Redux thunk's middleware receives the store methods `dispatch`
// and `getState` as parameters
const { topic } = getState();
const data = {
count: 1,
id,
text
};
// Conditional dispatch
// If the topic already exists, make sure we emit a dispatch event
if (topic.topics.filter(topicItem => topicItem.id === id).length > 0) {
// Currently there is no reducer that changes state for this
// For production you would ideally have a message reducer that
// notifies the user of a duplicate topic
return dispatch(createTopicDuplicate());
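The duplicate check above works because MD5 is deterministic: the same topic text always produces the same ID. A minimal illustration, assuming spark-md5 stands in for the `md5.hash` helper used above:

import SparkMD5 from 'spark-md5';

// Equal inputs hash to equal IDs, so duplicates can be detected by ID alone.
const topicId = (text) => SparkMD5.hash(text);
console.log(topicId('redux') === topicId('redux')); // true
console.log(topicId('redux') === topicId('react')); // false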
exports.MD5 = function (string) {
/* istanbul ignore else */
if (!process.browser) {
return crypto.createHash('md5').update(string).digest('hex');
} else {
return Md5.hash(string);
}
};
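A usage sketch for the isomorphic helper above, assuming `Md5` is a browser-side MD5 implementation with a static `hash(string)` method (spark-md5 has this shape) and that the bundler defines `process.browser`:

// Either branch yields the same hex digest for the same input.
const digest = exports.MD5('hello world');
console.log(digest); // "5eb63bbbe01eeed093cb22bb8f5acdc3"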
.then(function (results) {
removeCirculars(results.resolved);
// Fix circular references
_.each(results.refs, function (refDetails, refPtr) {
if (refDetails.circular) {
_.set(results.resolved, JsonRefs.pathFromPtr(refPtr), {});
}
});
cacheEntry.referencesMetadata = results.refs;
cacheEntry.resolved = results.resolved;
cacheEntry.resolvedId = SparkMD5.hash(JSON.stringify(results.resolved));
callback();
})
.catch(callback);
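The `resolvedId` above is a content fingerprint: hashing the fully resolved document lets later lookups detect whether anything changed without a deep comparison. A hedged sketch of that check, with illustrative names:

const SparkMD5 = require('spark-md5');

// If the stored fingerprint matches the freshly computed one, the cached
// resolution is still valid and can be reused.
const isCacheFresh = (cacheEntry, resolvedDoc) =>
  cacheEntry.resolvedId === SparkMD5.hash(JSON.stringify(resolvedDoc));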
function md5SingleFile(file: File, cb: (hash: string) => void): void {
const blobSlice =
File.prototype.slice ||
File.prototype.mozSlice ||
File.prototype.webkitSlice;
const chunkSize = 2097152; // Read in chunks of 2MB
const chunks = Math.ceil(file.size / chunkSize);
let currentChunk = 0;
const spark = new SparkMD5.ArrayBuffer();
const fileReader = new FileReader();
function loadNext() {
const start = currentChunk * chunkSize;
const end = start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
fileReader.onload = event => {
// This value tracks the progress of the MD5 computation for this file
const percent = parseFloat((currentChunk / chunks * 100).toFixed(1));
spark.append(event.target.result); // Append array buffer
currentChunk++;
if (currentChunk < chunks) {
loadNext(); // More chunks remain; keep reading
} else {
cb(spark.end()); // All chunks appended; emit the final hex digest
}
};
loadNext(); // Start with the first chunk
}
return new Promise(function(resolve, reject) {
var totalSize = src.size;
var chunkSize = opts && opts.chunkSize || DEFAULT_CHUNK_SIZE;
var file = src;
var count = Math.ceil (totalSize/chunkSize);
var current = 0;
var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
var startTime = new Date().getTime();
var spark = new SparkMD5.ArrayBuffer();
var reader = new FileReader();
reader.onerror = function (e) {
reject(new Error("file read error!"));
};
reader.onload = function (e) {
spark.append(e.target.result); // append array buffer
//xdebug ("e:",e);
var percent = Math.floor(100 * (current * chunkSize + e.loaded) / src.size);
if (opts && opts.onProgress) opts.onProgress(percent);
current += 1;
if (current < count) {
loadNext();
} else {
var hash = spark.end().toUpperCase();
var endTime = new Date().getTime();
// endTime - startTime gives the elapsed hashing time; reporting of it is
// omitted in this excerpt. Resolve with the uppercase hex digest.
resolve(hash);
}
};
function loadNext() {
var start = current * chunkSize;
var end = (start + chunkSize >= totalSize) ? totalSize : start + chunkSize;
reader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
loadNext(); // Read the first chunk to start hashing
});
benefit: service.unitPrice * this.elgibilityPercentage
}
};
adjudications.push(s);
}
let json = {
txDate: this.claim_timestamp.toISOString().slice(0, 10),
uid: "resource:org.fhir.core.ClaimResponse#",
claimUid: this.claim.claim_uid,
accountUid: "resource:org.fhir.core.Account#",
invoiceUid: "resource:org.fhir.core.Invoice#",
adjudications: adjudications
};
let id1 = Spark.hash(JSON.stringify(json));
json.accountUid = json.accountUid.concat(id1);
json.uid = json.uid.concat(id1);
json.invoiceUid = json.invoiceUid.concat(id1, "-1");
this.invoice_uid = json.invoiceUid;
this.account_uid = json.accountUid;
this.claim_response_uid = json.uid;
this.$emit("saveFhir", {name: 'fhir_adjudication', data: json});
return json;
},
approveClaim() {
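A compact sketch of the linking pattern above: one spark-md5 digest of the claim payload is spliced into several related resource UIDs so they can be cross-referenced later. The prefixes follow the snippet; the helper function is illustrative.

const SparkMD5 = require('spark-md5');

// All three UIDs share the same digest, so the claim response, account and
// invoice created from one claim can be matched up by that common suffix.
function linkedUids(payload) {
  const digest = SparkMD5.hash(JSON.stringify(payload));
  return {
    claimResponseUid: 'resource:org.fhir.core.ClaimResponse#' + digest,
    accountUid: 'resource:org.fhir.core.Account#' + digest,
    invoiceUid: 'resource:org.fhir.core.Invoice#' + digest + '-1'
  };
}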