// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
get: (buf, off) => {
return {
// === ZONE A - Traditional Xing VBR Tag data ===
// 4 bytes for HeaderFlags
headerFlags: new Token.BufferType(4).get(buf, off),
numFrames: Token.UINT32_BE.get(buf, off + 4),
streamSize: Token.UINT32_BE.get(buf, off + 8),
// the number of header data bytes (from original file)
vbrScale: Token.UINT32_BE.get(buf, off + 112),
/**
* LAME Tag, extends the Xing header format
* First added in LAME 3.12 for VBR
* The modified header is also included in CBR files (effective LAME 3.94), with "Info" instead of "XING" near the beginning.
*/
// === ZONE B - Initial LAME info ===
// Initial LAME info, e.g.: LAME3.99r
codec: new Token.StringType(9, 'ascii').get(buf, off + 116), // bytes $9A-$A => 154-164 (offset doc - 38)
private async handleSoundPropertyChunks(remainingSize: number): Promise {
debug(`Parsing sound-property-chunks, remainingSize=${remainingSize}`);
while (remainingSize > 0) {
const sndPropHeader = await this.tokenizer.readToken(ChunkHeader);
debug(`Sound-property-chunk[ID=${sndPropHeader.chunkID}, size=${sndPropHeader.chunkSize}]`);
const p0 = this.tokenizer.position;
switch (sndPropHeader.chunkID.trim()) {
case 'FS': // 3.2.1 Sample Rate Chunk
const sampleRate = await this.tokenizer.readToken(Token.UINT32_BE);
this.metadata.setFormat('sampleRate', sampleRate);
break;
case 'CHNL': // 3.2.2 Channels Chunk
const numChannels = await this.tokenizer.readToken(Token.UINT16_BE);
this.metadata.setFormat('numberOfChannels', numChannels);
await this.handleChannelChunks(sndPropHeader.chunkSize - Token.UINT16_BE.len);
break;
case 'CMPR': // 3.2.3 Compression Type Chunk
const compressionIdCode = (await this.tokenizer.readToken(FourCcToken)).trim();
const count = await this.tokenizer.readToken(Token.UINT8);
const compressionName = await this.tokenizer.readToken(new Token.StringType(count, 'ascii'));
if (compressionIdCode === 'DSD') {
this.metadata.setFormat('lossless', true);
this.metadata.setFormat('bitsPerSample', 1);
pic.type = AttachedPictureType[b[offset]];
offset += 1;
fzero = common.findZero(b, offset, length, encoding);
pic.description = common.decodeString(b.slice(offset, fzero), encoding);
offset = fzero + nullTerminatorLength;
pic.data = Buffer.from(b.slice(offset, length));
output = pic;
}
break;
case 'CNT':
case 'PCNT':
output = Token.UINT32_BE.get(b, 0);
break;
case 'SYLT':
// skip text encoding (1 byte),
// language (3 bytes),
// time stamp format (1 byte),
// content tagTypes (1 byte),
// content descriptor (1 byte)
offset += 7;
output = [];
while (offset < length) {
const txt = b.slice(offset, offset = common.findZero(b, offset, length, encoding));
offset += 5; // push offset forward one + 4 byte timestamp
output.push(common.decodeString(txt, encoding));
}
// Decode the ID3v2 extended header starting at `off` in `buf`.
get: (buf, off): IExtendedHeader => {
  // Extended header size (excludes these 4 bytes themselves)
  const size = Token.UINT32_BE.get(buf, off);
  // 16-bit extended-flags word follows the size field
  const extendedFlags = Token.UINT16_BE.get(buf, off + 4);
  // Number of padding bytes after the frames
  const sizeOfPadding = Token.UINT32_BE.get(buf, off + 6);
  // "CRC data present" flag is carried inside the extended-flags word
  const crcDataPresent = common.strtokBITSET.get(buf, off + 4, 31);
  return { size, extendedFlags, sizeOfPadding, crcDataPresent };
}
};
private async parseAtom_chap(chap: Atom): Promise {
const trackIds: number[] = [];
let len = chap.getPayloadLength();
while (len >= Token.UINT32_BE.len) {
trackIds.push(await this.tokenizer.readNumber(Token.UINT32_BE));
len -= Token.UINT32_BE.len;
}
return trackIds;
}
private async parseAtom_chap(chap: Atom): Promise {
const trackIds: number[] = [];
let len = chap.getPayloadLength();
while (len >= Token.UINT32_BE.len) {
trackIds.push(await this.tokenizer.readNumber(Token.UINT32_BE));
len -= Token.UINT32_BE.len;
}
return trackIds;
}
private static readFrameHeader(v, majorVer): IFrameHeader {
let header: IFrameHeader;
switch (majorVer) {
case 2:
header = {
id: v.toString('ascii', 0, 3),
length: Token.UINT24_BE.get(v, 3)
};
break;
case 3:
header = {
id: v.toString('ascii', 0, 4),
length: Token.UINT32_BE.get(v, 4),
flags: ID3v2Parser.readFrameFlags(v.slice(8, 10))
};
break;
case 4:
header = {
id: v.toString('ascii', 0, 4),
length: UINT32SYNCSAFE.get(v, 4),
flags: ID3v2Parser.readFrameFlags(v.slice(8, 10))
};
break;
default:
throw new Error('Unexpected majorVer: ' + majorVer);
}
return header;
private async parseAtom_chap(chap: Atom): Promise {
const trackIds: number[] = [];
let len = chap.getPayloadLength();
while (len >= Token.UINT32_BE.len) {
trackIds.push(await this.tokenizer.readNumber(Token.UINT32_BE));
len -= Token.UINT32_BE.len;
}
return trackIds;
}
public get(buffer: Buffer, offset: number): IVorbisPicture {
const type = AttachedPictureType[Token.UINT32_BE.get(buffer, offset)];
const mimeLen = Token.UINT32_BE.get(buffer, offset += 4);
const format = buffer.toString('utf-8', offset += 4, offset + mimeLen);
const descLen = Token.UINT32_BE.get(buffer, offset += mimeLen);
const description = buffer.toString('utf-8', offset += 4, offset + descLen);
const width = Token.UINT32_BE.get(buffer, offset += descLen);
const height = Token.UINT32_BE.get(buffer, offset += 4);
const colour_depth = Token.UINT32_BE.get(buffer, offset += 4);
const indexed_color = Token.UINT32_BE.get(buffer, offset += 4);
const picDataLen = Token.UINT32_BE.get(buffer, offset += 4);
const data = Buffer.from(buffer.slice(offset += 4, offset + picDataLen));
return {