From d77958e03a99131020cf46481c0e803681c40639 Mon Sep 17 00:00:00 2001 From: Stefan Schlatter Date: Wed, 14 Oct 2015 07:05:06 +0800 Subject: [PATCH 1/2] Added a check to confirm that the record length (sum of field lengths + 1) is correct according to the expected record length as specified in the header to avoid inappropriate reading of the non-standard high byte of nFieldLength --- FastDBF/DbfHeader.cs | 42 +++++++++++++++++++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/FastDBF/DbfHeader.cs b/FastDBF/DbfHeader.cs index ae5f151..6cde13b 100644 --- a/FastDBF/DbfHeader.cs +++ b/FastDBF/DbfHeader.cs @@ -557,13 +557,40 @@ public void Write(BinaryWriter writer) } - /// /// Read header data, make sure the stream is positioned at the start of the file to read the header otherwise you will get an exception. /// When this function is done the position will be the first record. /// /// public void Read(BinaryReader reader) + { + var readerPos = reader.BaseStream.Position; + + // Attempt to read the header without extended FieldLength format + Read(reader, false); + + // Calculate the expected field length. + var calculatedDataLength = 1; + for (var i = 0; i < _fields.Count; i++) + { + calculatedDataLength += _fields[i].Length; + } + + // If the calculated field length does not match the expected length, re-processess the file with extended FieldLength format + if (RecordLength != calculatedDataLength) + { + reader.BaseStream.Position = readerPos; + Read(reader, true); + } + } + + + /// + /// Read header data, make sure the stream is positioned at the start of the file to read the header otherwise you will get an exception. + /// When this function is done the position will be the first record. + /// + /// + private void Read(BinaryReader reader, bool allowExtendedFieldLength) { // type of reader. 
@@ -626,8 +653,17 @@ public void Read(BinaryReader reader) int nDecimals = 0; if (cDbaseType == 'C' || cDbaseType == 'c') { - //treat decimal count as high byte - nFieldLength = (int)reader.ReadUInt16(); + if (allowExtendedFieldLength) + { + //treat decimal count as high byte + nFieldLength = (int)reader.ReadInt16(); + } + else + { + //treat decimal count as high byte + nFieldLength = (int)reader.ReadByte(); + reader.ReadByte(); + } } else { From 0651e8898dfac0874359078c551e8cf81dc68470 Mon Sep 17 00:00:00 2001 From: Stefan Schlatter Date: Sat, 17 Oct 2015 18:58:47 +0800 Subject: [PATCH 2/2] Added more descriptive comments --- FastDBF/DbfHeader.cs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/FastDBF/DbfHeader.cs b/FastDBF/DbfHeader.cs index 6cde13b..08894f3 100644 --- a/FastDBF/DbfHeader.cs +++ b/FastDBF/DbfHeader.cs @@ -565,18 +565,18 @@ public void Write(BinaryWriter writer) public void Read(BinaryReader reader) { var readerPos = reader.BaseStream.Position; - - // Attempt to read the header without extended FieldLength format + + // 'Read' can read both standard headers and headers with the wide character fields as used in Clipper and FoxPro. Read using standard method first. Read(reader, false); - // Calculate the expected field length. + // Calculate the record length from the sum of field length. var calculatedDataLength = 1; for (var i = 0; i < _fields.Count; i++) { calculatedDataLength += _fields[i].Length; } - // If the calculated field length does not match the expected length, re-processess the file with extended FieldLength format + // If the record length does not match the expected length, re-processes the header with support for wide character fields if (RecordLength != calculatedDataLength) { reader.BaseStream.Position = readerPos; @@ -590,6 +590,7 @@ public void Read(BinaryReader reader) /// When this function is done the position will be the first record. 
/// /// + /// true to use the Decimal byte as an extension of the field length field private void Read(BinaryReader reader, bool allowExtendedFieldLength) { @@ -653,6 +654,9 @@ private void Read(BinaryReader reader, bool allowExtendedFieldLength) int nDecimals = 0; if (cDbaseType == 'C' || cDbaseType == 'c') { + //if allowing extended field length, then read FieldLength and Decimal count as one number to allow char fields to be + //longer than 256 bytes (ASCII char). This is the way Clipper and FoxPro do it, and there is really no downside + //since for char fields decimal count should be zero for other versions that do not support this extended functionality. if (allowExtendedFieldLength) { //treat decimal count as high byte @@ -660,7 +664,7 @@ private void Read(BinaryReader reader, bool allowExtendedFieldLength) } else { - //treat decimal count as high byte + //read just the FieldLength byte as with standard DBF header. nFieldLength = (int)reader.ReadByte(); reader.ReadByte(); }