Fix lint errors

This commit is contained in:
lordwelch 2020-11-26 02:16:40 -08:00
parent 1c7c8e5055
commit 23cb8d1e85
8 changed files with 464 additions and 434 deletions

96
.golangci.yaml Normal file
View File

@ -0,0 +1,96 @@
# Configuration for golangci-lint.
# NOTE(review): leading indentation appears stripped by the page rendering this
# was captured from — confirm nesting against the committed .golangci.yaml.
run:
tests: false
build-tags:
- noasm
output:
# colored-line-number|line-number|json|tab|checkstyle|code-climate, default is "colored-line-number"
format: tab
# print lines of code with issue, default is true
print-issued-lines: true
# print linter name in the end of issue text, default is true
print-linter-name: true
# make issues output unique by line, default is true
uniq-by-line: true
# add a prefix to the output file references; default is no prefix
path-prefix: ""
# sorts results by: filepath, line and column
sort-results: true
# Per-linter settings; each key below configures one enabled linter.
linters-settings:
depguard:
list-type: blacklist
# minimum token-sequence length treated as duplicated code
dupl:
threshold: 100
# maximum function size (lines / statements) before funlen reports it
funlen:
lines: 100
statements: 50
gci:
local-prefixes: github.com/golangci/golangci-lint
goconst:
min-len: 2
min-occurrences: 2
gocritic:
enabled-tags:
- diagnostic
- experimental
- opinionated
- performance
- style
# minimal cyclomatic complexity to report
gocyclo:
min-complexity: 15
golint:
min-confidence: 0
gomnd:
settings:
mnd:
# don't include the "operation" and "assign"
checks: argument,case,condition,return
govet:
check-shadowing: true
# maximum permitted line length
lll:
line-length: 140
maligned:
suggest-new: true
# use US English spellings
misspell:
locale: US
nolintlint:
allow-leading-space: true # don't require machine-readable nolint directives (i.e. with no leading space)
allow-unused: false # report any unused nolint directives
require-explanation: false # don't require an explanation for nolint directives
require-specific: false # don't require nolint directives to be specific about which linter is being skipped
linters:
# please, do not use `enable-all`: it's deprecated and will be removed soon.
# inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint
disable-all: true
enable:
- deadcode
- depguard
- dogsled
- dupl
- errcheck
- funlen
- goconst
- gocritic
- gocyclo
- golint
- gosec
- gosimple
- govet
- ineffassign
- interfacer
- lll
- misspell
- nakedret
- noctx
- nolintlint
- rowserrcheck
- scopelint
- staticcheck
- structcheck
- stylecheck
- typecheck
- unconvert
- unparam
- varcheck
- whitespace

View File

@ -135,43 +135,43 @@ func (v Vec) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
type DataType int type DataType int
const ( const (
DT_None DataType = iota DTNone DataType = iota
DT_Byte DTByte
DT_Short DTShort
DT_UShort DTUShort
DT_Int DTInt
DT_UInt DTUInt
DT_Float DTFloat
DT_Double DTDouble
DT_IVec2 DTIVec2
DT_IVec3 DTIVec3
DT_IVec4 DTIVec4
DT_Vec2 DTVec2
DT_Vec3 DTVec3
DT_Vec4 DTVec4
DT_Mat2 DTMat2
DT_Mat3 DTMat3
DT_Mat3x4 DTMat3x4
DT_Mat4x3 DTMat4x3
DT_Mat4 DTMat4
DT_Bool DTBool
DT_String DTString
DT_Path DTPath
DT_FixedString DTFixedString
DT_LSString DTLSString
DT_ULongLong DTULongLong
DT_ScratchBuffer DTScratchBuffer
// Seems to be unused? // Seems to be unused?
DT_Long DTLong
DT_Int8 DTInt8
DT_TranslatedString DTTranslatedString
DT_WString DTWString
DT_LSWString DTLSWString
DT_UUID DTUUID
DT_Int64 DTInt64
DT_TranslatedFSString DTTranslatedFSString
// Last supported datatype, always keep this one at the end // Last supported datatype, always keep this one at the end
DT_Max = iota - 1 DTMax = iota - 1
) )
func (dt *DataType) MarshalXMLAttr(name xml.Name) (xml.Attr, error) { func (dt *DataType) MarshalXMLAttr(name xml.Name) (xml.Attr, error) {
@ -183,73 +183,73 @@ func (dt *DataType) MarshalXMLAttr(name xml.Name) (xml.Attr, error) {
func (dt DataType) String() string { func (dt DataType) String() string {
switch dt { switch dt {
case DT_None: case DTNone:
return "None" return "None"
case DT_Byte: case DTByte:
return "uint8" return "uint8"
case DT_Short: case DTShort:
return "int16" return "int16"
case DT_UShort: case DTUShort:
return "uint16" return "uint16"
case DT_Int: case DTInt:
return "int32" return "int32"
case DT_UInt: case DTUInt:
return "uint32" return "uint32"
case DT_Float: case DTFloat:
return "float" return "float"
case DT_Double: case DTDouble:
return "double" return "double"
case DT_IVec2: case DTIVec2:
return "ivec2" return "ivec2"
case DT_IVec3: case DTIVec3:
return "ivec3" return "ivec3"
case DT_IVec4: case DTIVec4:
return "ivec4" return "ivec4"
case DT_Vec2: case DTVec2:
return "fvec2" return "fvec2"
case DT_Vec3: case DTVec3:
return "fvec3" return "fvec3"
case DT_Vec4: case DTVec4:
return "fvec4" return "fvec4"
case DT_Mat2: case DTMat2:
return "mat2x2" return "mat2x2"
case DT_Mat3: case DTMat3:
return "mat3x3" return "mat3x3"
case DT_Mat3x4: case DTMat3x4:
return "mat3x4" return "mat3x4"
case DT_Mat4x3: case DTMat4x3:
return "mat4x3" return "mat4x3"
case DT_Mat4: case DTMat4:
return "mat4x4" return "mat4x4"
case DT_Bool: case DTBool:
return "bool" return "bool"
case DT_String: case DTString:
return "string" return "string"
case DT_Path: case DTPath:
return "path" return "path"
case DT_FixedString: case DTFixedString:
return "FixedString" return "FixedString"
case DT_LSString: case DTLSString:
return "LSString" return "LSString"
case DT_ULongLong: case DTULongLong:
return "uint64" return "uint64"
case DT_ScratchBuffer: case DTScratchBuffer:
return "ScratchBuffer" return "ScratchBuffer"
case DT_Long: case DTLong:
return "old_int64" return "old_int64"
case DT_Int8: case DTInt8:
return "int8" return "int8"
case DT_TranslatedString: case DTTranslatedString:
return "TranslatedString" return "TranslatedString"
case DT_WString: case DTWString:
return "WString" return "WString"
case DT_LSWString: case DTLSWString:
return "LSWString" return "LSWString"
case DT_UUID: case DTUUID:
return "guid" return "guid"
case DT_Int64: case DTInt64:
return "int64" return "int64"
case DT_TranslatedFSString: case DTTranslatedFSString:
return "TranslatedFSString" return "TranslatedFSString"
} }
return "" return ""
@ -298,21 +298,21 @@ func (na NodeAttribute) MarshalXML(e *xml.Encoder, start xml.StartElement) error
func (na NodeAttribute) String() string { func (na NodeAttribute) String() string {
switch na.Type { switch na.Type {
case DT_ScratchBuffer: case DTScratchBuffer:
// ScratchBuffer is a special case, as its stored as byte[] and ToString() doesn't really do what we want // ScratchBuffer is a special case, as its stored as byte[] and ToString() doesn't really do what we want
if value, ok := na.Value.([]byte); ok { if value, ok := na.Value.([]byte); ok {
return base64.StdEncoding.EncodeToString(value) return base64.StdEncoding.EncodeToString(value)
} }
return fmt.Sprint(na.Value) return fmt.Sprint(na.Value)
case DT_Double: case DTDouble:
v := na.Value.(float64) v := na.Value.(float64)
if na.Value == 0 { if na.Value == 0 {
na.Value = 0 na.Value = 0
} }
return strconv.FormatFloat(v, 'f', -1, 64) return strconv.FormatFloat(v, 'f', -1, 64)
case DT_Float: case DTFloat:
v := na.Value.(float32) v := na.Value.(float32)
if na.Value == 0 { if na.Value == 0 {
na.Value = 0 na.Value = 0
@ -330,20 +330,20 @@ func (na NodeAttribute) GetRows() (int, error) {
func (dt DataType) GetRows() (int, error) { func (dt DataType) GetRows() (int, error) {
switch dt { switch dt {
case DT_IVec2, DT_IVec3, DT_IVec4, DT_Vec2, DT_Vec3, DT_Vec4: case DTIVec2, DTIVec3, DTIVec4, DTVec2, DTVec3, DTVec4:
return 1, nil return 1, nil
case DT_Mat2: case DTMat2:
return 2, nil return 2, nil
case DT_Mat3, DT_Mat3x4: case DTMat3, DTMat3x4:
return 3, nil return 3, nil
case DT_Mat4x3, DT_Mat4: case DTMat4x3, DTMat4:
return 4, nil return 4, nil
default: default:
return 0, errors.New("Data type does not have rows") return 0, errors.New("data type does not have rows")
} }
} }
@ -353,23 +353,23 @@ func (na NodeAttribute) GetColumns() (int, error) {
func (dt DataType) GetColumns() (int, error) { func (dt DataType) GetColumns() (int, error) {
switch dt { switch dt {
case DT_IVec2, DT_Vec2, DT_Mat2: case DTIVec2, DTVec2, DTMat2:
return 2, nil return 2, nil
case DT_IVec3, DT_Vec3, DT_Mat3, DT_Mat4x3: case DTIVec3, DTVec3, DTMat3, DTMat4x3:
return 3, nil return 3, nil
case DT_IVec4, DT_Vec4, DT_Mat3x4, DT_Mat4: case DTIVec4, DTVec4, DTMat3x4, DTMat4:
return 4, nil return 4, nil
default: default:
return 0, errors.New("Data type does not have columns") return 0, errors.New("data type does not have columns")
} }
} }
func (na NodeAttribute) IsNumeric() bool { func (na NodeAttribute) IsNumeric() bool {
switch na.Type { switch na.Type {
case DT_Byte, DT_Short, DT_Int, DT_UInt, DT_Float, DT_Double, DT_ULongLong, DT_Long, DT_Int8: case DTByte, DTShort, DTInt, DTUInt, DTFloat, DTDouble, DTULongLong, DTLong, DTInt8:
return true return true
default: default:
return false return false
@ -390,58 +390,62 @@ func (na *NodeAttribute) FromString(str string) error {
) )
switch na.Type { switch na.Type {
case DT_None: case DTNone:
// This is a null type, cannot have a value // This is a null type, cannot have a value
case DT_Byte: case DTByte:
na.Value = []byte(str) na.Value = []byte(str)
case DT_Short: case DTShort:
na.Value, err = strconv.ParseInt(str, 0, 16) na.Value, err = strconv.ParseInt(str, 0, 16)
if err != nil { if err != nil {
return err return err
} }
case DT_UShort: case DTUShort:
na.Value, err = strconv.ParseUint(str, 0, 16) na.Value, err = strconv.ParseUint(str, 0, 16)
if err != nil { if err != nil {
return err return err
} }
case DT_Int: case DTInt:
na.Value, err = strconv.ParseInt(str, 0, 32) na.Value, err = strconv.ParseInt(str, 0, 32)
if err != nil { if err != nil {
return err return err
} }
case DT_UInt: case DTUInt:
na.Value, err = strconv.ParseUint(str, 0, 16) na.Value, err = strconv.ParseUint(str, 0, 16)
if err != nil { if err != nil {
return err return err
} }
case DT_Float: case DTFloat:
na.Value, err = strconv.ParseFloat(str, 32) na.Value, err = strconv.ParseFloat(str, 32)
if err != nil { if err != nil {
return err return err
} }
case DT_Double: case DTDouble:
na.Value, err = strconv.ParseFloat(str, 64) na.Value, err = strconv.ParseFloat(str, 64)
if err != nil { if err != nil {
return err return err
} }
case DT_IVec2, DT_IVec3, DT_IVec4: case DTIVec2, DTIVec3, DTIVec4:
var (
nums []string
length int
)
nums := strings.Split(str, ".") nums = strings.Split(str, ".")
length, err := na.GetColumns() length, err = na.GetColumns()
if err != nil { if err != nil {
return err return err
} }
if length != len(nums) { if length != len(nums) {
return fmt.Errorf("A vector of length %d was expected, got %d", length, len(nums)) return fmt.Errorf("a vector of length %d was expected, got %d", length, len(nums))
} }
vec := make([]int, length) vec := make([]int, length)
@ -456,14 +460,18 @@ func (na *NodeAttribute) FromString(str string) error {
na.Value = vec na.Value = vec
case DT_Vec2, DT_Vec3, DT_Vec4: case DTVec2, DTVec3, DTVec4:
nums := strings.Split(str, ".") var (
length, err := na.GetColumns() nums []string
length int
)
nums = strings.Split(str, ".")
length, err = na.GetColumns()
if err != nil { if err != nil {
return err return err
} }
if length != len(nums) { if length != len(nums) {
return fmt.Errorf("A vector of length %d was expected, got %d", length, len(nums)) return fmt.Errorf("a vector of length %d was expected, got %d", length, len(nums))
} }
vec := make([]float64, length) vec := make([]float64, length)
@ -476,7 +484,7 @@ func (na *NodeAttribute) FromString(str string) error {
na.Value = vec na.Value = vec
case DT_Mat2, DT_Mat3, DT_Mat3x4, DT_Mat4x3, DT_Mat4: case DTMat2, DTMat3, DTMat3x4, DTMat4x3, DTMat4:
// var mat = Matrix.Parse(str); // var mat = Matrix.Parse(str);
// if (mat.cols != na.GetColumns() || mat.rows != na.GetRows()){ // if (mat.cols != na.GetColumns() || mat.rows != na.GetRows()){
// return errors.New("Invalid column/row count for matrix"); // return errors.New("Invalid column/row count for matrix");
@ -484,16 +492,16 @@ func (na *NodeAttribute) FromString(str string) error {
// value = mat; // value = mat;
return errors.New("not implemented") return errors.New("not implemented")
case DT_Bool: case DTBool:
na.Value, err = strconv.ParseBool(str) na.Value, err = strconv.ParseBool(str)
if err != nil { if err != nil {
return err return err
} }
case DT_String, DT_Path, DT_FixedString, DT_LSString, DT_WString, DT_LSWString: case DTString, DTPath, DTFixedString, DTLSString, DTWString, DTLSWString:
na.Value = str na.Value = str
case DT_TranslatedString: case DTTranslatedString:
// // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part // // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part
// // That can be changed separately via attribute.Value.Handle // // That can be changed separately via attribute.Value.Handle
// if (value == null) // if (value == null)
@ -501,7 +509,7 @@ func (na *NodeAttribute) FromString(str string) error {
// ((TranslatedString)value).Value = str; // ((TranslatedString)value).Value = str;
case DT_TranslatedFSString: case DTTranslatedFSString:
// // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part // // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part
// // That can be changed separately via attribute.Value.Handle // // That can be changed separately via attribute.Value.Handle
// if (value == null) // if (value == null)
@ -509,28 +517,31 @@ func (na *NodeAttribute) FromString(str string) error {
// ((TranslatedFSString)value).Value = str; // ((TranslatedFSString)value).Value = str;
case DT_ULongLong: case DTULongLong:
na.Value, err = strconv.ParseUint(str, 10, 64) na.Value, err = strconv.ParseUint(str, 10, 64)
if err != nil {
return err
}
case DT_ScratchBuffer: case DTScratchBuffer:
na.Value, err = base64.StdEncoding.DecodeString(str) na.Value, err = base64.StdEncoding.DecodeString(str)
if err != nil { if err != nil {
return err return err
} }
case DT_Long, DT_Int64: case DTLong, DTInt64:
na.Value, err = strconv.ParseInt(str, 10, 64) na.Value, err = strconv.ParseInt(str, 10, 64)
if err != nil { if err != nil {
return err return err
} }
case DT_Int8: case DTInt8:
na.Value, err = strconv.ParseInt(str, 10, 8) na.Value, err = strconv.ParseInt(str, 10, 8)
if err != nil { if err != nil {
return err return err
} }
case DT_UUID: case DTUUID:
na.Value, err = uuid.Parse(str) na.Value, err = uuid.Parse(str)
if err != nil { if err != nil {
return err return err
@ -538,7 +549,7 @@ func (na *NodeAttribute) FromString(str string) error {
default: default:
// This should not happen! // This should not happen!
return fmt.Errorf("FromString() not implemented for type %v", na.Type) return fmt.Errorf("not implemented for type %v", na.Type)
} }
return nil return nil
} }

View File

@ -8,7 +8,6 @@ import (
"fmt" "fmt"
"io" "io"
"io/ioutil" "io/ioutil"
"math"
"github.com/go-kit/kit/log" "github.com/go-kit/kit/log"
"github.com/google/uuid" "github.com/google/uuid"
@ -63,9 +62,9 @@ func CompressionFlagsToLevel(flags byte) CompressionLevel {
return MaxCompression return MaxCompression
default: default:
panic(errors.New("Invalid compression flags")) panic(errors.New("invalid compression flags"))
} }
return 0 // return 0
} }
func MakeCompressionFlags(method CompressionMethod, level CompressionLevel) int { func MakeCompressionFlags(method CompressionMethod, level CompressionLevel) int {
@ -108,22 +107,21 @@ func Decompress(compressed io.Reader, uncompressedSize int, compressionFlags byt
panic(err) panic(err)
} }
return bytes.NewReader(p) return bytes.NewReader(p)
} else { }
// logger.Println("lz4 block compressed") // logger.Println("lz4 block compressed")
// panic(errors.New("not implemented")) // panic(errors.New("not implemented"))
src, _ := ioutil.ReadAll(compressed) src, _ := ioutil.ReadAll(compressed)
// logger.Println(len(src)) // logger.Println(len(src))
dst := make([]byte, uncompressedSize*2) dst := make([]byte, uncompressedSize*2)
_, err := lz4.UncompressBlock(src, dst) _, err := lz4.UncompressBlock(src, dst)
if err != nil { if err != nil {
panic(err) panic(err)
}
return bytes.NewReader(dst)
} }
return bytes.NewReader(dst)
default: default:
panic(fmt.Errorf("No decompressor found for this format: %v", compressionFlags)) panic(fmt.Errorf("no decompressor found for this format: %v", compressionFlags))
} }
} }
@ -142,26 +140,6 @@ func ReadCString(r io.Reader, length int) (string, error) {
return string(buf[:clen(buf)]), nil return string(buf[:clen(buf)]), nil
} }
func roundFloat(x float64, prec int) float64 {
var rounder float64
pow := math.Pow(10, float64(prec))
intermed := x * pow
_, frac := math.Modf(intermed)
intermed += .5
x = .5
if frac < 0.0 {
x = -.5
intermed -= 1
}
if frac >= x {
rounder = math.Ceil(intermed)
} else {
rounder = math.Floor(intermed)
}
return rounder / pow
}
func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log.Logger) (NodeAttribute, error) { func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log.Logger) (NodeAttribute, error) {
var ( var (
attr = NodeAttribute{ attr = NodeAttribute{
@ -176,91 +154,83 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
pos, err = r.Seek(0, io.SeekCurrent) pos, err = r.Seek(0, io.SeekCurrent)
switch DT { switch DT {
case DT_None: case DTNone:
l.Log("member", name, "read", length, "start position", pos, "value", nil) l.Log("member", name, "read", length, "start position", pos, "value", nil)
pos += int64(length)
return attr, nil return attr, nil
case DT_Byte: case DTByte:
p := make([]byte, 1) p := make([]byte, 1)
n, err = r.Read(p) n, err = r.Read(p)
attr.Value = p[0] attr.Value = p[0]
l.Log("member", name, "read", n, "start position", pos, "value", attr.Value) l.Log("member", name, "read", n, "start position", pos, "value", attr.Value)
pos += int64(n)
return attr, err return attr, err
case DT_Short: case DTShort:
var v int16 var v int16
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_UShort: case DTUShort:
var v uint16 var v uint16
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_Int: case DTInt:
var v int32 var v int32
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_UInt: case DTUInt:
var v uint32 var v uint32
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_Float: case DTFloat:
var v float32 var v float32
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_Double: case DTDouble:
var v float64 var v float64
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_IVec2, DT_IVec3, DT_IVec4: case DTIVec2, DTIVec3, DTIVec4:
var col int var col int
col, err = attr.GetColumns() col, err = attr.GetColumns()
if err != nil { if err != nil {
return attr, err return attr, err
} }
vec := make(Ivec, col) vec := make(Ivec, col)
for i, _ := range vec { for i := range vec {
var v int32 var v int32
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
if err != nil { if err != nil {
@ -271,18 +241,17 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = vec attr.Value = vec
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, nil return attr, nil
case DT_Vec2, DT_Vec3, DT_Vec4: case DTVec2, DTVec3, DTVec4:
var col int var col int
col, err = attr.GetColumns() col, err = attr.GetColumns()
if err != nil { if err != nil {
return attr, err return attr, err
} }
vec := make(Vec, col) vec := make(Vec, col)
for i, _ := range vec { for i := range vec {
var v float32 var v float32
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
if err != nil { if err != nil {
@ -293,11 +262,10 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = vec attr.Value = vec
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, nil return attr, nil
case DT_Mat2, DT_Mat3, DT_Mat3x4, DT_Mat4x3, DT_Mat4: case DTMat2, DTMat3, DTMat3x4, DTMat4x3, DTMat4:
var ( var (
row int row int
col int col int
@ -325,54 +293,52 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = (*Mat)(mat.NewDense(row, col, []float64(vec))) attr.Value = (*Mat)(mat.NewDense(row, col, []float64(vec)))
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, nil return attr, nil
case DT_Bool: case DTBool:
var v bool var v bool
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_ULongLong: case DTULongLong:
var v uint64 var v uint64
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_Long, DT_Int64: case DTLong, DTInt64:
var v int64 var v int64
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_Int8: case DTInt8:
var v int8 var v int8
err = binary.Read(r, binary.LittleEndian, &v) err = binary.Read(r, binary.LittleEndian, &v)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
pos += int64(length)
return attr, err return attr, err
case DT_UUID: case DTUUID:
var v uuid.UUID var v uuid.UUID
p := make([]byte, 16) p := make([]byte, 16)
n, err = r.Read(p) n, err = r.Read(p)
if err != nil {
return attr, err
}
reverse(p[:4]) reverse(p[:4])
reverse(p[4:6]) reverse(p[4:6])
reverse(p[6:8]) reverse(p[6:8])
@ -380,7 +346,6 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
attr.Value = v attr.Value = v
l.Log("member", name, "read", n, "start position", pos, "value", attr.Value) l.Log("member", name, "read", n, "start position", pos, "value", attr.Value)
pos += int64(n)
return attr, err return attr, err
@ -388,18 +353,18 @@ func ReadAttribute(r io.ReadSeeker, name string, DT DataType, length uint, l log
// Strings are serialized differently for each file format and should be // Strings are serialized differently for each file format and should be
// handled by the format-specific ReadAttribute() // handled by the format-specific ReadAttribute()
// pretty.Log(attr) // pretty.Log(attr)
return attr, fmt.Errorf("ReadAttribute() not implemented for type %v", DT) return attr, fmt.Errorf("readAttribute() not implemented for type %v", DT)
} }
return attr, nil // return attr, nil
} }
// LimitReader returns a Reader that reads from r // LimitReadSeeker returns a Reader that reads from r
// but stops with EOF after n bytes. // but stops with EOF after n bytes.
// The underlying implementation is a *LimitedReader. // The underlying implementation is a *LimitedReader.
func LimitReadSeeker(r io.ReadSeeker, n int64) io.ReadSeeker { return &LimitedReadSeeker{r, n} } func LimitReadSeeker(r io.ReadSeeker, n int64) io.ReadSeeker { return &LimitedReadSeeker{r, n} }
// A LimitedReader reads from R but limits the amount of // A LimitedReadSeeker reads from R but limits the amount of
// data returned to just N bytes. Each call to Read // data returned to just N bytes. Each call to Read
// updates N to reflect the new amount remaining. // updates N to reflect the new amount remaining.
// Read returns EOF when N <= 0 or when the underlying R returns EOF. // Read returns EOF when N <= 0 or when the underlying R returns EOF.

View File

@ -41,14 +41,17 @@ func main() {
fmt.Fprintln(os.Stderr, err) fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)
} }
if !fi.IsDir() { switch {
case !fi.IsDir():
err = openLSF(v) err = openLSF(v)
if err != nil && !errors.As(err, &lslib.HeaderError{}) { if err != nil && !errors.As(err, &lslib.HeaderError{}) {
fmt.Fprintln(os.Stderr, err) fmt.Fprintln(os.Stderr, err)
os.Exit(1) os.Exit(1)
} }
} else if *recurse {
filepath.Walk(v, func(path string, info os.FileInfo, err error) error { case *recurse:
_ = filepath.Walk(v, func(path string, info os.FileInfo, err error) error {
if err != nil { if err != nil {
return nil return nil
} }
@ -64,7 +67,8 @@ func main() {
} }
return nil return nil
}) })
} else {
default:
fmt.Fprintf(os.Stderr, "lsconvert: %s: Is a directory\n", v) fmt.Fprintf(os.Stderr, "lsconvert: %s: Is a directory\n", v)
os.Exit(1) os.Exit(1)
} }
@ -75,11 +79,14 @@ func openLSF(filename string) error {
l *lslib.Resource l *lslib.Resource
err error err error
n string n string
f strwr f interface {
io.Writer
io.StringWriter
}
) )
l, err = readLSF(filename) l, err = readLSF(filename)
if err != nil { if err != nil {
return fmt.Errorf("Reading LSF file %s failed: %w\n", filename, err) return fmt.Errorf("reading LSF file %s failed: %w", filename, err)
} }
if *printResource { if *printResource {
pretty.Log(l) pretty.Log(l)
@ -87,13 +94,13 @@ func openLSF(filename string) error {
if *printXML || *write { if *printXML || *write {
n, err = marshalXML(l) n, err = marshalXML(l)
if err != nil { if err != nil {
return fmt.Errorf("Creating XML from LSF file %s failed: %w\n", filename, err) return fmt.Errorf("creating XML from LSF file %s failed: %w", filename, err)
} }
if *write { if *write {
f, err = os.OpenFile(filename, os.O_TRUNC|os.O_RDWR, 0o666) f, err = os.OpenFile(filename, os.O_TRUNC|os.O_RDWR, 0o666)
if err != nil { if err != nil {
return fmt.Errorf("Writing XML from LSF file %s failed: %w\n", filename, err) return fmt.Errorf("writing XML from LSF file %s failed: %w", filename, err)
} }
} else if *printXML { } else if *printXML {
f = os.Stdout f = os.Stdout
@ -102,7 +109,7 @@ func openLSF(filename string) error {
err = writeXML(f, n) err = writeXML(f, n)
fmt.Fprint(f, "\n") fmt.Fprint(f, "\n")
if err != nil { if err != nil {
return fmt.Errorf("Writing XML from LSF file %s failed: %w\n", filename, err) return fmt.Errorf("writing XML from LSF file %s failed: %w", filename, err)
} }
} }
return nil return nil
@ -115,10 +122,10 @@ func readLSF(filename string) (*lslib.Resource, error) {
err error err error
) )
f, err = os.Open(filename) f, err = os.Open(filename)
defer f.Close()
if err != nil { if err != nil {
return nil, err return nil, err
} }
defer f.Close()
l, err = lslib.ReadLSF(f) l, err = lslib.ReadLSF(f)
if err != nil { if err != nil {
@ -149,12 +156,7 @@ func marshalXML(l *lslib.Resource) (string, error) {
return n, nil return n, nil
} }
type strwr interface { func writeXML(f io.StringWriter, n string) error {
io.Writer
io.StringWriter
}
func writeXML(f strwr, n string) error {
var ( var (
err error err error
) )

View File

@ -5,29 +5,19 @@ import "errors"
type FileVersion uint32 type FileVersion uint32
const ( const (
/// <summary> // Initial version of the LSF format
/// Initial version of the LSF format
/// </summary>
VerInitial FileVersion = iota + 1 VerInitial FileVersion = iota + 1
/// <summary> // LSF version that added chunked compression for substreams
/// LSF version that added chunked compression for substreams
/// </summary>
VerChunkedCompress VerChunkedCompress
/// <summary> // LSF version that extended the node descriptors
/// LSF version that extended the node descriptors
/// </summary>
VerExtendedNodes VerExtendedNodes
/// <summary> // BG3 version, no changes found so far apart from version numbering
/// BG3 version, no changes found so far apart from version numbering
/// </summary>
VerBG3 VerBG3
/// <summary> // Latest version supported by this library
/// Latest version supported by this library
/// </summary>
MaxVersion = iota MaxVersion = iota
) )

64
lsb.go
View File

@ -21,13 +21,13 @@ type LSBHeader struct {
} }
type LSBRegion struct { type LSBRegion struct {
name string name string
offset offset uint32
} }
type IdentifierDictionary map[int]string type IdentifierDictionary map[int]string
func ReadLSBDictionary(r io.Reader, endianness binary.ByteOrder) (IdentifierDictionary, error) { func ReadLSBDictionary(r io.ReadSeeker, endianness binary.ByteOrder) (IdentifierDictionary, error) {
var ( var (
dict IdentifierDictionary dict IdentifierDictionary
size uint32 size uint32
@ -56,11 +56,12 @@ func ReadLSBDictionary(r io.Reader, endianness binary.ByteOrder) (IdentifierDict
if err != nil { if err != nil {
return dict, err return dict, err
} }
dict[key] = str dict[int(key)] = str
} }
return dict, nil
} }
func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteOrder) (Resource, error) { func ReadLSBRegions(r io.ReadSeeker, d IdentifierDictionary, endianness binary.ByteOrder) (Resource, error) {
var ( var (
nodes []struct { nodes []struct {
node *Node node *Node
@ -72,9 +73,12 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
err = binary.Read(r, endianness, &nodeCount) err = binary.Read(r, endianness, &nodeCount)
if err != nil { if err != nil {
return dict, err return Resource{}, err
} }
nodes = make([]struct{ Node, offset uint32 }, nodeCount) nodes = make([]struct {
node *Node
offset uint32
}, nodeCount)
for _, n := range nodes { for _, n := range nodes {
var ( var (
key uint32 key uint32
@ -82,7 +86,7 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
) )
err = binary.Read(r, endianness, &key) err = binary.Read(r, endianness, &key)
if err != nil { if err != nil {
return dict, err return Resource{}, err
} }
n.node = new(Node) n.node = new(Node)
if n.node.Name, ok = d[int(key)]; !ok { if n.node.Name, ok = d[int(key)]; !ok {
@ -90,21 +94,21 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
} }
err = binary.Read(r, endianness, &n.offset) err = binary.Read(r, endianness, &n.offset)
if err != nil { if err != nil {
return dict, err return Resource{}, err
} }
} }
// TODO: Sort // TODO: Sort
for _, n := range nodes { for _, n := range nodes {
var ( var (
key uint32 key uint32
attrCount uint32 attrCount uint32
childCount uint32 // childCount uint32
) )
// TODO: Check offset // TODO: Check offset
err = binary.Read(r, endianness, &key) err = binary.Read(r, endianness, &key)
if err != nil { if err != nil {
return dict, err return Resource{}, err
} }
// if keyV, ok := d[int(key)]; !ok { // if keyV, ok := d[int(key)]; !ok {
// return Resource{}, ErrKeyDoesNotMatch // return Resource{}, ErrKeyDoesNotMatch
@ -112,36 +116,39 @@ func ReadLSBRegions(r io.Reader, d IdentifierDictionary, endianness binary.ByteO
err = binary.Read(r, endianness, &attrCount) err = binary.Read(r, endianness, &attrCount)
if err != nil { if err != nil {
return dict, err return Resource{}, err
} }
n.node.Attributes = make([]NodeAttribute, int(attrCount)) n.node.Attributes = make([]NodeAttribute, int(attrCount))
err = binary.Read(r, endianness, &nodeCount) err = binary.Read(r, endianness, &nodeCount)
if err != nil { if err != nil {
return dict, err return Resource{}, err
} }
} }
return Resource{}, nil
} }
func readLSBAttribute(r io.Reader) (NodeAttribute, err) { func readLSBAttribute(r io.ReadSeeker, d IdentifierDictionary, endianness binary.ByteOrder) (NodeAttribute, error) {
var ( var (
key uint32 key uint32
name string name string
attrType uint32 attrType uint32
attr NodeAttribute attr NodeAttribute
err error
ok bool
) )
err = binary.Read(r, endianness, &key) err = binary.Read(r, endianness, &key)
if err != nil { if err != nil {
return dict, err return attr, err
} }
if name, ok = d[int(key)]; !ok { if name, ok = d[int(key)]; !ok {
return Resource{}, ErrInvalidNameKey return attr, ErrInvalidNameKey
} }
err = binary.Read(r, endianness, &attrType) err = binary.Read(r, endianness, &attrType)
if err != nil { if err != nil {
return dict, err return attr, err
} }
ReadLSBAttribute(r, name, DataType(attrType)) return ReadLSBAttr(r, name, DataType(attrType))
} }
func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, error) { func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, error) {
@ -153,7 +160,8 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
Type: DT, Type: DT,
Name: name, Name: name,
} }
err error err error
length uint32
l log.Logger l log.Logger
pos int64 pos int64
@ -162,7 +170,7 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
pos, err = r.Seek(0, io.SeekCurrent) pos, err = r.Seek(0, io.SeekCurrent)
switch DT { switch DT {
case DT_String, DT_Path, DT_FixedString, DT_LSString, DT_WString, DT_LSWString: case DTString, DTPath, DTFixedString, DTLSString, DTWString, DTLSWString:
var v string var v string
v, err = ReadCString(r, int(length)) v, err = ReadCString(r, int(length))
attr.Value = v attr.Value = v
@ -172,9 +180,9 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err return attr, err
case DT_TranslatedString: case DTTranslatedString:
var v TranslatedString var v TranslatedString
v, err = ReadTranslatedString(r, Version, EngineVersion) // v, err = ReadTranslatedString(r, Version, EngineVersion)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -182,9 +190,9 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err return attr, err
case DT_TranslatedFSString: case DTTranslatedFSString:
var v TranslatedFSString var v TranslatedFSString
v, err = ReadTranslatedFSString(r, Version) // v, err = ReadTranslatedFSString(r, Version)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -192,7 +200,7 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err return attr, err
case DT_ScratchBuffer: case DTScratchBuffer:
v := make([]byte, length) v := make([]byte, length)
_, err = r.Read(v) _, err = r.Read(v)
@ -204,6 +212,6 @@ func ReadLSBAttr(r io.ReadSeeker, name string, DT DataType) (NodeAttribute, erro
return attr, err return attr, err
default: default:
return ReadAttribute(r, name, DT, length, l) return ReadAttribute(r, name, DT, uint(length), l)
} }
} }

332
lsf.go
View File

@ -1,7 +1,6 @@
package lslib package lslib
import ( import (
"bufio"
"encoding/binary" "encoding/binary"
"fmt" "fmt"
"io" "io"
@ -52,64 +51,48 @@ func (f filter) Log(keyvals ...interface{}) error {
} }
type LSFHeader struct { type LSFHeader struct {
/// summary // LSOF file signature
/// LSOF file signature
/// /summary
Signature [4]byte Signature [4]byte
/// summary // Version of the LSOF file D:OS EE is version 1/2, D:OS 2 is version 3
/// Version of the LSOF file D:OS EE is version 1/2, D:OS 2 is version 3
/// /summary
Version FileVersion Version FileVersion
/// summary
/// Possibly version number? (major, minor, rev, build) // Possibly version number? (major, minor, rev, build)
/// /summary
EngineVersion uint32 EngineVersion uint32
/// summary
/// Total uncompressed size of the string hash table // Total uncompressed size of the string hash table
/// /summary
StringsUncompressedSize uint32 StringsUncompressedSize uint32
/// summary
/// Compressed size of the string hash table // Compressed size of the string hash table
/// /summary
StringsSizeOnDisk uint32 StringsSizeOnDisk uint32
/// summary
/// Total uncompressed size of the node list // Total uncompressed size of the node list
/// /summary
NodesUncompressedSize uint32 NodesUncompressedSize uint32
/// summary
/// Compressed size of the node list // Compressed size of the node list
/// /summary
NodesSizeOnDisk uint32 NodesSizeOnDisk uint32
/// summary
/// Total uncompressed size of the attribute list // Total uncompressed size of the attribute list
/// /summary
AttributesUncompressedSize uint32 AttributesUncompressedSize uint32
/// summary
/// Compressed size of the attribute list // Compressed size of the attribute list
/// /summary
AttributesSizeOnDisk uint32 AttributesSizeOnDisk uint32
/// summary
/// Total uncompressed size of the raw value buffer // Total uncompressed size of the raw value buffer
/// /summary
ValuesUncompressedSize uint32 ValuesUncompressedSize uint32
/// summary
/// Compressed size of the raw value buffer // Compressed size of the raw value buffer
/// /summary
ValuesSizeOnDisk uint32 ValuesSizeOnDisk uint32
/// summary // summary
/// Compression method and level used for the string, node, attribute and value buffers.
/// Uses the same format as packages (see BinUtils.MakeCompressionFlags) // Uses the same format as packages (see BinUtils.MakeCompressionFlags)
/// /summary
CompressionFlags byte CompressionFlags byte
/// summary
/// Possibly unused, always 0 // Possibly unused, always 0
/// /summary
Unknown2 byte Unknown2 byte
Unknown3 uint16 Unknown3 uint16
/// summary
/// Extended node/attribute format indicator, 0 for V2, 0/1 for V3 // Extended node/attribute format indicator, 0 for V2, 0/1 for V3
/// /summary
Extended uint32 Extended uint32
} }
@ -256,28 +239,24 @@ func (lsfh LSFHeader) IsCompressed() bool {
type NodeEntry struct { type NodeEntry struct {
Long bool Long bool
/// summary // summary
/// Name of this node
/// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) // (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain)
/// /summary
NameHashTableIndex uint32 NameHashTableIndex uint32
/// summary // summary
/// Index of the first attribute of this node
/// (-1: node has no attributes) // (-1: node has no attributes)
/// /summary
FirstAttributeIndex int32 FirstAttributeIndex int32
/// summary // summary
/// Index of the parent node
/// (-1: this node is a root region) // (-1: this node is a root region)
/// /summary
ParentIndex int32 ParentIndex int32
/// summary // summary
/// Index of the next sibling of this node
/// (-1: this is the last node) // (-1: this is the last node)
/// /summary
NextSiblingIndex int32 NextSiblingIndex int32
} }
@ -378,65 +357,49 @@ func (ne NodeEntry) NameOffset() int {
return int(ne.NameHashTableIndex & 0xffff) return int(ne.NameHashTableIndex & 0xffff)
} }
/// summary // Processed node information for a node in the LSF file
/// Processed node information for a node in the LSF file
/// /summary
type NodeInfo struct { type NodeInfo struct {
/// summary // summary
/// Index of the parent node
/// (-1: this node is a root region) // (-1: this node is a root region)
/// /summary
ParentIndex int ParentIndex int
/// summary // Index into name hash table
/// Index into name hash table
/// /summary
NameIndex int NameIndex int
/// summary // Offset in hash chain
/// Offset in hash chain
/// /summary
NameOffset int NameOffset int
/// summary // summary
/// Index of the first attribute of this node
/// (-1: node has no attributes) // (-1: node has no attributes)
/// /summary
FirstAttributeIndex int FirstAttributeIndex int
} }
/// summary // attribute extension in the LSF file
/// attribute extension in the LSF file
/// /summary
type AttributeEntry struct { type AttributeEntry struct {
Long bool Long bool
/// summary // summary
/// Name of this attribute
/// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) // (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain)
/// /summary
NameHashTableIndex uint32 NameHashTableIndex uint32
/// summary // summary
/// 6-bit LSB: Type of this attribute (see NodeAttribute.DataType)
/// 26-bit MSB: Length of this attribute // 26-bit MSB: Length of this attribute
/// /summary
TypeAndLength uint32 TypeAndLength uint32
/// summary // summary
/// Index of the node that this attribute belongs to
/// Note: These indexes are assigned seemingly arbitrarily, and are not neccessarily indices into the node list // Note: These indexes are assigned seemingly arbitrarily, and are not necessarily indices into the node list
/// /summary
NodeIndex int32 NodeIndex int32
/// summary // summary
/// Index of the node that this attribute belongs to
/// Note: These indexes are assigned seemingly arbitrarily, and are not neccessarily indices into the node list // Note: These indexes are assigned seemingly arbitrarily, and are not necessarily indices into the node list
/// /summary
NextAttributeIndex int32 NextAttributeIndex int32
/// <summary> // Absolute position of attribute value in the value stream
/// Absolute position of attribute value in the value stream
/// </summary>
Offset uint32 Offset uint32
} }
@ -529,30 +492,22 @@ func (ae *AttributeEntry) readLong(r io.ReadSeeker) error {
return nil return nil
} }
/// summary // Index into name hash table
/// Index into name hash table
/// /summary
func (ae AttributeEntry) NameIndex() int { func (ae AttributeEntry) NameIndex() int {
return int(ae.NameHashTableIndex >> 16) return int(ae.NameHashTableIndex >> 16)
} }
/// summary // Offset in hash chain
/// Offset in hash chain
/// /summary
func (ae AttributeEntry) NameOffset() int { func (ae AttributeEntry) NameOffset() int {
return int(ae.NameHashTableIndex & 0xffff) return int(ae.NameHashTableIndex & 0xffff)
} }
/// summary // Type of this attribute (see NodeAttribute.DataType)
/// Type of this attribute (see NodeAttribute.DataType)
/// /summary
func (ae AttributeEntry) TypeID() DataType { func (ae AttributeEntry) TypeID() DataType {
return DataType(ae.TypeAndLength & 0x3f) return DataType(ae.TypeAndLength & 0x3f)
} }
/// summary // Length of this attribute
/// Length of this attribute
/// /summary
func (ae AttributeEntry) Len() int { func (ae AttributeEntry) Len() int {
return int(ae.TypeAndLength >> 6) return int(ae.TypeAndLength >> 6)
} }
@ -560,35 +515,24 @@ func (ae AttributeEntry) Len() int {
type AttributeInfo struct { type AttributeInfo struct {
V2 bool V2 bool
/// summary // Index into name hash table
/// Index into name hash table
/// /summary
NameIndex int NameIndex int
/// summary
/// Offset in hash chain
/// /summary
NameOffset int
/// summary
/// Type of this attribute (see NodeAttribute.DataType)
/// /summary
TypeId DataType
/// summary
/// Length of this attribute
/// /summary
Length uint
/// summary
/// Absolute position of attribute data in the values section
/// /summary
DataOffset uint
/// summary
/// Index of the next attribute in this node
/// (-1: this is the last attribute)
/// /summary
NextAttributeIndex int
}
type LSFReader struct { // Offset in hash chain
data *bufio.Reader NameOffset int
// Type of this attribute (see NodeAttribute.DataType)
TypeID DataType
// Length of this attribute
Length uint
// Absolute position of attribute data in the values section
DataOffset uint
// summary
// (-1: this is the last attribute)
NextAttributeIndex int
} }
// extract to lsf package // extract to lsf package
@ -614,7 +558,7 @@ func ReadNames(r io.ReadSeeker) ([][]string, error) {
pos += int64(n) pos += int64(n)
names = make([][]string, int(numHashEntries)) names = make([][]string, int(numHashEntries))
for i, _ := range names { for i := range names {
var numStrings uint16 var numStrings uint16
@ -624,7 +568,7 @@ func ReadNames(r io.ReadSeeker) ([][]string, error) {
pos += int64(n) pos += int64(n)
var hash = make([]string, int(numStrings)) var hash = make([]string, int(numStrings))
for x, _ := range hash { for x := range hash {
var ( var (
nameLen uint16 nameLen uint16
name []byte name []byte
@ -686,10 +630,8 @@ func readNodeInfo(r io.ReadSeeker, longNodes bool) ([]NodeInfo, error) {
return nodes[:len(nodes)-1], err return nodes[:len(nodes)-1], err
} }
/// <summary> // Reads the attribute headers for the LSOF resource
/// Reads the attribute headers for the LSOF resource // <param name="s">Stream to read the attribute headers from</param>
/// </summary>
/// <param name="s">Stream to read the attribute headers from</param>
func readAttributeInfo(r io.ReadSeeker, long bool) []AttributeInfo { func readAttributeInfo(r io.ReadSeeker, long bool) []AttributeInfo {
// var rawAttributes = new List<AttributeEntryV2>(); // var rawAttributes = new List<AttributeEntryV2>();
@ -717,7 +659,7 @@ func readAttributeInfo(r io.ReadSeeker, long bool) []AttributeInfo {
resolved := AttributeInfo{ resolved := AttributeInfo{
NameIndex: attribute.NameIndex(), NameIndex: attribute.NameIndex(),
NameOffset: attribute.NameOffset(), NameOffset: attribute.NameOffset(),
TypeId: attribute.TypeID(), TypeID: attribute.TypeID(),
Length: uint(attribute.Len()), Length: uint(attribute.Len()),
DataOffset: dataOffset, DataOffset: dataOffset,
NextAttributeIndex: nextAttrIndex, NextAttributeIndex: nextAttrIndex,
@ -730,7 +672,7 @@ func readAttributeInfo(r io.ReadSeeker, long bool) []AttributeInfo {
attributes[indexOfLastAttr].NextAttributeIndex = index attributes[indexOfLastAttr].NextAttributeIndex = index
} }
// set the previous attribute of this node to the current attribute, we are done with it and at the end of the loop // set the previous attribute of this node to the current attribute, we are done with it and at the end of the loop
dataOffset += uint(resolved.Length) dataOffset += resolved.Length
prevAttributeRefs[int(attribute.NodeIndex)] = index prevAttributeRefs[int(attribute.NodeIndex)] = index
} }
@ -775,21 +717,17 @@ func (he HeaderError) Error() string {
func ReadLSF(r io.ReadSeeker) (Resource, error) { func ReadLSF(r io.ReadSeeker) (Resource, error) {
var ( var (
err error err error
/// summary
/// Static string hash map // Static string hash map
/// /summary
names [][]string names [][]string
/// summary
/// Preprocessed list of nodes (structures) // Preprocessed list of nodes (structures)
/// /summary
nodeInfo []NodeInfo nodeInfo []NodeInfo
/// summary
/// Preprocessed list of node attributes // Preprocessed list of node attributes
/// /summary
attributeInfo []AttributeInfo attributeInfo []AttributeInfo
/// summary
/// Node instances // Node instances
/// /summary
nodeInstances []*Node nodeInstances []*Node
) )
var ( var (
@ -799,7 +737,7 @@ func ReadLSF(r io.ReadSeeker) (Resource, error) {
) )
l = log.With(Logger, "component", "LS converter", "file type", "lsf", "part", "file") l = log.With(Logger, "component", "LS converter", "file type", "lsf", "part", "file")
pos, err = r.Seek(0, io.SeekCurrent) pos, err = r.Seek(0, io.SeekCurrent)
l.Log("member", "LSF header", "start position", pos) l.Log("member", "header", "start position", pos)
hdr := &LSFHeader{} hdr := &LSFHeader{}
err = hdr.Read(r) err = hdr.Read(r)
@ -922,10 +860,10 @@ func ReadLSF(r io.ReadSeeker) (Resource, error) {
} }
} }
res.Metadata.MajorVersion = (hdr.EngineVersion & 0xf0000000) >> 28 res.Metadata.Major = (hdr.EngineVersion & 0xf0000000) >> 28
res.Metadata.MinorVersion = (hdr.EngineVersion & 0xf000000) >> 24 res.Metadata.Minor = (hdr.EngineVersion & 0xf000000) >> 24
res.Metadata.Revision = (hdr.EngineVersion & 0xff0000) >> 16 res.Metadata.Revision = (hdr.EngineVersion & 0xff0000) >> 16
res.Metadata.BuildNumber = (hdr.EngineVersion & 0xffff) res.Metadata.Build = (hdr.EngineVersion & 0xffff)
// pretty.Log(res) // pretty.Log(res)
return res, nil return res, nil
@ -934,11 +872,11 @@ func ReadLSF(r io.ReadSeeker) (Resource, error) {
var valueStart int64 var valueStart int64
func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attributeInfo []AttributeInfo, Version FileVersion, EngineVersion uint32) ([]*Node, error) { func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attributeInfo []AttributeInfo, version FileVersion, engineVersion uint32) ([]*Node, error) {
NodeInstances := make([]*Node, 0, len(nodeInfo)) NodeInstances := make([]*Node, 0, len(nodeInfo))
for _, nodeInfo := range nodeInfo { for _, nodeInfo := range nodeInfo {
if nodeInfo.ParentIndex == -1 { if nodeInfo.ParentIndex == -1 {
region, err := ReadNode(r, nodeInfo, names, attributeInfo, Version, EngineVersion) region, err := ReadNode(r, nodeInfo, names, attributeInfo, version, engineVersion)
// pretty.Log(err, region) // pretty.Log(err, region)
@ -949,7 +887,7 @@ func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attribu
return NodeInstances, err return NodeInstances, err
} }
} else { } else {
node, err := ReadNode(r, nodeInfo, names, attributeInfo, Version, EngineVersion) node, err := ReadNode(r, nodeInfo, names, attributeInfo, version, engineVersion)
// pretty.Log(err, node) // pretty.Log(err, node)
@ -965,7 +903,7 @@ func ReadRegions(r io.ReadSeeker, names [][]string, nodeInfo []NodeInfo, attribu
return NodeInstances, nil return NodeInstances, nil
} }
func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []AttributeInfo, Version FileVersion, EngineVersion uint32) (Node, error) { func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []AttributeInfo, version FileVersion, engineVersion uint32) (Node, error) {
var ( var (
node = Node{} node = Node{}
index = ni.FirstAttributeIndex index = ni.FirstAttributeIndex
@ -993,7 +931,7 @@ func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []At
panic("shit") panic("shit")
} }
} }
v, err = ReadLSFAttribute(r, names[attribute.NameIndex][attribute.NameOffset], attribute.TypeId, attribute.Length, Version, EngineVersion) v, err = ReadLSFAttribute(r, names[attribute.NameIndex][attribute.NameOffset], attribute.TypeID, attribute.Length, version, engineVersion)
node.Attributes = append(node.Attributes, v) node.Attributes = append(node.Attributes, v)
if err != nil { if err != nil {
return node, err return node, err
@ -1005,13 +943,13 @@ func ReadNode(r io.ReadSeeker, ni NodeInfo, names [][]string, attributeInfo []At
return node, nil return node, nil
} }
func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Version FileVersion, EngineVersion uint32) (NodeAttribute, error) { func ReadLSFAttribute(r io.ReadSeeker, name string, dt DataType, length uint, version FileVersion, engineVersion uint32) (NodeAttribute, error) {
// LSF and LSB serialize the buffer types differently, so specialized // LSF and LSB serialize the buffer types differently, so specialized
// code is added to the LSB and LSf serializers, and the common code is // code is added to the LSB and LSf serializers, and the common code is
// available in BinUtils.ReadAttribute() // available in BinUtils.ReadAttribute()
var ( var (
attr = NodeAttribute{ attr = NodeAttribute{
Type: DT, Type: dt,
Name: name, Name: name,
} }
err error err error
@ -1022,8 +960,8 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
l = log.With(Logger, "component", "LS converter", "file type", "lsf", "part", "attribute") l = log.With(Logger, "component", "LS converter", "file type", "lsf", "part", "attribute")
pos, err = r.Seek(0, io.SeekCurrent) pos, err = r.Seek(0, io.SeekCurrent)
switch DT { switch dt {
case DT_String, DT_Path, DT_FixedString, DT_LSString, DT_WString, DT_LSWString: case DTString, DTPath, DTFixedString, DTLSString, DTWString, DTLSWString:
var v string var v string
v, err = ReadCString(r, int(length)) v, err = ReadCString(r, int(length))
attr.Value = v attr.Value = v
@ -1033,9 +971,9 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err return attr, err
case DT_TranslatedString: case DTTranslatedString:
var v TranslatedString var v TranslatedString
v, err = ReadTranslatedString(r, Version, EngineVersion) v, err = ReadTranslatedString(r, version, engineVersion)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -1043,9 +981,9 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err return attr, err
case DT_TranslatedFSString: case DTTranslatedFSString:
var v TranslatedFSString var v TranslatedFSString
v, err = ReadTranslatedFSString(r, Version) v, err = ReadTranslatedFSString(r, version)
attr.Value = v attr.Value = v
l.Log("member", name, "read", length, "start position", pos, "value", attr.Value) l.Log("member", name, "read", length, "start position", pos, "value", attr.Value)
@ -1053,7 +991,7 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err return attr, err
case DT_ScratchBuffer: case DTScratchBuffer:
v := make([]byte, length) v := make([]byte, length)
_, err = r.Read(v) _, err = r.Read(v)
@ -1065,21 +1003,37 @@ func ReadLSFAttribute(r io.ReadSeeker, name string, DT DataType, length uint, Ve
return attr, err return attr, err
default: default:
return ReadAttribute(r, name, DT, length, l) return ReadAttribute(r, name, dt, length, l)
} }
} }
func ReadTranslatedString(r io.ReadSeeker, Version FileVersion, EngineVersion uint32) (TranslatedString, error) { func ReadTranslatedString(r io.ReadSeeker, version FileVersion, engineVersion uint32) (TranslatedString, error) {
var ( var (
str TranslatedString str TranslatedString
err error err error
) )
if Version >= VerBG3 || EngineVersion == 0x4000001d { if version >= VerBG3 || engineVersion == 0x4000001d {
// logger.Println("decoding bg3 data") // logger.Println("decoding bg3 data")
var version uint16 var version uint16
/*err =*/ binary.Read(r, binary.LittleEndian, &version) err = binary.Read(r, binary.LittleEndian, &version)
if err != nil {
return str, err
}
str.Version = version str.Version = version
err = binary.Read(r, binary.LittleEndian, &version)
if err != nil {
return str, err
}
if version == 0 {
str.Value, err = ReadCString(r, int(str.Version))
if err != nil {
return str, err
}
str.Version = 0
} else {
_, err = r.Seek(-2, io.SeekCurrent)
}
} else { } else {
str.Version = 0 str.Version = 0
@ -1098,7 +1052,6 @@ func ReadTranslatedString(r io.ReadSeeker, Version FileVersion, EngineVersion ui
if err != nil { if err != nil {
return str, err return str, err
} }
// logger.Printf("value length: %d value: %s read length: %d len of v: %d", vlength, v, n, len(v))
str.Value = string(v) str.Value = string(v)
} }
@ -1115,15 +1068,18 @@ func ReadTranslatedString(r io.ReadSeeker, Version FileVersion, EngineVersion ui
return str, nil return str, nil
} }
func ReadTranslatedFSString(r io.ReadSeeker, Version FileVersion) (TranslatedFSString, error) { func ReadTranslatedFSString(r io.ReadSeeker, version FileVersion) (TranslatedFSString, error) {
var ( var (
str = TranslatedFSString{} str = TranslatedFSString{}
err error err error
) )
if Version >= VerBG3 { if version >= VerBG3 {
var version uint16 var version uint16
/*err =*/ binary.Read(r, binary.LittleEndian, &version) err = binary.Read(r, binary.LittleEndian, &version)
if err != nil {
return str, err
}
str.Version = version str.Version = version
} else { } else {
str.Version = 0 str.Version = 0
@ -1132,8 +1088,10 @@ func ReadTranslatedFSString(r io.ReadSeeker, Version FileVersion) (TranslatedFSS
length int32 length int32
) )
/*err =*/ err = binary.Read(r, binary.LittleEndian, &length)
binary.Read(r, binary.LittleEndian, &length) if err != nil {
return str, err
}
str.Value, err = ReadCString(r, int(length)) str.Value, err = ReadCString(r, int(length))
if err != nil { if err != nil {
return str, err return str, err
@ -1169,7 +1127,7 @@ func ReadTranslatedFSString(r io.ReadSeeker, Version FileVersion) (TranslatedFSS
return str, err return str, err
} }
arg.String, err = ReadTranslatedFSString(r, Version) arg.String, err = ReadTranslatedFSString(r, version)
if err != nil { if err != nil {
return str, err return str, err
} }

View File

@ -8,11 +8,11 @@ import (
type LSMetadata struct { type LSMetadata struct {
//public const uint CurrentMajorVersion = 33; //public const uint CurrentMajorVersion = 33;
Timestamp uint64 `xml:"-"` Timestamp uint64 `xml:"-"`
MajorVersion uint32 `xml:"major,attr"` Major uint32 `xml:"major,attr"`
MinorVersion uint32 `xml:"minor,attr"` Minor uint32 `xml:"minor,attr"`
Revision uint32 `xml:"revision,attr"` Revision uint32 `xml:"revision,attr"`
BuildNumber uint32 `xml:"build,attr"` Build uint32 `xml:"build,attr"`
} }
type format struct { type format struct {