diff --git a/parse_lax.go b/parse_lax.go
index d4cd784..d4f4bcc 100644
--- a/parse_lax.go
+++ b/parse_lax.go
@@ -190,10 +190,15 @@ func normalizeTokens(tokens []tokenForNorm) (string, error) {
 
+const maxLicenseWords = 256
+
 // normalizeLicenseWords takes a slice of words that should form a license name
 // and tries to normalize them. It uses greedy matching from the start.
 func normalizeLicenseWords(words []string) (string, error) {
 	if len(words) == 0 {
 		return "", ErrMissingOperand
 	}
+	if len(words) > maxLicenseWords {
+		return "", &LicenseError{License: words[0], Err: ErrInvalidLicenseID}
+	}
 
 	// Check for special values, LicenseRef or DocumentRef first
 	if len(words) == 1 {
diff --git a/parse_lax_test.go b/parse_lax_test.go
index 59614d1..0119ecf 100644
--- a/parse_lax_test.go
+++ b/parse_lax_test.go
@@ -1,6 +1,7 @@
 package spdx
 
 import (
+	"strings"
 	"testing"
 )
 
@@ -146,3 +147,16 @@ func BenchmarkParseLax(b *testing.B) {
 		}
 	}
 }
+
+func TestNormalizeLicenseWordsCapped(t *testing.T) {
+	words := make([]string, 300)
+	for i := range words {
+		words[i] = "word"
+	}
+	input := strings.Join(words, " ")
+
+	_, err := ParseLax(input)
+	if err == nil {
+		t.Error("expected error for input with too many words")
+	}
+}