Mirror of https://github.com/geoffleyland/lua-csv.git, synced 2024-11-23 01:34:19 +00:00
Whoops! Trying to find a Unicode BOM at the start of a file with an anchored pattern, when file_buffer:find doesn't understand anchoring, led to reading the whole file and running out of memory right at the start. Use :sub to check the first few characters of the file instead.
parent 1edff5f4ef
commit 54a7bb2221
@@ -276,11 +276,11 @@ local function separated_values_iterator(buffer, parameters)
   -- Is there some kind of Unicode BOM here?
-  if field_find("^\239\187\191") then -- UTF-8
+  if field_sub(1, 3) == "\239\187\191" then -- UTF-8
     advance(3)
-  elseif field_find("^\254\255") then -- UTF-16 big-endian
+  elseif field_sub(1, 2) == "\254\255" then -- UTF-16 big-endian
     advance(2)
-  elseif field_find("^\255\254") then -- UTF-16 little-endian
+  elseif field_sub(1, 2) == "\255\254" then -- UTF-16 little-endian
     advance(2)
   end
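Why the change matters: the file buffer loads its contents lazily, and its find does not treat "^" as an anchor, so the old pattern search kept pulling in data until it had scanned the whole input. Comparing the first bytes with sub only ever touches a few characters. Below is a minimal sketch of the idea using a plain Lua string in place of the library's file buffer; has_utf8_bom is an illustrative name, not part of lua-csv.

    -- Illustrative only: detect a UTF-8 BOM by comparing the first three
    -- bytes directly, instead of searching with an "anchored" pattern that
    -- a lazily-loaded buffer's find would end up scanning the whole input for.
    local function has_utf8_bom(s)
      return s:sub(1, 3) == "\239\187\191"
    end

    assert(has_utf8_bom("\239\187\191name,value\n1,2\n"))
    assert(not has_utf8_bom("name,value\n1,2\n"))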