mirror of
https://github.com/lifestorm/wnsrc.git
synced 2025-12-17 21:53:46 +03:00
Upload
This commit is contained in:
123
lua/pac3/libraries/urlobj/cache.lua
Normal file
123
lua/pac3/libraries/urlobj/cache.lua
Normal file
@@ -0,0 +1,123 @@
|
||||
--[[
|
||||
| This file was obtained through the combined efforts
|
||||
| of Madbluntz & Plymouth Antiquarian Society.
|
||||
|
|
||||
| Credits: lifestorm, Gregory Wayne Rossel JR.,
|
||||
| Maloy, DrPepper10 @ RIP, Atle!
|
||||
|
|
||||
| Visit for more: https://plymouth.thetwilightzone.ru/
|
||||
--]]
|
||||
|
||||
local crypto = include("pac3/libraries/urlobj/crypto.lua")

-- Method table shared by every cache instance.
local CACHE = {}

--- Builds a new on-disk cache object rooted under pac3_cache/<cacheId>.
-- @param cacheId string identifier; lowercased into the directory name
-- @return a cache instance whose methods come from CACHE
local function CreateCache(cacheId)
	local self = setmetatable({}, { __index = CACHE })
	self:Initialize(cacheId)
	return self
end
|
||||
|
||||
--- Sets the cache format version and creates the backing directory.
-- @param cacheId string; lowercased into the on-disk path
function CACHE:Initialize(cacheId)
	-- Bump whenever the crypto library's on-disk format changes so
	-- entries written by older code are rejected on read.
	self.Version = 3

	self.Path = "pac3_cache/" .. string.lower(cacheId)
	file.CreateDir(self.Path)
end
|
||||
|
||||
--- Stores an item on disk as: version (long), then two length-prefixed
--- encrypted+compressed strings (the item id, then the payload).
-- Silently does nothing if the file cannot be opened.
-- @param itemId string key for the entry (also derives the crypto key)
-- @param data string payload to store
function CACHE:AddItem(itemId, data)
	local path = self.Path .. "/" .. self:GetItemIdHash(itemId) .. ".txt"
	local key = self:GetItemIdEncryptionKey(itemId)

	local handle = file.Open(path, "wb", "DATA")
	if not handle then return end

	-- Version
	handle:WriteLong(self.Version)

	-- Header: the item id itself, so reads can detect hash collisions
	local header = crypto.EncryptString(util.Compress(itemId), key)
	handle:WriteLong(#header)
	handle:Write(header, #header)

	-- Data
	local body = crypto.EncryptString(util.Compress(data), key)
	handle:WriteLong(#body)
	handle:Write(body, #body)

	handle:Close()
end
|
||||
|
||||
--- Deletes every file inside this cache's directory.
function CACHE:Clear()
	local names = file.Find(self.Path .. "/*", "DATA")
	for i = 1, #names do
		file.Delete(self.Path .. "/" .. names[i])
	end
end

--- Deletes cache files last modified before `time` (seconds, os.time epoch).
-- @param time number cutoff timestamp
function CACHE:ClearBefore(time)
	for _, name in ipairs(file.Find(self.Path .. "/*", "DATA")) do
		local fullPath = self.Path .. "/" .. name
		if file.Time(fullPath, "DATA") < time then
			file.Delete(fullPath)
		end
	end
end
|
||||
|
||||
--- True when a valid entry for itemId exists (performs a full read/decrypt).
function CACHE:ContainsItem(itemId)
	local item = self:GetItem(itemId)
	return item ~= nil
end
|
||||
|
||||
--- Retrieves, decrypts and decompresses a cached item.
-- Returns nil when the entry is missing, was written with a different
-- cache version, or its stored item id does not match (CRC32 collision).
-- @param itemId string key used when the entry was added
-- @return string payload, or nil
function CACHE:GetItem(itemId)
	local hash = self:GetItemIdHash(itemId)
	local path = self.Path .. "/" .. hash .. ".txt"

	if not file.Exists(path, "DATA") then return nil end

	local f = file.Open(path, "rb", "DATA")
	if not f then return nil end

	local key = self:GetItemIdEncryptionKey(itemId)

	-- Version: reject entries written with an incompatible crypto format
	local version = f:ReadLong()
	if version ~= self.Version then
		f:Close()
		return nil
	end

	-- Header: the stored item id guards against CRC32 hash collisions
	local entryItemIdLength = f:ReadLong()
	local entryItemId = crypto.DecryptString(f:Read(entryItemIdLength), key)
	entryItemId = util.Decompress(entryItemId)

	if itemId ~= entryItemId then
		f:Close()
		return nil
	end

	-- Data
	local dataLength = f:ReadLong()
	-- FIX: File:Read takes only a length; the original passed a stray
	-- `key` second argument (ignored by the engine, but misleading).
	local data = f:Read(dataLength)

	f:Close()

	-- Guard against a truncated/corrupt file yielding a nil read
	if not data then return nil end

	data = crypto.DecryptString(data, key)
	data = util.Decompress(data)

	return data
end
|
||||
|
||||
--- Derives the per-item encryption key (seeded from the reversed item id).
function CACHE:GetItemIdEncryptionKey(itemId)
	local reversed = string.reverse(itemId)
	return crypto.GenerateKey(reversed)
end

--- Maps an item id to its file name stem: zero-padded CRC32 in hex.
function CACHE:GetItemIdHash(itemId)
	local crc = tonumber(util.CRC(itemId))
	return string.format("%08x", crc)
end

return CreateCache
|
||||
505
lua/pac3/libraries/urlobj/crypto.lua
Normal file
505
lua/pac3/libraries/urlobj/crypto.lua
Normal file
@@ -0,0 +1,505 @@
|
||||
--[[
|
||||
| This file was obtained through the combined efforts
|
||||
| of Madbluntz & Plymouth Antiquarian Society.
|
||||
|
|
||||
| Credits: lifestorm, Gregory Wayne Rossel JR.,
|
||||
| Maloy, DrPepper10 @ RIP, Atle!
|
||||
|
|
||||
| Visit for more: https://plymouth.thetwilightzone.ru/
|
||||
--]]
|
||||
|
||||
local crypto = {}

-- Block sizes: one block is 64 bytes, i.e. 16 uint32s.
crypto.UInt8BlockSize = 64
crypto.UInt32BlockSize = 16

-- Keys are one block of uint32s long.
crypto.KeySize = crypto.UInt32BlockSize

-- Localized library functions (locals are VM registers; globals are
-- table lookups). NOTE(review): bit_band/bit_rshift appear unused in
-- the visible code — possibly kept for older revisions.
local bit_band = bit.band
local bit_bxor = bit.bxor
local bit_rshift = bit.rshift
local math_ceil = math.ceil
local math_floor = math.floor
local math_random = math.random
local string_byte = string.byte
local string_char = string.char
local string_sub = string.sub
local table_concat = table.concat

-- byteCharacters [i] is faster than string_char (i)
-- byteCharacters1 maps byte -> 1-char string;
-- byteCharacters2 maps (low + high * 256) -> 2-char string (low byte first).
local byteCharacters1 = {}
local byteCharacters2 = {}
local byteCharacters = byteCharacters1
for i = 0, 255 do byteCharacters1 [i] = string_char (i) end
for uint80 = 0, 255 do
	for uint81 = 0, 255 do
		byteCharacters2 [uint80 + uint81 * 256] = string_char (uint80, uint81)
	end
end
|
||||
|
||||
--- Derives a key array from a seed.
-- String seeds are reduced via CRC32 before seeding math.random, so
-- they carry at most 32 bits of entropy. A nil seed keeps the current
-- RNG state. Note: this reseeds the global math.random generator.
-- @param seed number|string|nil
-- @param length number|nil key length in uint32s (default crypto.KeySize)
-- @return array of random uint32s
function crypto.GenerateKey (seed, length)
	if isstring (seed) then
		-- LOL ONLY 32 BITS OF ENTROPY
		seed = tonumber (util.CRC (seed))
	end

	if seed then
		math.randomseed (seed)
	end

	return crypto.GenerateRandomUInt32Array (length or crypto.KeySize)
end
|
||||
|
||||
-- Encrypts a string
-- Scheme (CBC-like XOR chain): pad the plaintext to whole uint32 blocks,
-- emit one random block as an IV XORed with the key, then each ciphertext
-- block = plaintext block XOR previous ciphertext block XOR key.
-- NOTE(review): this is obfuscation, not real cryptography.
function crypto.EncryptString (inputString, keyArray)
	local inputArray = crypto.StringToUInt32Array (inputString)
	inputArray = crypto.PadUInt32Array (inputArray)

	-- First output block: random IV
	local outputArray = {}
	outputArray = crypto.GenerateRandomUInt32Array (crypto.UInt32BlockSize, outputArray)

	-- I have no idea either
	-- (the key is tiled 64x so the bulk loop below can process 64
	--  blocks per call into XorInt32Arrays3)
	local keyArray = crypto.CloneArray (keyArray)
	keyArray = crypto.AppendArray (keyArray, keyArray)
	keyArray = crypto.AppendArray (keyArray, keyArray)
	keyArray = crypto.AppendArray (keyArray, keyArray)
	keyArray = crypto.AppendArray (keyArray, keyArray)
	keyArray = crypto.AppendArray (keyArray, keyArray)
	keyArray = crypto.AppendArray (keyArray, keyArray)

	-- Mask the IV block with the key
	crypto.XorInt32Arrays (outputArray, 1, keyArray, 1, crypto.UInt32BlockSize, outputArray, 1)

	local inputArrayLength = #inputArray
	local inputEndIndex = #inputArray

	-- Largest prefix that is a whole multiple of 64 blocks
	inputEndIndex = inputEndIndex - ((inputArrayLength / crypto.UInt32BlockSize) % 64) * crypto.UInt32BlockSize

	-- Bulk pass: 64 blocks at a time. Output lags input by one block
	-- (outputArray at inputIndex is the previous ciphertext block).
	local inputIndex = 1
	while inputIndex <= inputEndIndex do
		crypto.XorInt32Arrays3 (
			inputArray, inputIndex,
			outputArray, inputIndex,
			keyArray, 1,
			crypto.UInt32BlockSize * 64,
			outputArray, crypto.UInt32BlockSize + inputIndex
		)

		inputIndex = inputIndex + crypto.UInt32BlockSize * 64
	end

	-- Remainder
	inputEndIndex = #inputArray
	while inputIndex <= inputEndIndex do
		crypto.XorInt32Arrays3 (
			inputArray, inputIndex,
			outputArray, inputIndex,
			keyArray, 1,
			crypto.UInt32BlockSize,
			outputArray, crypto.UInt32BlockSize + inputIndex
		)

		inputIndex = inputIndex + crypto.UInt32BlockSize
	end

	local outputString = crypto.Int32ArrayToString (outputArray)
	return outputString
end
|
||||
|
||||
-- Decrypts a string
-- Inverse of EncryptString: walk the ciphertext blocks from last to
-- first, recovering plaintext block = cipher block XOR previous cipher
-- block XOR key; then unmask the IV, strip padding, and drop the IV
-- block when converting back to a string.
function crypto.DecryptString (inputString, keyArray)
	local inputArray = crypto.StringToUInt32Array (inputString)

	-- Undo the chain back-to-front (in place); stops before the IV block
	local inputIndex = #inputArray - crypto.UInt32BlockSize + 1
	while inputIndex > crypto.UInt32BlockSize do
		crypto.XorInt32Arrays3 (
			inputArray, inputIndex,
			inputArray, inputIndex - crypto.UInt32BlockSize,
			keyArray, 1,
			crypto.UInt32BlockSize,
			inputArray, inputIndex
		)

		inputIndex = inputIndex - crypto.UInt32BlockSize
	end

	-- Unmask the IV block with the key
	crypto.XorInt32Arrays (inputArray, 1, keyArray, 1, crypto.UInt32BlockSize, inputArray, 1)

	inputArray = crypto.UnpadInt32Array (inputArray)
	local outputArray = inputArray

	-- Skip the IV block when serializing back to a string
	local outputString = crypto.Int32ArrayToString (outputArray, crypto.UInt32BlockSize + 1)
	return outputString
end
|
||||
|
||||
-- Pads an array in place
-- Appends a 0xFF marker byte, then zero-fills up to the next whole
-- 64-byte block boundary (a full extra block when already aligned, so
-- the marker always fits). Sets array.n to the padded length.
function crypto.PadUInt8Array (array)
	local originalLength = #array

	local targetLength = math_ceil (originalLength / crypto.UInt8BlockSize) * crypto.UInt8BlockSize
	if targetLength == originalLength then
		targetLength = targetLength + crypto.UInt8BlockSize
	end

	array [originalLength + 1] = 0xFF
	for i = originalLength + 2, targetLength do
		array [i] = 0x00
	end

	array.n = #array

	return array
end
|
||||
|
||||
-- Pads an array in place
-- array.n holds the true byte count of the data (the last uint32 may be
-- only partially occupied). The 0xFF marker byte is placed into the
-- first free byte slot — a fresh uint32 when the byte count is a
-- multiple of 4, otherwise OR-ed (via addition, safe since the slot is
-- zero) into the last uint32 — then whole-zero uint32s fill out the
-- block. array.n is updated to the padded byte length.
function crypto.PadUInt32Array (array)
	if array.n % 4 == 0 then
		array [#array + 1] = 0x000000FF
	elseif array.n % 4 == 1 then
		array [#array] = array [#array] + 0x0000FF00
	elseif array.n % 4 == 2 then
		array [#array] = array [#array] + 0x00FF0000
	elseif array.n % 4 == 3 then
		array [#array] = array [#array] + 0xFF000000
	end

	-- Zero-fill up to a whole block of uint32s
	local targetLength = math_ceil (#array / crypto.UInt32BlockSize) * crypto.UInt32BlockSize
	for i = #array + 1, targetLength do
		array [i] = 0x00000000
	end

	array.n = #array * 4

	return array
end
|
||||
|
||||
-- Unpads an array in place
-- Strips the trailing zero fill and then the single 0xFF marker byte
-- appended by PadUInt8Array; updates array.n to the unpadded length.
function crypto.UnpadUInt8Array (array)
	-- Drop trailing zero fill
	local i = #array
	while i >= 1 and array [i] == 0x00 do
		array [i] = nil
		i = i - 1
	end

	-- Drop the 0xFF padding marker
	if array [i] == 0xFF then
		array [i] = nil
	end

	array.n = #array

	return array
end
|
||||
|
||||
-- Unpads an array in place
-- uint32 and int32 arrays share a byte layout, so delegate.
function crypto.UnpadUInt32Array (array)
	return crypto.UnpadInt32Array (array)
end
|
||||
|
||||
-- Unpads an array in place
-- Inverse of PadUInt32Array: drops whole-zero trailing uint32s, then
-- removes the 0xFF marker from whichever byte slot of the last uint32
-- it occupies, adjusting array.n (a byte count) accordingly.
function crypto.UnpadInt32Array (array)
	-- Drop all-zero fill words
	for i = #array, 1, -1 do
		if array [i] ~= 0x00000000 then break end

		array [i] = nil
	end

	array.n = #array * 4

	-- Bitwise ops yield signed int32s; normalize to unsigned so the
	-- marker-byte comparisons below work
	if array [#array] < 0 then
		array [#array] = array [#array] + 4294967296
	end

	-- Remove the 0xFF marker; its byte position tells how many data
	-- bytes the last word actually holds
	if array [#array] - 0xFF000000 >= 0 then
		array [#array] = array [#array] - 0xFF000000
		array.n = array.n - 1
	elseif array [#array] - 0x00FF0000 >= 0 then
		array [#array] = array [#array] - 0x00FF0000
		array.n = array.n - 2
	elseif array [#array] - 0x0000FF00 >= 0 then
		array [#array] = array [#array] - 0x0000FF00
		array.n = array.n - 3
	elseif array [#array] - 0x000000FF >= 0 then
		array [#array] = nil
		array.n = array.n - 4
	end

	return array
end
|
||||
|
||||
-- Array operations

-- Generates a random array of uint8s
-- Appends `length` random bytes to `out` (created when nil).
function crypto.GenerateRandomUInt8Array (length, out)
	local result = out or {}

	local base = #result
	for i = 1, length do
		result [base + i] = math_random (0, 0xFF)
	end

	return result
end

-- Generates a random array of uint32s
-- Appends `length` random uint32s to `out` (created when nil).
function crypto.GenerateRandomUInt32Array (length, out)
	local result = out or {}

	local base = #result
	for i = 1, length do
		result [base + i] = math_random (0, 0xFFFFFFFF)
	end

	return result
end
|
||||
|
||||
-- Appends an array in place
-- Copies array1's elements onto the end of array. Safe for self-append
-- (array == array1): the source length and write base are captured first.
function crypto.AppendArray (array, array1)
	local sourceLength = #array1
	local base = #array

	for i = 1, sourceLength do
		array [base + i] = array1 [i]
	end

	return array
end

-- Clones an array
-- Shallow-copies array's sequence part into out (created when nil).
function crypto.CloneArray (array, out)
	local result = out or {}

	local count = #array
	for i = 1, count do
		result [i] = array [i]
	end

	return result
end

-- Truncates an array in place
-- Removes every element past endIndex.
function crypto.TruncateArray (array, endIndex)
	local i = #array
	while i > endIndex do
		array [i] = nil
		i = i - 1
	end

	return array
end
|
||||
|
||||
-- Xors an array with another
-- Generic entry point; delegates at call time to the two-array version.
function crypto.XorArrays (a, aStart, b, bStart, length, out, outStart)
	return crypto.XorArrays2 (a, aStart, b, bStart, length, out, outStart)
end
|
||||
|
||||
-- Element-wise XOR of two array slices into out (created when nil).
-- All start indices default to 1; out may alias either input.
function crypto.XorArrays2 (array1, array1StartIndex, array2, array2StartIndex, length, out, outStartIndex)
	out = out or {}

	array1StartIndex = array1StartIndex or 1
	array2StartIndex = array2StartIndex or 1
	outStartIndex = outStartIndex or 1

	-- Walk array1's range directly; derive the other positions by offset
	local offset2 = array2StartIndex - array1StartIndex
	local offsetOut = outStartIndex - array1StartIndex

	for i = array1StartIndex, array1StartIndex + length - 1 do
		out [i + offsetOut] = bit_bxor (array1 [i], array2 [i + offset2])
	end

	return out
end
|
||||
|
||||
-- Element-wise XOR of three array slices into out (created when nil).
-- All start indices default to 1; out may alias any input.
function crypto.XorArrays3 (array1, array1StartIndex, array2, array2StartIndex, array3, array3StartIndex, length, out, outStartIndex)
	out = out or {}

	array1StartIndex = array1StartIndex or 1
	array2StartIndex = array2StartIndex or 1
	array3StartIndex = array3StartIndex or 1
	outStartIndex = outStartIndex or 1

	-- Walk array1's range directly; derive the other positions by offset
	local offset2 = array2StartIndex - array1StartIndex
	local offset3 = array3StartIndex - array1StartIndex
	local offsetOut = outStartIndex - array1StartIndex

	for i = array1StartIndex, array1StartIndex + length - 1 do
		out [i + offsetOut] = bit_bxor (array1 [i], array2 [i + offset2], array3 [i + offset3])
	end

	return out
end
|
||||
|
||||
-- Typed aliases: every Xor* variant shares the generic implementations.
-- Each delegates at call time, so replacing XorArrays2/3 later would
-- still take effect through these names.
function crypto.XorUInt8Arrays (a, aStart, b, bStart, length, out, outStart)
	return crypto.XorArrays2 (a, aStart, b, bStart, length, out, outStart)
end

function crypto.XorUInt8Arrays2 (a, aStart, b, bStart, length, out, outStart)
	return crypto.XorArrays2 (a, aStart, b, bStart, length, out, outStart)
end

function crypto.XorUInt8Arrays3 (a, aStart, b, bStart, c, cStart, length, out, outStart)
	return crypto.XorArrays3 (a, aStart, b, bStart, c, cStart, length, out, outStart)
end

function crypto.XorInt32Arrays (a, aStart, b, bStart, length, out, outStart)
	return crypto.XorArrays2 (a, aStart, b, bStart, length, out, outStart)
end

function crypto.XorInt32Arrays2 (a, aStart, b, bStart, length, out, outStart)
	return crypto.XorArrays2 (a, aStart, b, bStart, length, out, outStart)
end

function crypto.XorInt32Arrays3 (a, aStart, b, bStart, c, cStart, length, out, outStart)
	return crypto.XorArrays3 (a, aStart, b, bStart, c, cStart, length, out, outStart)
end
|
||||
|
||||
-- Converts a string to an array of uint8s
-- Unrolled 64 bytes per iteration for speed. The final chunk may write
-- nils past #str (string_byte returns fewer values), which is harmless;
-- TruncateArray then trims any stale tail when `out` is reused.
function crypto.StringToUInt8Array (str, out)
	out = out or {}
	out.n = #str

	-- ARE WE FAST YET?
	for i = 1, #str, 64 do
		out [i + 0], out [i + 1], out [i + 2], out [i + 3],
		out [i + 4], out [i + 5], out [i + 6], out [i + 7],
		out [i + 8], out [i + 9], out [i + 10], out [i + 11],
		out [i + 12], out [i + 13], out [i + 14], out [i + 15],
		out [i + 16], out [i + 17], out [i + 18], out [i + 19],
		out [i + 20], out [i + 21], out [i + 22], out [i + 23],
		out [i + 24], out [i + 25], out [i + 26], out [i + 27],
		out [i + 28], out [i + 29], out [i + 30], out [i + 31],
		out [i + 32], out [i + 33], out [i + 34], out [i + 35],
		out [i + 36], out [i + 37], out [i + 38], out [i + 39],
		out [i + 40], out [i + 41], out [i + 42], out [i + 43],
		out [i + 44], out [i + 45], out [i + 46], out [i + 47],
		out [i + 48], out [i + 49], out [i + 50], out [i + 51],
		out [i + 52], out [i + 53], out [i + 54], out [i + 55],
		out [i + 56], out [i + 57], out [i + 58], out [i + 59],
		out [i + 60], out [i + 61], out [i + 62], out [i + 63] = string_byte (str, i, i + 63)
	end

	out = crypto.TruncateArray (out, #str)

	return out
end
|
||||
|
||||
-- Converts an array of uint8s to a string destructively
-- Overwrites the array in place: each pair of bytes is replaced with a
-- precomputed two-character string (low byte first), then everything
-- from startIndex to the compacted end is concatenated. The array is
-- unusable as a byte array afterwards.
function crypto.UInt8ArrayToString (array, startIndex)
	startIndex = startIndex or 1

	-- Process pairs of uint8s
	local length = #array - startIndex + 1
	local endIndex = #array
	if length % 2 == 1 then endIndex = endIndex - 1 end

	local j = startIndex
	for i = startIndex, endIndex, 2 do
		array [j] = byteCharacters2 [array [i] + array [i + 1] * 256]
		j = j + 1
	end

	-- Process remaining uint8 if there is one
	if length % 2 == 1 then
		array [j] = byteCharacters [array [#array]]
		j = j + 1
	end

	-- nil separator means "" for table.concat
	return table_concat (array, nil, startIndex, j - 1)
end
|
||||
|
||||
local oneOver64 = 1 / 64

-- Converts a string to an array of uint32s
-- Little-endian packing: 4 bytes per uint32, unrolled 64 bytes (16
-- uint32s) per iteration for speed; the tail is handled 4 bytes at a
-- time with zero fill. out.n records the original byte count.
-- NOTE(review): assumes `out` (when supplied) is empty or no longer
-- than the encoded result — the tail appends at #out + 1; verify callers.
function crypto.StringToUInt32Array (str, out)
	out = out or {}
	out.n = #str

	local fullChunkCount = math_floor (#str * oneOver64)
	local fullChunkCountMinusOne = fullChunkCount - 1
	for i = 0, fullChunkCountMinusOne do
		local uint80, uint81, uint82, uint83,
		uint84, uint85, uint86, uint87,
		uint88, uint89, uint810, uint811,
		uint812, uint813, uint814, uint815,
		uint816, uint817, uint818, uint819,
		uint820, uint821, uint822, uint823,
		uint824, uint825, uint826, uint827,
		uint828, uint829, uint830, uint831,
		uint832, uint833, uint834, uint835,
		uint836, uint837, uint838, uint839,
		uint840, uint841, uint842, uint843,
		uint844, uint845, uint846, uint847,
		uint848, uint849, uint850, uint851,
		uint852, uint853, uint854, uint855,
		uint856, uint857, uint858, uint859,
		uint860, uint861, uint862, uint863 = string_byte (str, i * 64 + 1, i * 64 + 64)

		out [i * 16 + 1] = uint80 + uint81 * 256 + uint82 * 65536 + uint83 * 16777216
		out [i * 16 + 2] = uint84 + uint85 * 256 + uint86 * 65536 + uint87 * 16777216
		out [i * 16 + 3] = uint88 + uint89 * 256 + uint810 * 65536 + uint811 * 16777216
		out [i * 16 + 4] = uint812 + uint813 * 256 + uint814 * 65536 + uint815 * 16777216
		out [i * 16 + 5] = uint816 + uint817 * 256 + uint818 * 65536 + uint819 * 16777216
		out [i * 16 + 6] = uint820 + uint821 * 256 + uint822 * 65536 + uint823 * 16777216
		out [i * 16 + 7] = uint824 + uint825 * 256 + uint826 * 65536 + uint827 * 16777216
		out [i * 16 + 8] = uint828 + uint829 * 256 + uint830 * 65536 + uint831 * 16777216
		out [i * 16 + 9] = uint832 + uint833 * 256 + uint834 * 65536 + uint835 * 16777216
		out [i * 16 + 10] = uint836 + uint837 * 256 + uint838 * 65536 + uint839 * 16777216
		out [i * 16 + 11] = uint840 + uint841 * 256 + uint842 * 65536 + uint843 * 16777216
		out [i * 16 + 12] = uint844 + uint845 * 256 + uint846 * 65536 + uint847 * 16777216
		out [i * 16 + 13] = uint848 + uint849 * 256 + uint850 * 65536 + uint851 * 16777216
		out [i * 16 + 14] = uint852 + uint853 * 256 + uint854 * 65536 + uint855 * 16777216
		out [i * 16 + 15] = uint856 + uint857 * 256 + uint858 * 65536 + uint859 * 16777216
		out [i * 16 + 16] = uint860 + uint861 * 256 + uint862 * 65536 + uint863 * 16777216
	end

	-- Remainder: pack 4 bytes at a time, zero-filling past end of string
	if #str % 64 ~= 0 then
		local startIndex = #str - #str % 64 + 1
		for i = startIndex, #str, 4 do
			local uint80, uint81, uint82, uint83 = string_byte (str, i, i + 3)
			uint80, uint81, uint82, uint83 = uint80 or 0, uint81 or 0, uint82 or 0, uint83 or 0
			out [#out + 1] = uint80 + uint81 * 256 + uint82 * 65536 + uint83 * 16777216
		end
	end

	-- Trim any stale tail when out was reused
	out = crypto.TruncateArray (out, math_ceil (#str * 0.25))

	return out
end
|
||||
|
||||
-- Converts an array of int32s to a string
-- Little-endian unpacking via modular arithmetic (works for negative
-- int32s too: Lua's % always yields a non-negative result for a
-- positive divisor). Each word becomes two precomputed 2-char strings;
-- array.n (the true byte count) drives trimming of the padded tail.
local bit = bit
local oneOver65536 = 1 / 65536
function crypto.Int32ArrayToString (array, startIndex)
	startIndex = startIndex or 1

	-- Byte count of the slice being serialized
	local length = (array.n or (#array * 4)) - (startIndex - 1) * 4

	local t = {}
	for i = startIndex, #array do
		local uint32 = array [i]
		-- uint81/uint83 are kept pre-multiplied by 256 so they index
		-- byteCharacters2 directly as the high byte
		local uint80 = uint32 % 256 uint32 = uint32 - uint80
		local uint81 = uint32 % 65536 uint32 = uint32 - uint81 uint32 = uint32 * oneOver65536
		local uint82 = uint32 % 256 uint32 = uint32 - uint82
		local uint83 = uint32 % 65536

		t [#t + 1] = byteCharacters2 [uint80 + uint81]
		t [#t + 1] = byteCharacters2 [uint82 + uint83]
	end

	-- Trim the final word's padding bytes down to the true byte count
	if length % 4 == 1 then
		t [#t] = nil
		t [#t] = string_sub (t [#t], 1, 1)
	elseif length % 4 == 2 then
		t [#t] = nil
	elseif length % 4 == 3 then
		t [#t] = string_sub (t [#t], 1, 1)
	end

	return table_concat (t)
end
|
||||
|
||||
-- Converts an array of uint32s to a string
-- Same byte layout as int32s, so delegate.
function crypto.UInt32ArrayToString (array, startIndex)
	return crypto.Int32ArrayToString (array, startIndex)
end

return crypto
|
||||
262
lua/pac3/libraries/urlobj/queueitem.lua
Normal file
262
lua/pac3/libraries/urlobj/queueitem.lua
Normal file
@@ -0,0 +1,262 @@
|
||||
--[[
|
||||
| This file was obtained through the combined efforts
|
||||
| of Madbluntz & Plymouth Antiquarian Society.
|
||||
|
|
||||
| Credits: lifestorm, Gregory Wayne Rossel JR.,
|
||||
| Maloy, DrPepper10 @ RIP, Atle!
|
||||
|
|
||||
| Visit for more: https://plymouth.thetwilightzone.ru/
|
||||
--]]
|
||||
|
||||
-- Grab the urlobj module table that urlobj.lua temporarily stashes in
-- _G while including this file (it clears _G.pac_urlobj right after).
local urlobj = _G.pac_urlobj

local TIMEOUT_VALUE = CreateConVar('pac_objdl_timeout', '15', {FCVAR_ARCHIVE}, 'OBJ download timeout in seconds')
local CACHE_OBJS = CreateConVar('pac_obj_cache', '1', {FCVAR_ARCHIVE}, 'DEBUG: Cache Object files on disk. Disables disk cache access (like cache does not exist in code)')
local QUEUEITEM = {}

-- Warning: This code is concurrency hell
-- Either the decode from the cache or the decode from the web could finish first / last
-- And the web request handler will often decide to not override the cache
|
||||
|
||||
--- Constructs a queue item tracking the download/decode of one OBJ url.
-- @param url string url of the OBJ file
-- @return a queue item whose methods come from QUEUEITEM
local function CreateQueueItem(url)
	local self = setmetatable({}, { __index = QUEUEITEM })
	self:Initialize(url)
	return self
end
|
||||
|
||||
--- Resets all per-item state. Cache decode and web download run
--- concurrently; the flags below track which finished and which data won.
function QUEUEITEM:Initialize (url)
	self.Url = url
	self.Data = nil               -- raw OBJ text (from cache or web)
	self.UsingCachedData = false  -- true when the web payload matched the cache

	-- Cache
	self.CacheDecodeFinished = false

	-- Download
	self.DownloadAttemptCount = 0
	self.DownloadTimeoutTime = 0
	self.Downloading = false
	self.DownloadFinished = false

	-- Status
	self.Status = nil
	self.Finished = false

	-- Model
	self.Model = nil

	-- Decoding parameters
	self.GenerateNormals = false

	-- Callbacks (sets keyed by the callback function itself)
	self.CallbackSet = {}
	self.DownloadCallbackSet = {}
	self.StatusCallbackSet = {}
end
|
||||
|
||||
--- @return string the url this queue item was created for
function QUEUEITEM:GetUrl () return self.Url end
|
||||
|
||||
-- Cache
--- Starts decoding the cached copy (if any) of this url's OBJ data.
--- Runs concurrently with the web download; the status callback below
--- is careful not to clobber state once the download has produced
--- newer data.
function QUEUEITEM:BeginCacheRetrieval ()
	if not CACHE_OBJS:GetBool() then return end
	self.Data = urlobj.DataCache:GetItem(self.Url)
	if not self.Data then return end

	self.Model = urlobj.CreateModelFromObjData(self.Data, self.GenerateNormals,
		function (finished, statusMessage)
			if self:IsFinished () then return end
			-- The download produced different data; this decode is stale
			if self.DownloadFinished and not self.UsingCachedData then return end

			if finished and not self.DownloadFinished then
				-- Decoded from cache but still waiting on the download
				self:SetStatus ("")
			else
				self:SetStatus ("Cached model: " .. statusMessage)
			end

			-- Download confirmed the cached data: this decode is authoritative
			if self.DownloadFinished and self.UsingCachedData then
				self:SetFinished (finished)
			end

			if finished then
				self.CacheDecodeFinished = true
			end
		end
	)

	self:DispatchCallbacks (self.Model)
end
|
||||
|
||||
--- @return boolean whether the cache decode has completed
function QUEUEITEM:IsCacheDecodeFinished ()
	return self.CacheDecodeFinished
end

-- Download
--- Cancels an in-flight download; the HTTP success handler checks
--- self.Downloading and will ignore a late response.
function QUEUEITEM:AbortDownload ()
	self.Downloading = false
	self:SetStatus ("Download aborted")
end
|
||||
|
||||
--- Starts (or restarts) the HTTP download of the OBJ file.
--- On success: if the payload matches the cached data, the concurrent
--- cache decode is (or becomes) authoritative; otherwise the fresh data
--- is cached and decoded. No-op while already downloading.
function QUEUEITEM:BeginDownload ()
	if self:IsDownloading () then return end

	self:SetStatus ("Downloading")

	self.Downloading = true
	self.DownloadTimeoutTime = pac.RealTime + TIMEOUT_VALUE:GetFloat()
	self.DownloadAttemptCount = self.DownloadAttemptCount + 1

	local function success(data)
		-- Ignore late responses after AbortDownload
		if not self.Downloading then return end
		self.Downloading = false
		self.DownloadFinished = true

		pac.dprint("downloaded model %q %s", self.Url, string.NiceSize(#data))
		pac.dprint("%s", data)

		self:DispatchDownloadCallbacks ()
		self:ClearDownloadCallbacks ()

		self.UsingCachedData = self.Data == data

		if self.UsingCachedData then
			-- Cache decode already done: nothing left to wait for
			if self.CacheDecodeFinished then
				self:SetFinished (true)
			end
		else
			self.Data = data

			if CACHE_OBJS:GetBool() then
				urlobj.DataCache:AddItem (self.Url, self.Data)
			end

			self.Model = urlobj.CreateModelFromObjData(self.Data, self.GenerateNormals,
				function (finished, statusMessage)
					self:SetStatus (statusMessage)
					self:SetFinished (finished)

					if self:IsFinished () then
						self:ClearStatusCallbacks ()
					end
				end
			)
		end

		self:DispatchCallbacks (self.Model)
		self:ClearCallbacks ()
	end

	local function failure(err, fatal)
		-- Fatal server response: inflate the attempt count so the
		-- queue gives up instead of retrying
		if fatal then
			self.DownloadAttemptCount = 100
		end

		self.DownloadTimeoutTime = 0
		self:SetStatus ("Failed - " .. err)
	end

	pac.HTTPGet(self.Url, success, failure)
end
|
||||
|
||||
--- @return number how many download attempts have been made
function QUEUEITEM:GetDownloadAttemptCount ()
	return self.DownloadAttemptCount
end

--- @return boolean whether a download is currently in flight
function QUEUEITEM:IsDownloading ()
	return self.Downloading
end

--- @return boolean true when an in-flight download exceeded its deadline
function QUEUEITEM:HasDownloadTimedOut ()
	if not self:IsDownloading () then return false end
	return pac.RealTime > self.DownloadTimeoutTime
end
|
||||
|
||||
-- Status
--- @return string|nil the latest status message
function QUEUEITEM:GetStatus () return self.Status end

--- @return boolean whether processing has fully completed
function QUEUEITEM:IsFinished () return self.Finished end
|
||||
|
||||
--- Updates the status message; notifies status callbacks only on change.
-- @return self for chaining
function QUEUEITEM:SetStatus (status)
	if self.Status ~= status then
		self.Status = status
		self:DispatchStatusCallbacks (self.Finished, self.Status)
	end
	return self
end

--- Updates the finished flag; notifies status callbacks only on change.
-- @return self for chaining
function QUEUEITEM:SetFinished (finished)
	if self.Finished ~= finished then
		self.Finished = finished
		self:DispatchStatusCallbacks (self.Finished, self.Status)
	end
	return self
end
|
||||
|
||||
-- Model
--- @return the decoded model, or nil while still pending
function QUEUEITEM:GetModel () return self.Model end
|
||||
|
||||
-- Callbacks
--- Registers a model callback; fires immediately if a model already exists.
function QUEUEITEM:AddCallback (callback)
	self.CallbackSet [callback] = true
	if self.Model ~= nil then
		callback (self.Model)
	end
end

--- Registers a callback fired once the raw download completes.
function QUEUEITEM:AddDownloadCallback (downloadCallback)
	self.DownloadCallbackSet [downloadCallback] = true
end

--- Registers a status callback; fires immediately with the current state.
function QUEUEITEM:AddStatusCallback (statusCallback)
	self.StatusCallbackSet [statusCallback] = true
	statusCallback (self.Finished, self.Status)
end
|
||||
|
||||
--- Drops all registered model callbacks.
function QUEUEITEM:ClearCallbacks () self.CallbackSet = {} end

--- Drops all registered download callbacks.
function QUEUEITEM:ClearDownloadCallbacks () self.DownloadCallbackSet = {} end

--- Drops all registered status callbacks.
function QUEUEITEM:ClearStatusCallbacks () self.StatusCallbackSet = {} end
|
||||
|
||||
--- Invokes every registered model callback with the given arguments.
function QUEUEITEM:DispatchCallbacks (...)
	for callback in pairs (self.CallbackSet) do
		callback (...)
	end
end

--- Invokes every registered download callback with the given arguments.
function QUEUEITEM:DispatchDownloadCallbacks (...)
	for downloadCallback in pairs (self.DownloadCallbackSet) do
		downloadCallback (...)
	end
end

--- Invokes every registered status callback with the given arguments.
function QUEUEITEM:DispatchStatusCallbacks (...)
	for statusCallback in pairs (self.StatusCallbackSet) do
		statusCallback (...)
	end
end

return CreateQueueItem
|
||||
703
lua/pac3/libraries/urlobj/urlobj.lua
Normal file
703
lua/pac3/libraries/urlobj/urlobj.lua
Normal file
@@ -0,0 +1,703 @@
|
||||
--[[
|
||||
| This file was obtained through the combined efforts
|
||||
| of Madbluntz & Plymouth Antiquarian Society.
|
||||
|
|
||||
| Credits: lifestorm, Gregory Wayne Rossel JR.,
|
||||
| Maloy, DrPepper10 @ RIP, Atle!
|
||||
|
|
||||
| Visit for more: https://plymouth.thetwilightzone.ru/
|
||||
--]]
|
||||
|
||||
local urlobj = {}

-- Temporarily expose the module table so the files included below can
-- pick it up via _G.pac_urlobj; cleared again right after.
_G.pac_urlobj = urlobj

local CreateCache = include("pac3/libraries/urlobj/cache.lua")
local CreateQueueItem = include("pac3/libraries/urlobj/queueitem.lua")

_G.pac_urlobj = nil

-- On-disk cache of downloaded OBJ data
urlobj.DataCache = CreateCache("objcache")

local maxAgeConvar = CreateConVar("pac_obj_cache_maxage", "604800", FCVAR_ARCHIVE, "Maximum age of cache entries in seconds, default is 1 week.")
urlobj.DataCache:ClearBefore(os.time() - maxAgeConvar:GetFloat())

concommand.Add("pac_urlobj_clear_disk", function()
	urlobj.DataCache:Clear()
	pac.Message("Disk cache cleared")
end, nil, "Clears obj file cache on disk")

-- NOTE(review): "SIMULATENOUS" is a typo but the local is presumably
-- referenced later in this file — renaming would need a full-file pass.
local SIMULATENOUS_DOWNLOADS = CreateConVar("pac_objdl_streams", "4", {FCVAR_ARCHIVE}, "OBJ files download streams")
local CURRENTLY_DOWNLOADING = 0

-- In-memory cache of decoded models, keyed by url
urlobj.Cache = {}
urlobj.CacheCount = 0

-- Items currently being retrieved/decoded, keyed by url
urlobj.Queue = {}
urlobj.QueueCount = 0

-- Subset of Queue still awaiting their HTTP download
urlobj.DownloadQueue = {}
urlobj.DownloadQueueCount = 0

local pac_enable_urlobj = CreateClientConVar("pac_enable_urlobj", "1", true)

concommand.Add("pac_urlobj_clear_cache",
	function ()
		urlobj.ClearCache()
		urlobj.ClearQueue()
	end
)
|
||||
|
||||
--- Reload drops the in-memory model cache so models re-decode on demand.
function urlobj.Reload()
	urlobj.ClearCache()
end

--- Empties the in-memory decoded-model cache.
function urlobj.ClearCache()
	urlobj.Cache, urlobj.CacheCount = {}, 0
end

--- Drops every pending queue entry and resets download bookkeeping.
function urlobj.ClearQueue()
	urlobj.Queue, urlobj.QueueCount = {}, 0

	urlobj.DownloadQueue, urlobj.DownloadQueueCount = {}, 0
	CURRENTLY_DOWNLOADING = 0
end
|
||||
|
||||
-- Requests an OBJ model from a URL.
-- @param url             URL of the .obj file
-- @param forceReload     when true, bypass the in-memory cache
-- @param generateNormals forwarded setting (consumed downstream, not here)
-- @param callback        called with the finished model
-- @param statusCallback  called with (isFinished, statusText) progress updates
-- Does nothing when the pac_enable_urlobj convar is off.
function urlobj.GetObjFromURL(url, forceReload, generateNormals, callback, statusCallback)
	if not pac_enable_urlobj:GetBool() then return end

	-- Serve straight from the in-memory cache when allowed.
	local cached = urlobj.Cache[url]
	if cached and callback and not forceReload then
		callback(cached)
		return
	end

	local queueItem = urlobj.Queue[url]

	-- Not queued yet: create the item and start cache retrieval/download.
	if not queueItem then
		queueItem = CreateQueueItem(url)

		urlobj.Queue[url] = queueItem
		urlobj.QueueCount = urlobj.QueueCount + 1

		urlobj.DownloadQueue[url] = queueItem
		urlobj.DownloadQueueCount = urlobj.DownloadQueueCount + 1

		queueItem:BeginCacheRetrieval()

		-- When the item finishes, move its model into the cache and
		-- remove it from the work queue.
		queueItem:AddStatusCallback(function(finished, statusMessage)
			if not finished then return end

			urlobj.Queue[url] = nil
			urlobj.QueueCount = urlobj.QueueCount - 1

			urlobj.Cache[url] = queueItem:GetModel()
			urlobj.CacheCount = urlobj.CacheCount + 1
		end)
	end

	-- Attach the caller's callbacks to the (new or existing) queue item.
	if callback then
		queueItem:AddCallback(callback)
	end

	if statusCallback then
		queueItem:AddStatusCallback(function(isFinished, mStatus)
			if mStatus == "" then mStatus = "Queued for processing" end
			statusCallback(isFinished, mStatus)
		end)
	end
end
-- Parser coroutines waiting to be serviced by the Think hook.
local thinkThreads = {}

-- Per-frame time budget for parsing, in seconds.
local PARSING_THERSOLD = CreateConVar("pac_obj_runtime", "0.002", {FCVAR_ARCHIVE}, "Maximal parse runtime in seconds")
-- The parser yields a progress update every this many lines.
local PARSE_CHECK_LINES = 30
-- Services the OBJ parser coroutines from the game's Think hook.
-- Only the FIRST pending thread is resumed each frame (see the trailing
-- break), and only for up to pac_obj_runtime seconds, so parsing never
-- stalls rendering.
local function Think()
	local maxRuntime = PARSING_THERSOLD:GetFloat()

	for i, threadData in ipairs(thinkThreads) do
		local statusCallback, co = threadData.statusCallback, threadData.co

		local t0 = SysTime()
		local success, finished, statusMessage, msg

		-- Keep resuming until the coroutine finishes, errors out, or the
		-- per-frame time budget is exhausted.
		while SysTime() - t0 < maxRuntime do
			success, finished, statusMessage, msg = coroutine.resume(co)

			if not success then break end
			if finished then break end
		end

		if not success then
			-- On failure, `finished` holds the error message returned by
			-- coroutine.resume.
			-- FIX: notify the status callback BEFORE raising. Previously
			-- error() was called first, which made the statusCallback line
			-- unreachable, so the queue item never learned decoding failed.
			table.remove(thinkThreads, i)
			statusCallback(true, "Decoding error")
			error(finished)
		elseif finished then
			statusCallback(true, "Finished")
			table.remove(thinkThreads, i)
		else
			-- Progress report: during "Preprocessing lines" msg is a raw
			-- line count; afterwards it is a 0..1 fraction.
			if statusMessage == "Preprocessing lines" then
				statusCallback(false, statusMessage .. " " .. msg)
			elseif msg then
				statusCallback(false, statusMessage .. " " .. math.Round(msg*100) .. " %")
			else
				statusCallback(false, statusMessage)
			end
		end

		-- Deliberate: service exactly one parse thread per frame.
		break
	end
end

pac.AddHook("Think", "parse_obj", Think)
local nextParsingHookId = 0 -- NOTE(review): appears unused in this file

-- Builds a mesh from raw OBJ text. Parsing runs inside a coroutine that is
-- driven incrementally by the Think hook, so large files do not block.
-- @param objData         raw OBJ file contents
-- @param generateNormals forwarded to urlobj.ParseObj
-- @param statusCallback  called with (isFinished, statusText) updates
-- @return a one-element array holding the (initially empty) mesh, which is
--         filled in asynchronously once parsing completes
function urlobj.CreateModelFromObjData(objData, generateNormals, statusCallback)
	local mesh = Mesh()

	local co = coroutine.create(function()
		mesh:BuildFromTriangles(urlobj.ParseObj(objData, generateNormals))
		coroutine.yield(true)
	end)

	thinkThreads[#thinkThreads + 1] = {
		objData = objData,
		generateNormals = generateNormals,
		statusCallback = statusCallback,
		co = co,
		mesh = mesh,
	}

	statusCallback(false, "Queued")

	return { mesh }
end
-- ===========================================================================
-- Everything below is internal and should only be called by code in this file
-- ===========================================================================

-- parser made by animorten
-- modified slightly by capsadmin

-- Localize frequently used globals for the hot parsing loops.
local ipairs = ipairs
local pairs = pairs
local tonumber = tonumber

local math_sqrt = math.sqrt
local string_gmatch = string.gmatch
local string_gsub = string.gsub
local string_match = string.match
local string_sub = string.sub
local string_Split = string.Split
local string_Trim = string.Trim
local table_concat = table.concat
local table_insert = table.insert

local Vector = Vector

-- Lua patterns (not regexes) for the OBJ directives.
-- NOTE(review): the class [0-9.+-e0-9] contains the range `+`-`e`, which
-- matches far more than digits/exponent chars; kept as-is since tonumber()
-- filters the result downstream.
local facesMapper = "([0-9]+)/?([0-9]*)/?([0-9]*)"
local numberMatch = "(-?[0-9.+-e0-9]+)"
local vMatch = "^ *v *" .. numberMatch .. " +" .. numberMatch .. " +" .. numberMatch
local vtMatch = "^ *vt *" .. numberMatch .. " +" .. numberMatch
local vnMatch = "^ *vn *" .. numberMatch .. " +" .. numberMatch .. " +" .. numberMatch
-- Parses Wavefront OBJ text into a flat triangle list suitable for
-- IMesh:BuildFromTriangles.
-- @param data            raw OBJ file contents (lines must end in \n)
-- @param generateNormals when true, `vn` lines are ignored and smooth
--                        per-vertex normals are computed from the faces
-- @return array of vertex tables {pos_index, pos, u, v, normal, userdata};
--         three consecutive entries form one triangle
-- When run inside a coroutine this yields (false, statusMessage, progress)
-- periodically so the Think hook can throttle it; outside a coroutine the
-- yields become no-ops.
function urlobj.ParseObj(data, generateNormals)
	local coroutine_yield = coroutine.running () and coroutine.yield or function () end

	local positions = {}
	local texCoordsU = {}
	local texCoordsV = {}
	local normals = {}

	local triangleList = {}

	-- Lines are bucketed by directive in a first pass, then processed.
	local lines = {}
	local faceLines = {}
	local vLines = {}
	local vtLines = {}
	local vnLines = {}
	local facesPreprocess = {}

	local i = 1
	local inContinuation = false
	local continuationLines = nil

	local defaultNormal = Vector(0, 0, -1)

	-- Pass 1: split into lines, merge backslash continuations, skip
	-- comments (#), line elements (l), groups (g) and usemtl/etc (u),
	-- and bucket the rest by directive.
	for line in string_gmatch (data, "(.-)\r?\n") do
		if #line > 3 then
			local first = string_sub(line, 1, 1)
			if first ~= "#" and first ~= "l" and first ~= "g" and first ~= "u" then
				if string_sub(line, #line) == "\\" then
					-- Trailing backslash: accumulate into the continuation buffer
					line = string_sub (line, 1, #line - 1)
					if inContinuation then
						continuationLines[#continuationLines + 1] = line
					else
						inContinuation = true
						continuationLines = { line }
					end
				else
					local currLine

					if inContinuation then
						continuationLines[#continuationLines + 1] = line
						currLine = table_concat (continuationLines)
						first = string_sub(currLine, 1, 1)
						inContinuation = false
						continuationLines = nil
					else
						currLine = line
					end

					local second = string_sub(currLine, 1, 2)

					if second == "vt" then
						vtLines[#vtLines + 1] = currLine
					elseif second == "vn" then
						vnLines[#vnLines + 1] = currLine
					elseif first == "v" then
						vLines[#vLines + 1] = currLine
					elseif first == "f" then
						facesPreprocess[#facesPreprocess + 1] = currLine
					else
						lines[#lines + 1] = currLine
					end
				end

				if i % PARSE_CHECK_LINES == 0 then
					coroutine_yield(false, "Preprocessing lines", i)
				end

				i = i + 1
			end
		end
	end

	-- Flush an unterminated continuation at end of input.
	-- NOTE(review): `line` here is the (now out-of-scope) gmatch loop
	-- variable, so this append reads a global that is likely nil — the
	-- buffered lines are still concatenated, but the final fragment is
	-- probably not appended; verify against upstream pac3.
	if inContinuation then
		continuationLines[#continuationLines + 1] = line
		lines[#lines + 1] = table.concat (continuationLines)
		inContinuation = false
		continuationLines = nil
	end

	coroutine_yield(false, "Preprocessing lines", i)

	local lineCount = #vtLines + #vnLines + #vLines + #facesPreprocess
	local inverseLineCount = 1 / lineCount
	local lineProcessed = 0

	-- Pass 2a: vertex positions (v x y z).
	for i, line in ipairs(vLines) do
		local x, y, z = string_match(line, vMatch)

		x, y, z = tonumber(x) or 0, tonumber(y) or 0, tonumber(z) or 0
		positions[#positions + 1] = Vector(x, y, z)

		if i % PARSE_CHECK_LINES == 0 then
			coroutine_yield(false, "Processing vertices", i * inverseLineCount)
		end
	end

	lineProcessed = #vLines

	-- Pass 2b: texture coordinates (vt u v), wrapped into [0,1) and
	-- V flipped for the engine's texture origin.
	for i, line in ipairs(vtLines) do
		local u, v = string_match(line, vtMatch)

		u, v = tonumber(u) or 0, tonumber(v) or 0

		local texCoordIndex = #texCoordsU + 1
		texCoordsU[texCoordIndex] = u % 1
		texCoordsV[texCoordIndex] = (1 - v) % 1

		if i % PARSE_CHECK_LINES == 0 then
			coroutine_yield(false, "Processing vertices", (i + lineProcessed) * inverseLineCount)
		end
	end

	lineProcessed = #vLines + #vtLines

	-- Pass 2c: normals (vn x y z), renormalized; skipped entirely when
	-- normals are to be generated from faces instead.
	if not generateNormals then
		for i, line in ipairs(vnLines) do
			local nx, ny, nz = string_match(line, vnMatch)

			if nx and ny and nz then
				nx, ny, nz = tonumber(nx) or 0, tonumber(ny) or 0, tonumber(nz) or 0 -- possible / by zero

				local inverseLength = 1 / math_sqrt(nx * nx + ny * ny + nz * nz)
				nx, ny, nz = nx * inverseLength, ny * inverseLength, nz * inverseLength

				local normal = Vector(nx, ny, nz)
				normals[#normals + 1] = normal
			end

			if i % PARSE_CHECK_LINES == 0 then
				coroutine_yield(false, "Processing vertices", (i + lineProcessed) * inverseLineCount)
			end
		end
	end

	lineProcessed = #vLines + #vtLines + #vnLines

	-- Pass 2d: faces (f a/b/c ...), exploded into their index tokens.
	for i, line in ipairs(facesPreprocess) do
		local matchLine = string_match(line, "^ *f +(.*)")

		if matchLine then
			-- Explode line
			local parts = {}

			for part in string_gmatch(matchLine, "[^ ]+") do
				parts[#parts + 1] = part
			end

			faceLines[#faceLines + 1] = parts

			if i % PARSE_CHECK_LINES == 0 then
				coroutine_yield(false, "Processing vertices", (i + lineProcessed) * inverseLineCount)
			end
		end
	end

	-- Pass 3: `lines` holds anything that did not match the buckets above
	-- (e.g. continuation-merged lines); scan it with the same matchers.
	local lineCount = #lines
	local inverseLineCount = 1 / lineCount
	local i = 1

	while i <= lineCount do
		local processedLine = false

		-- Positions: v %f %f %f [%f]
		while i <= lineCount do
			local line = lines[i]
			local x, y, z = string_match(line, vMatch)
			if not x then break end

			processedLine = true
			x, y, z = tonumber(x) or 0, tonumber(y) or 0, tonumber(z) or 0
			positions[#positions + 1] = Vector(x, y, z)

			if i % PARSE_CHECK_LINES == 0 then
				coroutine_yield(false, "Processing vertices", i * inverseLineCount)
			end

			i = i + 1
		end

		if processedLine then
			coroutine_yield(false, "Processing vertices", i * inverseLineCount)
		end

		-- Texture coordinates: vt %f %f
		while i <= lineCount do
			local line = lines[i]
			local u, v = string_match(line, vtMatch)
			if not u then break end

			processedLine = true
			u, v = tonumber(u) or 0, tonumber(v) or 0

			local texCoordIndex = #texCoordsU + 1
			texCoordsU[texCoordIndex] = u % 1
			texCoordsV[texCoordIndex] = (1 - v) % 1

			if i % PARSE_CHECK_LINES == 0 then
				coroutine_yield(false, "Processing vertices", i * inverseLineCount)
			end

			i = i + 1
		end

		if processedLine then
			coroutine_yield(false, "Processing vertices", i * inverseLineCount)
		end

		-- Normals: vn %f %f %f
		while i <= lineCount do
			local line = lines[i]
			local nx, ny, nz = string_match(line, vnMatch)
			if not nx then break end

			processedLine = true

			if not generateNormals then
				nx, ny, nz = tonumber(nx) or 0, tonumber(ny) or 0, tonumber(nz) or 0

				local inverseLength = 1 / math_sqrt(nx * nx + ny * ny + nz * nz)
				nx, ny, nz = nx * inverseLength, ny * inverseLength, nz * inverseLength

				local normal = Vector(nx, ny, nz)
				normals[#normals + 1] = normal
			end

			if i % PARSE_CHECK_LINES == 0 then
				coroutine_yield(false, "Processing vertices", i * inverseLineCount)
			end

			i = i + 1
		end

		if processedLine then
			coroutine_yield(false, "Processing vertices", i * inverseLineCount)
		end

		-- Faces: f %f %f %f+
		while i <= lineCount do
			local line = lines[i]
			local matchLine = string_match(line, "^ *f +(.*)")
			if not matchLine then break end

			processedLine = true

			-- Explode line
			local parts = {}

			for part in string_gmatch(matchLine, "[^ ]+") do
				parts[#parts + 1] = part
			end

			faceLines[#faceLines + 1] = parts

			if i % PARSE_CHECK_LINES == 0 then
				coroutine_yield(false, "Processing vertices", i * inverseLineCount)
			end

			i = i + 1
		end

		if processedLine then
			coroutine_yield(false, "Processing vertices", i * inverseLineCount)
		end

		-- Something else
		if not processedLine then
			i = i + 1
		end
	end

	-- Pass 4: triangulate each face as a fan and resolve indices into the
	-- position / texcoord / normal pools.
	local faceLineCount = #faceLines
	local inverseFaceLineCount = 1 / faceLineCount
	for i = 1, #faceLines do
		local parts = faceLines [i]

		if #parts >= 3 then
			-- are they always integers?
			local v1PositionIndex, v1TexCoordIndex, v1NormalIndex = string_match(parts[1], facesMapper)
			local v3PositionIndex, v3TexCoordIndex, v3NormalIndex = string_match(parts[2], facesMapper)

			v1PositionIndex, v1TexCoordIndex, v1NormalIndex = tonumber(v1PositionIndex), tonumber(v1TexCoordIndex), tonumber(v1NormalIndex)
			v3PositionIndex, v3TexCoordIndex, v3NormalIndex = tonumber(v3PositionIndex), tonumber(v3TexCoordIndex), tonumber(v3NormalIndex)

			for i = 3, #parts do
				local v2PositionIndex, v2TexCoordIndex, v2NormalIndex = string_match(parts[i], facesMapper)
				v2PositionIndex, v2TexCoordIndex, v2NormalIndex = tonumber(v2PositionIndex), tonumber(v2TexCoordIndex), tonumber(v2NormalIndex)

				local v1 = { pos_index = nil, pos = nil, u = nil, v = nil, normal = nil, userdata = nil }
				local v2 = { pos_index = nil, pos = nil, u = nil, v = nil, normal = nil, userdata = nil }
				local v3 = { pos_index = nil, pos = nil, u = nil, v = nil, normal = nil, userdata = nil }

				v1.pos_index = v1PositionIndex
				v2.pos_index = v2PositionIndex
				v3.pos_index = v3PositionIndex

				v1.pos = positions[v1PositionIndex]
				v2.pos = positions[v2PositionIndex]
				v3.pos = positions[v3PositionIndex]

				if #texCoordsU > 0 then
					v1.u = texCoordsU[v1TexCoordIndex] or 0
					v1.v = texCoordsV[v1TexCoordIndex] or 0

					v2.u = texCoordsU[v2TexCoordIndex] or 0
					v2.v = texCoordsV[v2TexCoordIndex] or 0

					v3.u = texCoordsU[v3TexCoordIndex] or 0
					v3.v = texCoordsV[v3TexCoordIndex] or 0
				else
					v1.u, v1.v = 0, 0
					v2.u, v2.v = 0, 0
					v3.u, v3.v = 0, 0
				end

				if #normals > 0 then
					v1.normal = normals[v1NormalIndex]
					v2.normal = normals[v2NormalIndex]
					v3.normal = normals[v3NormalIndex]
				else
					v1.normal = defaultNormal
					v2.normal = defaultNormal
					v3.normal = defaultNormal
				end

				triangleList [#triangleList + 1] = v1
				triangleList [#triangleList + 1] = v2
				triangleList [#triangleList + 1] = v3

				-- Advance the fan: the current vertex becomes the previous one
				v3PositionIndex, v3TexCoordIndex, v3NormalIndex = v2PositionIndex, v2TexCoordIndex, v2NormalIndex
			end
		end

		if i % PARSE_CHECK_LINES == 0 then
			coroutine_yield(false, "Processing faces", i * inverseFaceLineCount)
		end
	end

	coroutine_yield(false, "Processing faces", faceLineCount)

	-- Optional pass 5: generate smooth normals by averaging the face
	-- normals of every triangle sharing a position index.
	if generateNormals then
		local vertexNormals = {}
		local triangleCount = #triangleList / 3
		local inverseTriangleCount = 1 / triangleCount
		for i = 1, triangleCount do
			local a, b, c = triangleList[1+(i-1)*3+0], triangleList[1+(i-1)*3+1], triangleList[1+(i-1)*3+2]
			local normal = (c.pos - a.pos):Cross(b.pos - a.pos):GetNormalized()

			vertexNormals[a.pos_index] = vertexNormals[a.pos_index] or Vector()
			vertexNormals[a.pos_index] = (vertexNormals[a.pos_index] + normal)

			vertexNormals[b.pos_index] = vertexNormals[b.pos_index] or Vector()
			vertexNormals[b.pos_index] = (vertexNormals[b.pos_index] + normal)

			vertexNormals[c.pos_index] = vertexNormals[c.pos_index] or Vector()
			vertexNormals[c.pos_index] = (vertexNormals[c.pos_index] + normal)

			if i % PARSE_CHECK_LINES == 0 then
				coroutine_yield(false, "Generating normals", i * inverseTriangleCount)
			end
		end

		coroutine_yield(false, "Generating normals", triangleCount)

		local vertexCount = #triangleList
		local inverseVertexCount = 1 / vertexCount
		for i = 1, vertexCount do
			local normal = vertexNormals[triangleList[i].pos_index] or defaultNormal
			normal:Normalize()
			normals[i] = normal
			triangleList[i].normal = normal
			coroutine_yield(false, "Normalizing normals", i * inverseVertexCount)
		end
	end

	-- Pass 6: per-vertex tangents for bump mapping.
	do
		-- Lengyel, Eric. “Computing Tangent Space Basis Vectors for an Arbitrary Mesh”. Terathon Software, 2001. http://terathon.com/code/tangent.html
		local tan1 = {}
		local tan2 = {}
		local vertexCount = #triangleList

		for i = 1, vertexCount do
			tan1[i] = Vector(0, 0, 0)
			tan2[i] = Vector(0, 0, 0)
		end

		-- Accumulate per-triangle tangent/bitangent contributions
		for i = 1, vertexCount - 2, 3 do
			local vert1, vert2, vert3 = triangleList[i], triangleList[i+1], triangleList[i+2]

			local p1, p2, p3 = vert1.pos, vert2.pos, vert3.pos
			local u1, u2, u3 = vert1.u, vert2.u, vert3.u
			local v1, v2, v3 = vert1.v, vert2.v, vert3.v

			local x1 = p2.x - p1.x;
			local x2 = p3.x - p1.x;
			local y1 = p2.y - p1.y;
			local y2 = p3.y - p1.y;
			local z1 = p2.z - p1.z;
			local z2 = p3.z - p1.z;

			local s1 = u2 - u1;
			local s2 = u3 - u1;
			local t1 = v2 - v1;
			local t2 = v3 - v1;

			local r = 1 / (s1 * t2 - s2 * t1)
			local sdir = Vector((t2 * x1 - t1 * x2) * r, (t2 * y1 - t1 * y2) * r, (t2 * z1 - t1 * z2) * r);
			local tdir = Vector((s1 * x2 - s2 * x1) * r, (s1 * y2 - s2 * y1) * r, (s1 * z2 - s2 * z1) * r);

			tan1[i]:Add(sdir)
			tan1[i+1]:Add(sdir)
			tan1[i+2]:Add(sdir)

			tan2[i]:Add(tdir)
			tan2[i+1]:Add(tdir)
			tan2[i+2]:Add(tdir)
		end

		-- Gram-Schmidt orthogonalize against the normal; w stores handedness
		local tangent = {}
		for i = 1, vertexCount do
			local n = triangleList[i].normal
			local t = tan1[i]

			local tan = (t - n * n:Dot(t))
			tan:Normalize()

			local w = (n:Cross(t)):Dot(tan2[i]) < 0 and -1 or 1

			triangleList[i].userdata = {tan[1], tan[2], tan[3], w}
		end
	end

	return triangleList
end
-- Download queuing
-- Called periodically by the urlobj_download_queue timer. Updates queue
-- item status text, aborts/retries timed-out downloads, and starts new
-- downloads up to the pac_objdl_streams limit.
function urlobj.DownloadQueueThink()
	-- Yield to the texture downloader while it is busy
	if pac.urltex and pac.urltex.Busy then return end

	for url, queueItem in pairs(urlobj.DownloadQueue) do
		if not queueItem:IsDownloading() and
			not queueItem:IsCacheDecodeFinished () then
			queueItem:SetStatus("Queued for download (" .. urlobj.DownloadQueueCount .. " items in queue)")
		end

		-- Check for download timeout
		if queueItem:IsDownloading() and
			queueItem:HasDownloadTimedOut() then
			pac.dprint("model download timed out for the %s time %q", queueItem:GetDownloadAttemptCount(), queueItem:GetUrl())

			queueItem:AbortDownload()

			if queueItem:GetDownloadAttemptCount() > 3 then
				-- Give up
				urlobj.Queue[url] = nil
				urlobj.QueueCount = urlobj.QueueCount - 1

				urlobj.DownloadQueue[url] = nil
				urlobj.DownloadQueueCount = urlobj.DownloadQueueCount - 1

				CURRENTLY_DOWNLOADING = CURRENTLY_DOWNLOADING - 1
				pac.dprint("model download timed out for good %q", url)
			else
				-- Reattempt download
				queueItem:BeginDownload()
			end
		end
	end

	-- Busy while anything (download OR parse) is still in the queue
	urlobj.Busy = next (urlobj.Queue) ~= nil
	if CURRENTLY_DOWNLOADING >= SIMULATENOUS_DOWNLOADS:GetInt() then return end

	-- Start download of next item in queue
	if next(urlobj.DownloadQueue) then
		for url, queueItem in pairs(urlobj.DownloadQueue) do
			if not queueItem:IsDownloading() then
				queueItem:BeginDownload()

				-- On completion the item leaves the download queue but stays
				-- in urlobj.Queue until parsing finishes (see GetObjFromURL)
				queueItem:AddDownloadCallback(
					function()
						urlobj.DownloadQueue[url] = nil
						urlobj.DownloadQueueCount = urlobj.DownloadQueueCount - 1
						CURRENTLY_DOWNLOADING = CURRENTLY_DOWNLOADING - 1
					end
				)

				CURRENTLY_DOWNLOADING = CURRENTLY_DOWNLOADING + 1
				pac.dprint("requesting model download %q", url)
				-- Stop once the simultaneous-download limit is reached
				if CURRENTLY_DOWNLOADING >= SIMULATENOUS_DOWNLOADS:GetInt() then return end
			end
		end
	end
end
-- Poll the download queue ten times a second.
timer.Create("urlobj_download_queue", 0.1, 0, urlobj.DownloadQueueThink)

return urlobj