export_model.lua

-- adapted from https://github.com/marcan/cl-waifu2x
require 'pl'
local __FILE__ = (function() return string.gsub(debug.getinfo(2, 'S').source, "^@", "") end)()
package.path = path.join(path.dirname(__FILE__), "..", "lib", "?.lua;") .. package.path
require 'w2nn'
local cjson = require "cjson"
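
-- Collects model metadata: copies every "w2nn_"-prefixed field of the model
-- table (with the prefix stripped) and records the model file's modification
-- time as a UTC "created_at" timestamp.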
local function meta_data(model, model_path)
   local meta = {}
   for k, v in pairs(model) do
      if k:match("w2nn_") then
         meta[k:gsub("w2nn_", "")] = v
      end
   end
   local modtime = file.modified_time(model_path)
   local utc_date = Date('utc')
   utc_date:set(modtime)
   meta["created_at"] = tostring(utc_date)
   return meta
end
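
-- Returns true if the string s appears in the array a.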
local function includes(s, a)
   for i = 1, #a do
      if s == a[i] then
         return true
      end
   end
   return false
end
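
-- Returns the layer's bias as a FloatTensor, or an all-zero vector of length
-- nOutputPlane when the layer has no bias.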
local function get_bias(mod)
   if mod.bias then
      return mod.bias:float()
   else
      -- no bias
      return torch.FloatTensor(mod.nOutputPlane):zero()
   end
end
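
-- Recursively walks a container (nn.Sequential / nn.ConcatTable) and appends
-- one plain-Lua table per nn.Linear or supported convolution layer to jmodules.
-- Full (transposed) convolution weights are reshaped to
-- (nInputPlane, nOutputPlane, kH, kW); regular convolutions to
-- (nOutputPlane, nInputPlane, kH, kW).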
local function export_weight(jmodules, seq)
   local convolutions = {
      "nn.SpatialConvolutionMM",
      "cudnn.SpatialConvolution",
      "cudnn.SpatialDilatedConvolution",
      "nn.SpatialFullConvolution",
      "nn.SpatialDilatedConvolution",
      "cudnn.SpatialFullConvolution"
   }
   for k = 1, #seq.modules do
      local mod = seq.modules[k]
      local name = torch.typename(mod)
      if name == "nn.Sequential" or name == "nn.ConcatTable" then
         export_weight(jmodules, mod)
      elseif name == "nn.Linear" then
         local weight = torch.totable(mod.weight:float())
         local jmod = {
            class_name = name,
            nInputPlane = mod.weight:size(2),
            nOutputPlane = mod.weight:size(1),
            bias = torch.totable(get_bias(mod)),
            weight = weight
         }
         table.insert(jmodules, jmod)
      elseif includes(name, convolutions) then
         local weight = mod.weight:float()
         if name:match("FullConvolution") then
            weight = torch.totable(weight:reshape(mod.nInputPlane, mod.nOutputPlane, mod.kH, mod.kW))
         else
            weight = torch.totable(weight:reshape(mod.nOutputPlane, mod.nInputPlane, mod.kH, mod.kW))
         end
         local jmod = {
            class_name = name,
            kW = mod.kW,
            kH = mod.kH,
            dH = mod.dH,
            dW = mod.dW,
            padW = mod.padW,
            padH = mod.padH,
            dilationW = mod.dilationW,
            dilationH = mod.dilationH,
            nInputPlane = mod.nInputPlane,
            nOutputPlane = mod.nOutputPlane,
            bias = torch.totable(get_bias(mod)),
            weight = weight
         }
         table.insert(jmodules, jmod)
      end
   end
end
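
-- Serializes the whole model to a JSON file: a flat array of layer tables,
-- with the model metadata attached to the first entry as "model_config".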
local function export(model, model_path, output)
   local jmodules = {}
   local model_config = meta_data(model, model_path)
   local first_layer = true
   print(model_config)
   print(model)
   export_weight(jmodules, model)
   jmodules[1]["model_config"] = model_config
   local fp = io.open(output, "w")
   if not fp then
      error("IO Error: " .. output)
   end
   fp:write(cjson.encode(jmodules))
   fp:close()
end
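
-- Command-line interface. Example invocation with the default option values:
--   th export_model.lua -i input.t7 -o output.json -iformat ascii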
local cmd = torch.CmdLine()
cmd:text()
cmd:text("waifu2x export model")
cmd:text("Options:")
cmd:option("-i", "input.t7", 'Specify the input torch model')
cmd:option("-o", "output.json", 'Specify the output json file')
cmd:option("-iformat", "ascii", 'Specify the input format (ascii|binary)')
local opt = cmd:parse(arg)
if not path.isfile(opt.i) then
   cmd:help()
   os.exit(-1)
end
local model = torch.load(opt.i, opt.iformat)
export(model, opt.i, opt.o)