# cunet.prototxt — Caffe text-format network definition (CUNet image super-resolution model).
# Input: 1x3x312x312 image; output blob: "Eltwise4".
  1. layer {
  2. name: "input"
  3. type: "Input"
  4. top: "Input1"
  5. input_param {
  6. shape {
  7. dim: 1
  8. dim: 3
  9. dim: 312
  10. dim: 312
  11. }
  12. }
  13. }
  14. layer {
  15. name: "Convolution1"
  16. type: "Convolution"
  17. bottom: "Input1"
  18. top: "Convolution1"
  19. convolution_param {
  20. num_output: 32
  21. pad: 0
  22. kernel_size: 3
  23. stride: 1
  24. }
  25. }
  26. layer {
  27. name: "ReLU1"
  28. type: "ReLU"
  29. bottom: "Convolution1"
  30. top: "Convolution1"
  31. relu_param {
  32. negative_slope: 0.1
  33. }
  34. }
  35. layer {
  36. name: "Convolution2"
  37. type: "Convolution"
  38. bottom: "Convolution1"
  39. top: "Convolution2"
  40. convolution_param {
  41. num_output: 64
  42. pad: 0
  43. kernel_size: 3
  44. stride: 1
  45. }
  46. }
  47. layer {
  48. name: "ReLU2"
  49. type: "ReLU"
  50. bottom: "Convolution2"
  51. top: "Convolution2"
  52. relu_param {
  53. negative_slope: 0.1
  54. }
  55. }
  56. layer {
  57. name: "Convolution3"
  58. type: "Convolution"
  59. bottom: "Convolution2"
  60. top: "Convolution3"
  61. convolution_param {
  62. num_output: 64
  63. pad: 0
  64. kernel_size: 2
  65. stride: 2
  66. }
  67. }
  68. layer {
  69. name: "ReLU3"
  70. type: "ReLU"
  71. bottom: "Convolution3"
  72. top: "Convolution3"
  73. relu_param {
  74. negative_slope: 0.1
  75. }
  76. }
  77. layer {
  78. name: "Convolution4"
  79. type: "Convolution"
  80. bottom: "Convolution3"
  81. top: "Convolution4"
  82. convolution_param {
  83. num_output: 128
  84. pad: 0
  85. kernel_size: 3
  86. stride: 1
  87. }
  88. }
  89. layer {
  90. name: "ReLU4"
  91. type: "ReLU"
  92. bottom: "Convolution4"
  93. top: "Convolution4"
  94. relu_param {
  95. negative_slope: 0.1
  96. }
  97. }
  98. layer {
  99. name: "Convolution5"
  100. type: "Convolution"
  101. bottom: "Convolution4"
  102. top: "Convolution5"
  103. convolution_param {
  104. num_output: 64
  105. pad: 0
  106. kernel_size: 3
  107. stride: 1
  108. }
  109. }
  110. layer {
  111. name: "ReLU5"
  112. type: "ReLU"
  113. bottom: "Convolution5"
  114. top: "Convolution5"
  115. relu_param {
  116. negative_slope: 0.1
  117. }
  118. }
  119. layer {
  120. name: "Pooling1"
  121. type: "Pooling"
  122. bottom: "Convolution5"
  123. top: "Pooling1"
  124. pooling_param {
  125. pool: AVE
  126. global_pooling: true
  127. }
  128. }
  129. layer {
  130. name: "Convolution6"
  131. type: "Convolution"
  132. bottom: "Pooling1"
  133. top: "Convolution6"
  134. convolution_param {
  135. num_output: 8
  136. pad: 0
  137. kernel_size: 1
  138. stride: 1
  139. }
  140. }
  141. layer {
  142. name: "ReLU6"
  143. type: "ReLU"
  144. bottom: "Convolution6"
  145. top: "Convolution6"
  146. }
  147. layer {
  148. name: "Convolution7"
  149. type: "Convolution"
  150. bottom: "Convolution6"
  151. top: "Convolution7"
  152. convolution_param {
  153. num_output: 64
  154. pad: 0
  155. kernel_size: 1
  156. stride: 1
  157. }
  158. }
  159. layer {
  160. name: "Sigmoid1"
  161. type: "Sigmoid"
  162. bottom: "Convolution7"
  163. top: "Convolution7"
  164. }
  165. layer {
  166. name: "Flatten1"
  167. type: "Flatten"
  168. bottom: "Convolution7"
  169. top: "Flatten1"
  170. }
  171. layer {
  172. name: "Scale1"
  173. type: "Scale"
  174. bottom: "Convolution5"
  175. bottom: "Flatten1"
  176. top: "Scale1"
  177. scale_param {
  178. axis: 0
  179. bias_term: false
  180. }
  181. }
  182. layer {
  183. name: "Deconvolution1"
  184. type: "Deconvolution"
  185. bottom: "Scale1"
  186. top: "Deconvolution1"
  187. convolution_param {
  188. num_output: 64
  189. pad: 0
  190. kernel_size: 2
  191. stride: 2
  192. }
  193. }
  194. layer {
  195. name: "ReLU7"
  196. type: "ReLU"
  197. bottom: "Deconvolution1"
  198. top: "Deconvolution1"
  199. relu_param {
  200. negative_slope: 0.1
  201. }
  202. }
  203. layer {
  204. name: "Crop1"
  205. type: "Crop"
  206. bottom: "Convolution2"
  207. bottom: "Deconvolution1"
  208. top: "Crop1"
  209. crop_param {
  210. axis: 2
  211. offset: 4
  212. }
  213. }
  214. layer {
  215. name: "Eltwise1"
  216. type: "Eltwise"
  217. bottom: "Crop1"
  218. bottom: "Deconvolution1"
  219. top: "Eltwise1"
  220. eltwise_param {
  221. operation: SUM
  222. }
  223. }
  224. layer {
  225. name: "Convolution8"
  226. type: "Convolution"
  227. bottom: "Eltwise1"
  228. top: "Convolution8"
  229. convolution_param {
  230. num_output: 64
  231. pad: 0
  232. kernel_size: 3
  233. stride: 1
  234. }
  235. }
  236. layer {
  237. name: "ReLU8"
  238. type: "ReLU"
  239. bottom: "Convolution8"
  240. top: "Convolution8"
  241. relu_param {
  242. negative_slope: 0.1
  243. }
  244. }
  245. layer {
  246. name: "Convolution9"
  247. type: "Convolution"
  248. bottom: "Convolution8"
  249. top: "Convolution9"
  250. convolution_param {
  251. num_output: 3
  252. pad: 0
  253. kernel_size: 3
  254. stride: 1
  255. }
  256. }
  257. layer {
  258. name: "Convolution10"
  259. type: "Convolution"
  260. bottom: "Convolution9"
  261. top: "Convolution10"
  262. convolution_param {
  263. num_output: 32
  264. pad: 0
  265. kernel_size: 3
  266. stride: 1
  267. }
  268. }
  269. layer {
  270. name: "ReLU9"
  271. type: "ReLU"
  272. bottom: "Convolution10"
  273. top: "Convolution10"
  274. relu_param {
  275. negative_slope: 0.1
  276. }
  277. }
  278. layer {
  279. name: "Convolution11"
  280. type: "Convolution"
  281. bottom: "Convolution10"
  282. top: "Convolution11"
  283. convolution_param {
  284. num_output: 64
  285. pad: 0
  286. kernel_size: 3
  287. stride: 1
  288. }
  289. }
  290. layer {
  291. name: "ReLU10"
  292. type: "ReLU"
  293. bottom: "Convolution11"
  294. top: "Convolution11"
  295. relu_param {
  296. negative_slope: 0.1
  297. }
  298. }
  299. layer {
  300. name: "Convolution12"
  301. type: "Convolution"
  302. bottom: "Convolution11"
  303. top: "Convolution12"
  304. convolution_param {
  305. num_output: 64
  306. pad: 0
  307. kernel_size: 2
  308. stride: 2
  309. }
  310. }
  311. layer {
  312. name: "ReLU11"
  313. type: "ReLU"
  314. bottom: "Convolution12"
  315. top: "Convolution12"
  316. relu_param {
  317. negative_slope: 0.1
  318. }
  319. }
  320. layer {
  321. name: "Convolution13"
  322. type: "Convolution"
  323. bottom: "Convolution12"
  324. top: "Convolution13"
  325. convolution_param {
  326. num_output: 64
  327. pad: 0
  328. kernel_size: 3
  329. stride: 1
  330. }
  331. }
  332. layer {
  333. name: "ReLU12"
  334. type: "ReLU"
  335. bottom: "Convolution13"
  336. top: "Convolution13"
  337. relu_param {
  338. negative_slope: 0.1
  339. }
  340. }
  341. layer {
  342. name: "Convolution14"
  343. type: "Convolution"
  344. bottom: "Convolution13"
  345. top: "Convolution14"
  346. convolution_param {
  347. num_output: 128
  348. pad: 0
  349. kernel_size: 3
  350. stride: 1
  351. }
  352. }
  353. layer {
  354. name: "ReLU13"
  355. type: "ReLU"
  356. bottom: "Convolution14"
  357. top: "Convolution14"
  358. relu_param {
  359. negative_slope: 0.1
  360. }
  361. }
  362. layer {
  363. name: "Pooling2"
  364. type: "Pooling"
  365. bottom: "Convolution14"
  366. top: "Pooling2"
  367. pooling_param {
  368. pool: AVE
  369. global_pooling: true
  370. }
  371. }
  372. layer {
  373. name: "Convolution15"
  374. type: "Convolution"
  375. bottom: "Pooling2"
  376. top: "Convolution15"
  377. convolution_param {
  378. num_output: 16
  379. pad: 0
  380. kernel_size: 1
  381. stride: 1
  382. }
  383. }
  384. layer {
  385. name: "ReLU14"
  386. type: "ReLU"
  387. bottom: "Convolution15"
  388. top: "Convolution15"
  389. }
  390. layer {
  391. name: "Convolution16"
  392. type: "Convolution"
  393. bottom: "Convolution15"
  394. top: "Convolution16"
  395. convolution_param {
  396. num_output: 128
  397. pad: 0
  398. kernel_size: 1
  399. stride: 1
  400. }
  401. }
  402. layer {
  403. name: "Sigmoid2"
  404. type: "Sigmoid"
  405. bottom: "Convolution16"
  406. top: "Convolution16"
  407. }
  408. layer {
  409. name: "Flatten2"
  410. type: "Flatten"
  411. bottom: "Convolution16"
  412. top: "Flatten2"
  413. }
  414. layer {
  415. name: "Scale2"
  416. type: "Scale"
  417. bottom: "Convolution14"
  418. bottom: "Flatten2"
  419. top: "Scale2"
  420. scale_param {
  421. axis: 0
  422. bias_term: false
  423. }
  424. }
  425. layer {
  426. name: "Convolution17"
  427. type: "Convolution"
  428. bottom: "Scale2"
  429. top: "Convolution17"
  430. convolution_param {
  431. num_output: 128
  432. pad: 0
  433. kernel_size: 2
  434. stride: 2
  435. }
  436. }
  437. layer {
  438. name: "ReLU15"
  439. type: "ReLU"
  440. bottom: "Convolution17"
  441. top: "Convolution17"
  442. relu_param {
  443. negative_slope: 0.1
  444. }
  445. }
  446. layer {
  447. name: "Convolution18"
  448. type: "Convolution"
  449. bottom: "Convolution17"
  450. top: "Convolution18"
  451. convolution_param {
  452. num_output: 256
  453. pad: 0
  454. kernel_size: 3
  455. stride: 1
  456. }
  457. }
  458. layer {
  459. name: "ReLU16"
  460. type: "ReLU"
  461. bottom: "Convolution18"
  462. top: "Convolution18"
  463. relu_param {
  464. negative_slope: 0.1
  465. }
  466. }
  467. layer {
  468. name: "Convolution19"
  469. type: "Convolution"
  470. bottom: "Convolution18"
  471. top: "Convolution19"
  472. convolution_param {
  473. num_output: 128
  474. pad: 0
  475. kernel_size: 3
  476. stride: 1
  477. }
  478. }
  479. layer {
  480. name: "ReLU17"
  481. type: "ReLU"
  482. bottom: "Convolution19"
  483. top: "Convolution19"
  484. relu_param {
  485. negative_slope: 0.1
  486. }
  487. }
  488. layer {
  489. name: "Pooling3"
  490. type: "Pooling"
  491. bottom: "Convolution19"
  492. top: "Pooling3"
  493. pooling_param {
  494. pool: AVE
  495. global_pooling: true
  496. }
  497. }
  498. layer {
  499. name: "Convolution20"
  500. type: "Convolution"
  501. bottom: "Pooling3"
  502. top: "Convolution20"
  503. convolution_param {
  504. num_output: 16
  505. pad: 0
  506. kernel_size: 1
  507. stride: 1
  508. }
  509. }
  510. layer {
  511. name: "ReLU18"
  512. type: "ReLU"
  513. bottom: "Convolution20"
  514. top: "Convolution20"
  515. }
  516. layer {
  517. name: "Convolution21"
  518. type: "Convolution"
  519. bottom: "Convolution20"
  520. top: "Convolution21"
  521. convolution_param {
  522. num_output: 128
  523. pad: 0
  524. kernel_size: 1
  525. stride: 1
  526. }
  527. }
  528. layer {
  529. name: "Sigmoid3"
  530. type: "Sigmoid"
  531. bottom: "Convolution21"
  532. top: "Convolution21"
  533. }
  534. layer {
  535. name: "Flatten3"
  536. type: "Flatten"
  537. bottom: "Convolution21"
  538. top: "Flatten3"
  539. }
  540. layer {
  541. name: "Scale3"
  542. type: "Scale"
  543. bottom: "Convolution19"
  544. bottom: "Flatten3"
  545. top: "Scale3"
  546. scale_param {
  547. axis: 0
  548. bias_term: false
  549. }
  550. }
  551. layer {
  552. name: "Deconvolution2"
  553. type: "Deconvolution"
  554. bottom: "Scale3"
  555. top: "Deconvolution2"
  556. convolution_param {
  557. num_output: 128
  558. pad: 0
  559. kernel_size: 2
  560. stride: 2
  561. }
  562. }
  563. layer {
  564. name: "ReLU19"
  565. type: "ReLU"
  566. bottom: "Deconvolution2"
  567. top: "Deconvolution2"
  568. relu_param {
  569. negative_slope: 0.1
  570. }
  571. }
  572. layer {
  573. name: "Crop2"
  574. type: "Crop"
  575. bottom: "Scale2"
  576. bottom: "Deconvolution2"
  577. top: "Crop2"
  578. crop_param {
  579. axis: 2
  580. offset: 4
  581. }
  582. }
  583. layer {
  584. name: "Eltwise2"
  585. type: "Eltwise"
  586. bottom: "Crop2"
  587. bottom: "Deconvolution2"
  588. top: "Eltwise2"
  589. eltwise_param {
  590. operation: SUM
  591. }
  592. }
  593. layer {
  594. name: "Convolution22"
  595. type: "Convolution"
  596. bottom: "Eltwise2"
  597. top: "Convolution22"
  598. convolution_param {
  599. num_output: 64
  600. pad: 0
  601. kernel_size: 3
  602. stride: 1
  603. }
  604. }
  605. layer {
  606. name: "ReLU20"
  607. type: "ReLU"
  608. bottom: "Convolution22"
  609. top: "Convolution22"
  610. relu_param {
  611. negative_slope: 0.1
  612. }
  613. }
  614. layer {
  615. name: "Convolution23"
  616. type: "Convolution"
  617. bottom: "Convolution22"
  618. top: "Convolution23"
  619. convolution_param {
  620. num_output: 64
  621. pad: 0
  622. kernel_size: 3
  623. stride: 1
  624. }
  625. }
  626. layer {
  627. name: "ReLU21"
  628. type: "ReLU"
  629. bottom: "Convolution23"
  630. top: "Convolution23"
  631. relu_param {
  632. negative_slope: 0.1
  633. }
  634. }
  635. layer {
  636. name: "Pooling4"
  637. type: "Pooling"
  638. bottom: "Convolution23"
  639. top: "Pooling4"
  640. pooling_param {
  641. pool: AVE
  642. global_pooling: true
  643. }
  644. }
  645. layer {
  646. name: "Convolution24"
  647. type: "Convolution"
  648. bottom: "Pooling4"
  649. top: "Convolution24"
  650. convolution_param {
  651. num_output: 8
  652. pad: 0
  653. kernel_size: 1
  654. stride: 1
  655. }
  656. }
  657. layer {
  658. name: "ReLU22"
  659. type: "ReLU"
  660. bottom: "Convolution24"
  661. top: "Convolution24"
  662. }
  663. layer {
  664. name: "Convolution25"
  665. type: "Convolution"
  666. bottom: "Convolution24"
  667. top: "Convolution25"
  668. convolution_param {
  669. num_output: 64
  670. pad: 0
  671. kernel_size: 1
  672. stride: 1
  673. }
  674. }
  675. layer {
  676. name: "Sigmoid4"
  677. type: "Sigmoid"
  678. bottom: "Convolution25"
  679. top: "Convolution25"
  680. }
  681. layer {
  682. name: "Flatten4"
  683. type: "Flatten"
  684. bottom: "Convolution25"
  685. top: "Flatten4"
  686. }
  687. layer {
  688. name: "Scale4"
  689. type: "Scale"
  690. bottom: "Convolution23"
  691. bottom: "Flatten4"
  692. top: "Scale4"
  693. scale_param {
  694. axis: 0
  695. bias_term: false
  696. }
  697. }
  698. layer {
  699. name: "Deconvolution3"
  700. type: "Deconvolution"
  701. bottom: "Scale4"
  702. top: "Deconvolution3"
  703. convolution_param {
  704. num_output: 64
  705. pad: 0
  706. kernel_size: 2
  707. stride: 2
  708. }
  709. }
  710. layer {
  711. name: "ReLU23"
  712. type: "ReLU"
  713. bottom: "Deconvolution3"
  714. top: "Deconvolution3"
  715. relu_param {
  716. negative_slope: 0.1
  717. }
  718. }
  719. layer {
  720. name: "Crop3"
  721. type: "Crop"
  722. bottom: "Convolution11"
  723. bottom: "Deconvolution3"
  724. top: "Crop3"
  725. crop_param {
  726. axis: 2
  727. offset: 16
  728. }
  729. }
  730. layer {
  731. name: "Eltwise3"
  732. type: "Eltwise"
  733. bottom: "Crop3"
  734. bottom: "Deconvolution3"
  735. top: "Eltwise3"
  736. eltwise_param {
  737. operation: SUM
  738. }
  739. }
  740. layer {
  741. name: "Convolution26"
  742. type: "Convolution"
  743. bottom: "Eltwise3"
  744. top: "Convolution26"
  745. convolution_param {
  746. num_output: 64
  747. pad: 0
  748. kernel_size: 3
  749. stride: 1
  750. }
  751. }
  752. layer {
  753. name: "ReLU24"
  754. type: "ReLU"
  755. bottom: "Convolution26"
  756. top: "Convolution26"
  757. relu_param {
  758. negative_slope: 0.1
  759. }
  760. }
  761. layer {
  762. name: "Convolution27"
  763. type: "Convolution"
  764. bottom: "Convolution26"
  765. top: "Convolution27"
  766. convolution_param {
  767. num_output: 3
  768. pad: 0
  769. kernel_size: 3
  770. stride: 1
  771. }
  772. }
  773. layer {
  774. name: "Crop4"
  775. type: "Crop"
  776. bottom: "Convolution9"
  777. bottom: "Convolution27"
  778. top: "Crop4"
  779. crop_param {
  780. axis: 2
  781. offset: 20
  782. }
  783. }
  784. layer {
  785. name: "Eltwise4"
  786. type: "Eltwise"
  787. bottom: "Crop4"
  788. bottom: "Convolution27"
  789. top: "Eltwise4"
  790. eltwise_param {
  791. operation: SUM
  792. }
  793. }