vulkan.go

// SPDX-License-Identifier: Unlicense OR MIT

//go:build (linux || freebsd) && !novulkan
// +build linux freebsd
// +build !novulkan

package vulkan

import (
	"errors"
	"fmt"
	"image"
	"math/bits"

	"gioui.org/gpu/internal/driver"
	"gioui.org/internal/vk"
	"gioui.org/shader"
)

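// Backend implements driver.Device on top of Vulkan. It owns the command
// pool, the staging buffer and the synchronization primitives shared by
// every resource created from it.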
type Backend struct {
	physDev vk.PhysicalDevice
	dev     vk.Device
	queue   vk.Queue
	cmdPool struct {
		current vk.CommandBuffer
		pool    vk.CommandPool
		used    int
		buffers []vk.CommandBuffer
	}
	outFormat vk.Format
	staging   struct {
		buf  *Buffer
		mem  []byte
		size int
		cap  int
	}
	defers     []func(d vk.Device)
	frameSig   vk.Semaphore
	frameFence vk.Fence
	waitSems   []vk.Semaphore
	waitStages []vk.PipelineStageFlags
	sigSems    []vk.Semaphore
	fence      vk.Fence
	allPipes   []*Pipeline
	pipe       *Pipeline
	passes     map[passKey]vk.RenderPass
	// bindings and offsets are temporary storage for BindVertexBuffer.
	bindings []vk.Buffer
	offsets  []vk.DeviceSize
	desc     struct {
		dirty    bool
		texBinds [texUnits]*Texture
		bufBinds [storageUnits]*Buffer
	}
	caps driver.Features
}

type passKey struct {
	fmt         vk.Format
	loadAct     vk.AttachmentLoadOp
	initLayout  vk.ImageLayout
	finalLayout vk.ImageLayout
}

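// Texture wraps a Vulkan image together with its view, sampler and
// framebuffer. layout tracks the image layout as of the most recent
// barrier; scope records the pipeline stage and access flags of the last
// use, for constructing the next barrier.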
type Texture struct {
	backend    *Backend
	img        vk.Image
	mem        vk.DeviceMemory
	view       vk.ImageView
	sampler    vk.Sampler
	fbo        vk.Framebuffer
	format     vk.Format
	mipmaps    int
	layout     vk.ImageLayout
	passLayout vk.ImageLayout
	width      int
	height     int
	acquire    vk.Semaphore
	foreign    bool
	scope      struct {
		stage  vk.PipelineStageFlags
		access vk.AccessFlags
	}
}

type Shader struct {
	dev       vk.Device
	module    vk.ShaderModule
	pushRange vk.PushConstantRange
	src       shader.Sources
}

type Pipeline struct {
	backend    *Backend
	pipe       vk.Pipeline
	pushRanges []vk.PushConstantRange
	ninputs    int
	desc       *descPool
}

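// descPool hands out descriptor sets from a pool that is grown on demand
// and rewound at the start of every frame (see resetPipes).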
type descPool struct {
	layout     vk.PipelineLayout
	descLayout vk.DescriptorSetLayout
	pool       vk.DescriptorPool
	sets       []vk.DescriptorSet
	size       int
	texBinds   []int
	imgBinds   []int
	bufBinds   []int
}

type Buffer struct {
	backend *Backend
	buf     vk.Buffer
	store   []byte
	mem     vk.DeviceMemory
	usage   vk.BufferUsageFlags
	scope   struct {
		stage  vk.PipelineStageFlags
		access vk.AccessFlags
	}
}

const (
	texUnits     = 4
	storageUnits = 4
)

func init() {
	driver.NewVulkanDevice = newVulkanDevice
}

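// newVulkanDevice constructs a Backend from an already initialized Vulkan
// device and queue, probing format support to derive the feature set.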
func newVulkanDevice(api driver.Vulkan) (driver.Device, error) {
	b := &Backend{
		physDev:   vk.PhysicalDevice(api.PhysDevice),
		dev:       vk.Device(api.Device),
		outFormat: vk.Format(api.Format),
		caps:      driver.FeatureCompute,
		passes:    make(map[passKey]vk.RenderPass),
	}
	b.queue = vk.GetDeviceQueue(b.dev, api.QueueFamily, api.QueueIndex)
	cmdPool, err := vk.CreateCommandPool(b.dev, api.QueueFamily)
	if err != nil {
		return nil, mapErr(err)
	}
	b.cmdPool.pool = cmdPool
	props := vk.GetPhysicalDeviceFormatProperties(b.physDev, vk.FORMAT_R16_SFLOAT)
	reqs := vk.FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | vk.FORMAT_FEATURE_SAMPLED_IMAGE_BIT
	if props&reqs == reqs {
		b.caps |= driver.FeatureFloatRenderTargets
	}
	reqs = vk.FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT | vk.FORMAT_FEATURE_SAMPLED_IMAGE_BIT | vk.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
	props = vk.GetPhysicalDeviceFormatProperties(b.physDev, vk.FORMAT_R8G8B8A8_SRGB)
	if props&reqs == reqs {
		b.caps |= driver.FeatureSRGB
	}
	fence, err := vk.CreateFence(b.dev, 0)
	if err != nil {
		return nil, mapErr(err)
	}
	b.fence = fence
	return b, nil
}

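// BeginFrame resets per-frame state and wraps the render target in a
// Texture. Foreign swapchain images arrive with their layout, semaphores
// and fence supplied by the caller through driver.VulkanRenderTarget.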
func (b *Backend) BeginFrame(target driver.RenderTarget, clear bool, viewport image.Point) driver.Texture {
	b.staging.size = 0
	b.cmdPool.used = 0
	b.runDefers()
	b.resetPipes()
	if target == nil {
		return nil
	}
	switch t := target.(type) {
	case driver.VulkanRenderTarget:
		layout := vk.IMAGE_LAYOUT_UNDEFINED
		if !clear {
			layout = vk.IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
		}
		b.frameSig = vk.Semaphore(t.SignalSem)
		b.frameFence = vk.Fence(t.Fence)
		tex := &Texture{
			img:        vk.Image(t.Image),
			fbo:        vk.Framebuffer(t.Framebuffer),
			width:      viewport.X,
			height:     viewport.Y,
			layout:     layout,
			passLayout: vk.IMAGE_LAYOUT_PRESENT_SRC_KHR,
			format:     b.outFormat,
			acquire:    vk.Semaphore(t.WaitSem),
			foreign:    true,
		}
		return tex
	case *Texture:
		return t
	default:
		panic(fmt.Sprintf("vulkan: unsupported render target type: %T", t))
	}
}

func (b *Backend) deferFunc(f func(d vk.Device)) {
	b.defers = append(b.defers, f)
}

func (b *Backend) runDefers() {
	for _, f := range b.defers {
		f(b.dev)
	}
	b.defers = b.defers[:0]
}

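// resetPipes drops pipelines released during the previous frame and
// rewinds every descriptor pool so its sets can be reused.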
func (b *Backend) resetPipes() {
	for i := len(b.allPipes) - 1; i >= 0; i-- {
		p := b.allPipes[i]
		if p.pipe == 0 {
			// Released pipeline; remove it from the list.
			b.allPipes = append(b.allPipes[:i], b.allPipes[i+1:]...)
			continue
		}
		if p.desc.size > 0 {
			p.desc.size = 0
		}
	}
}

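// EndFrame submits the frame's commands. When the caller did not provide
// a fence through BeginFrame, EndFrame blocks on the backend's own fence
// to stay internally synchronized.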
func (b *Backend) EndFrame() {
	if b.frameSig != 0 {
		b.sigSems = append(b.sigSems, b.frameSig)
		b.frameSig = 0
	}
	fence := b.frameFence
	if fence == 0 {
		// We're internally synchronized.
		fence = b.fence
	}
	b.submitCmdBuf(fence)
	if b.frameFence == 0 {
		vk.WaitForFences(b.dev, fence)
		vk.ResetFences(b.dev, fence)
	}
}

func (b *Backend) Caps() driver.Caps {
	return driver.Caps{
		MaxTextureSize: 4096,
		Features:       b.caps,
	}
}

func (b *Backend) NewTimer() driver.Timer {
	panic("timers not supported")
}

func (b *Backend) IsTimeContinuous() bool {
	panic("timers not supported")
}

func (b *Backend) Release() {
	vk.DeviceWaitIdle(b.dev)
	if buf := b.staging.buf; buf != nil {
		vk.UnmapMemory(b.dev, b.staging.buf.mem)
		buf.Release()
	}
	b.runDefers()
	for _, rp := range b.passes {
		vk.DestroyRenderPass(b.dev, rp)
	}
	vk.DestroyFence(b.dev, b.fence)
	vk.FreeCommandBuffers(b.dev, b.cmdPool.pool, b.cmdPool.buffers...)
	vk.DestroyCommandPool(b.dev, b.cmdPool.pool)
	*b = Backend{}
}

func (b *Backend) NewTexture(format driver.TextureFormat, width, height int, minFilter, magFilter driver.TextureFilter, bindings driver.BufferBinding) (driver.Texture, error) {
	vkfmt := formatFor(format)
	usage := vk.IMAGE_USAGE_TRANSFER_DST_BIT | vk.IMAGE_USAGE_TRANSFER_SRC_BIT
	passLayout := vk.IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
	if bindings&driver.BufferBindingTexture != 0 {
		usage |= vk.IMAGE_USAGE_SAMPLED_BIT
		passLayout = vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
	}
	if bindings&driver.BufferBindingFramebuffer != 0 {
		usage |= vk.IMAGE_USAGE_COLOR_ATTACHMENT_BIT
	}
	if bindings&(driver.BufferBindingShaderStorageRead|driver.BufferBindingShaderStorageWrite) != 0 {
		usage |= vk.IMAGE_USAGE_STORAGE_BIT
	}
	filterFor := func(f driver.TextureFilter) vk.Filter {
		switch f {
		case driver.FilterLinear, driver.FilterLinearMipmapLinear:
			return vk.FILTER_LINEAR
		case driver.FilterNearest:
			return vk.FILTER_NEAREST
		}
		panic("unknown filter")
	}
	mipmapMode := vk.SAMPLER_MIPMAP_MODE_NEAREST
	mipmap := minFilter == driver.FilterLinearMipmapLinear
	nmipmaps := 1
	if mipmap {
		mipmapMode = vk.SAMPLER_MIPMAP_MODE_LINEAR
		dim := width
		if height > dim {
			dim = height
		}
		log2 := 32 - bits.LeadingZeros32(uint32(dim)) - 1
		nmipmaps = log2 + 1
	}
	sampler, err := vk.CreateSampler(b.dev, filterFor(minFilter), filterFor(magFilter), mipmapMode)
	if err != nil {
		return nil, mapErr(err)
	}
	img, mem, err := vk.CreateImage(b.physDev, b.dev, vkfmt, width, height, nmipmaps, usage)
	if err != nil {
		vk.DestroySampler(b.dev, sampler)
		return nil, mapErr(err)
	}
	view, err := vk.CreateImageView(b.dev, img, vkfmt)
	if err != nil {
		vk.DestroySampler(b.dev, sampler)
		vk.DestroyImage(b.dev, img)
		vk.FreeMemory(b.dev, mem)
		return nil, mapErr(err)
	}
	t := &Texture{backend: b, img: img, mem: mem, view: view, sampler: sampler, layout: vk.IMAGE_LAYOUT_UNDEFINED, passLayout: passLayout, width: width, height: height, format: vkfmt, mipmaps: nmipmaps}
	if bindings&driver.BufferBindingFramebuffer != 0 {
		pass, err := vk.CreateRenderPass(b.dev, vkfmt, vk.ATTACHMENT_LOAD_OP_DONT_CARE,
			vk.IMAGE_LAYOUT_UNDEFINED, vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, nil)
		if err != nil {
			return nil, mapErr(err)
		}
		defer vk.DestroyRenderPass(b.dev, pass)
		fbo, err := vk.CreateFramebuffer(b.dev, pass, view, width, height)
		if err != nil {
			return nil, mapErr(err)
		}
		t.fbo = fbo
	}
	return t, nil
}

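// NewBuffer creates a device-local buffer with usage flags derived from
// bindings. Uniform buffers are not backed by device memory at all; they
// are CPU-side stores whose contents BindUniforms delivers to the
// pipeline as push constants.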
func (b *Backend) NewBuffer(bindings driver.BufferBinding, size int) (driver.Buffer, error) {
	if bindings&driver.BufferBindingUniforms != 0 {
		// Implement uniform buffers as inline push constants.
		return &Buffer{store: make([]byte, size)}, nil
	}
	usage := vk.BUFFER_USAGE_TRANSFER_DST_BIT | vk.BUFFER_USAGE_TRANSFER_SRC_BIT
	if bindings&driver.BufferBindingIndices != 0 {
		usage |= vk.BUFFER_USAGE_INDEX_BUFFER_BIT
	}
	if bindings&(driver.BufferBindingShaderStorageRead|driver.BufferBindingShaderStorageWrite) != 0 {
		usage |= vk.BUFFER_USAGE_STORAGE_BUFFER_BIT
	}
	if bindings&driver.BufferBindingVertices != 0 {
		usage |= vk.BUFFER_USAGE_VERTEX_BUFFER_BIT
	}
	buf, err := b.newBuffer(size, usage, vk.MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
	return buf, mapErr(err)
}

func (b *Backend) newBuffer(size int, usage vk.BufferUsageFlags, props vk.MemoryPropertyFlags) (*Buffer, error) {
	buf, mem, err := vk.CreateBuffer(b.physDev, b.dev, size, usage, props)
	return &Buffer{backend: b, buf: buf, mem: mem, usage: usage}, err
}

func (b *Backend) NewImmutableBuffer(typ driver.BufferBinding, data []byte) (driver.Buffer, error) {
	buf, err := b.NewBuffer(typ, len(data))
	if err != nil {
		return nil, err
	}
	buf.Upload(data)
	return buf, nil
}

func (b *Backend) NewVertexShader(src shader.Sources) (driver.VertexShader, error) {
	sh, err := b.newShader(src, vk.SHADER_STAGE_VERTEX_BIT)
	return sh, mapErr(err)
}

func (b *Backend) NewFragmentShader(src shader.Sources) (driver.FragmentShader, error) {
	sh, err := b.newShader(src, vk.SHADER_STAGE_FRAGMENT_BIT)
	return sh, mapErr(err)
}

func (b *Backend) NewPipeline(desc driver.PipelineDesc) (driver.Pipeline, error) {
	vs := desc.VertexShader.(*Shader)
	fs := desc.FragmentShader.(*Shader)
	var ranges []vk.PushConstantRange
	if r := vs.pushRange; r != (vk.PushConstantRange{}) {
		ranges = append(ranges, r)
	}
	if r := fs.pushRange; r != (vk.PushConstantRange{}) {
		ranges = append(ranges, r)
	}
	descPool, err := createPipelineLayout(b.dev, fs.src, ranges)
	if err != nil {
		return nil, mapErr(err)
	}
	blend := desc.BlendDesc
	factorFor := func(f driver.BlendFactor) vk.BlendFactor {
		switch f {
		case driver.BlendFactorZero:
			return vk.BLEND_FACTOR_ZERO
		case driver.BlendFactorOne:
			return vk.BLEND_FACTOR_ONE
		case driver.BlendFactorOneMinusSrcAlpha:
			return vk.BLEND_FACTOR_ONE_MINUS_SRC_ALPHA
		case driver.BlendFactorDstColor:
			return vk.BLEND_FACTOR_DST_COLOR
		default:
			panic("unknown blend factor")
		}
	}
	var top vk.PrimitiveTopology
	switch desc.Topology {
	case driver.TopologyTriangles:
		top = vk.PRIMITIVE_TOPOLOGY_TRIANGLE_LIST
	case driver.TopologyTriangleStrip:
		top = vk.PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
	default:
		panic("unknown topology")
	}
	var binds []vk.VertexInputBindingDescription
	var attrs []vk.VertexInputAttributeDescription
	inputs := desc.VertexLayout.Inputs
	for i, inp := range inputs {
		binds = append(binds, vk.VertexInputBindingDescription{
			Binding: i,
			Stride:  desc.VertexLayout.Stride,
		})
		attrs = append(attrs, vk.VertexInputAttributeDescription{
			Binding:  i,
			Location: vs.src.Inputs[i].Location,
			Format:   vertFormatFor(vs.src.Inputs[i]),
			Offset:   inp.Offset,
		})
	}
	fmt := b.outFormat
	if f := desc.PixelFormat; f != driver.TextureFormatOutput {
		fmt = formatFor(f)
	}
	pass, err := vk.CreateRenderPass(b.dev, fmt, vk.ATTACHMENT_LOAD_OP_DONT_CARE,
		vk.IMAGE_LAYOUT_UNDEFINED, vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, nil)
	if err != nil {
		return nil, mapErr(err)
	}
	defer vk.DestroyRenderPass(b.dev, pass)
	pipe, err := vk.CreateGraphicsPipeline(b.dev, pass, vs.module, fs.module, blend.Enable, factorFor(blend.SrcFactor), factorFor(blend.DstFactor), top, binds, attrs, descPool.layout)
	if err != nil {
		descPool.release(b.dev)
		return nil, mapErr(err)
	}
	p := &Pipeline{backend: b, pipe: pipe, desc: descPool, pushRanges: ranges, ninputs: len(inputs)}
	b.allPipes = append(b.allPipes, p)
	return p, nil
}

func (b *Backend) NewComputeProgram(src shader.Sources) (driver.Program, error) {
	sh, err := b.newShader(src, vk.SHADER_STAGE_COMPUTE_BIT)
	if err != nil {
		return nil, mapErr(err)
	}
	defer sh.Release()
	descPool, err := createPipelineLayout(b.dev, src, nil)
	if err != nil {
		return nil, mapErr(err)
	}
	pipe, err := vk.CreateComputePipeline(b.dev, sh.module, descPool.layout)
	if err != nil {
		descPool.release(b.dev)
		return nil, mapErr(err)
	}
	return &Pipeline{backend: b, pipe: pipe, desc: descPool}, nil
}

func vertFormatFor(f shader.InputLocation) vk.Format {
	t := f.Type
	s := f.Size
	switch {
	case t == shader.DataTypeFloat && s == 1:
		return vk.FORMAT_R32_SFLOAT
	case t == shader.DataTypeFloat && s == 2:
		return vk.FORMAT_R32G32_SFLOAT
	case t == shader.DataTypeFloat && s == 3:
		return vk.FORMAT_R32G32B32_SFLOAT
	case t == shader.DataTypeFloat && s == 4:
		return vk.FORMAT_R32G32B32A32_SFLOAT
	default:
		panic("unsupported data type")
	}
}

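// createPipelineLayout builds the descriptor set layout and pipeline
// layout for the bindings declared in src: combined image samplers for
// textures, and storage images and storage buffers for compute.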
func createPipelineLayout(d vk.Device, src shader.Sources, ranges []vk.PushConstantRange) (*descPool, error) {
	var (
		descLayouts []vk.DescriptorSetLayout
		descLayout  vk.DescriptorSetLayout
	)
	texBinds := make([]int, len(src.Textures))
	imgBinds := make([]int, len(src.Images))
	bufBinds := make([]int, len(src.StorageBuffers))
	var descBinds []vk.DescriptorSetLayoutBinding
	for i, t := range src.Textures {
		descBinds = append(descBinds, vk.DescriptorSetLayoutBinding{
			Binding:        t.Binding,
			StageFlags:     vk.SHADER_STAGE_FRAGMENT_BIT,
			DescriptorType: vk.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
		})
		texBinds[i] = t.Binding
	}
	for i, img := range src.Images {
		descBinds = append(descBinds, vk.DescriptorSetLayoutBinding{
			Binding:        img.Binding,
			StageFlags:     vk.SHADER_STAGE_COMPUTE_BIT,
			DescriptorType: vk.DESCRIPTOR_TYPE_STORAGE_IMAGE,
		})
		imgBinds[i] = img.Binding
	}
	for i, buf := range src.StorageBuffers {
		descBinds = append(descBinds, vk.DescriptorSetLayoutBinding{
			Binding:        buf.Binding,
			StageFlags:     vk.SHADER_STAGE_COMPUTE_BIT,
			DescriptorType: vk.DESCRIPTOR_TYPE_STORAGE_BUFFER,
		})
		bufBinds[i] = buf.Binding
	}
	if len(descBinds) > 0 {
		var err error
		descLayout, err = vk.CreateDescriptorSetLayout(d, descBinds)
		if err != nil {
			return nil, err
		}
		descLayouts = append(descLayouts, descLayout)
	}
	layout, err := vk.CreatePipelineLayout(d, ranges, descLayouts)
	if err != nil {
		if descLayout != 0 {
			vk.DestroyDescriptorSetLayout(d, descLayout)
		}
		return nil, err
	}
	descPool := &descPool{
		texBinds:   texBinds,
		bufBinds:   bufBinds,
		imgBinds:   imgBinds,
		layout:     layout,
		descLayout: descLayout,
	}
	return descPool, nil
}

func (b *Backend) newShader(src shader.Sources, stage vk.ShaderStageFlags) (*Shader, error) {
	mod, err := vk.CreateShaderModule(b.dev, src.SPIRV)
	if err != nil {
		return nil, err
	}
	sh := &Shader{dev: b.dev, module: mod, src: src}
	if locs := src.Uniforms.Locations; len(locs) > 0 {
		pushOffset := 0x7fffffff
		for _, l := range locs {
			if l.Offset < pushOffset {
				pushOffset = l.Offset
			}
		}
		sh.pushRange = vk.BuildPushConstantRange(stage, pushOffset, src.Uniforms.Size)
	}
	return sh, nil
}

func (b *Backend) CopyTexture(dstTex driver.Texture, dorig image.Point, srcFBO driver.Texture, srect image.Rectangle) {
	dst := dstTex.(*Texture)
	src := srcFBO.(*Texture)
	cmdBuf := b.ensureCmdBuf()
	op := vk.BuildImageCopy(srect.Min.X, srect.Min.Y, dorig.X, dorig.Y, srect.Dx(), srect.Dy())
	src.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_READ_BIT,
	)
	dst.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_WRITE_BIT,
	)
	vk.CmdCopyImage(cmdBuf, src.img, src.layout, dst.img, dst.layout, []vk.ImageCopy{op})
}

func (b *Backend) Viewport(x, y, width, height int) {
	cmdBuf := b.currentCmdBuf()
	vp := vk.BuildViewport(float32(x), float32(y), float32(width), float32(height))
	vk.CmdSetViewport(cmdBuf, 0, vp)
}

func (b *Backend) DrawArrays(off, count int) {
	cmdBuf := b.currentCmdBuf()
	if b.desc.dirty {
		b.pipe.desc.bindDescriptorSet(b, cmdBuf, vk.PIPELINE_BIND_POINT_GRAPHICS, b.desc.texBinds, b.desc.bufBinds)
		b.desc.dirty = false
	}
	vk.CmdDraw(cmdBuf, count, 1, off, 0)
}

func (b *Backend) DrawElements(off, count int) {
	cmdBuf := b.currentCmdBuf()
	if b.desc.dirty {
		b.pipe.desc.bindDescriptorSet(b, cmdBuf, vk.PIPELINE_BIND_POINT_GRAPHICS, b.desc.texBinds, b.desc.bufBinds)
		b.desc.dirty = false
	}
	vk.CmdDrawIndexed(cmdBuf, count, 1, off, 0, 0)
}

func (b *Backend) BindImageTexture(unit int, tex driver.Texture) {
	t := tex.(*Texture)
	b.desc.texBinds[unit] = t
	b.desc.dirty = true
	t.imageBarrier(b.currentCmdBuf(),
		vk.IMAGE_LAYOUT_GENERAL,
		vk.PIPELINE_STAGE_COMPUTE_SHADER_BIT,
		vk.ACCESS_SHADER_READ_BIT|vk.ACCESS_SHADER_WRITE_BIT,
	)
}

func (b *Backend) DispatchCompute(x, y, z int) {
	cmdBuf := b.currentCmdBuf()
	if b.desc.dirty {
		b.pipe.desc.bindDescriptorSet(b, cmdBuf, vk.PIPELINE_BIND_POINT_COMPUTE, b.desc.texBinds, b.desc.bufBinds)
		b.desc.dirty = false
	}
	vk.CmdDispatch(cmdBuf, x, y, z)
}

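// Upload copies pixels into the texture through the staging buffer, then
// regenerates the mipmap chain, if any, with a cascade of linear blits
// from each level to the next.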
func (t *Texture) Upload(offset, size image.Point, pixels []byte, stride int) {
	if stride == 0 {
		stride = size.X * 4
	}
	cmdBuf := t.backend.ensureCmdBuf()
	dstStride := size.X * 4
	n := size.Y * dstStride
	stage, mem, off := t.backend.stagingBuffer(n)
	var srcOff, dstOff int
	for y := 0; y < size.Y; y++ {
		srcRow := pixels[srcOff : srcOff+dstStride]
		dstRow := mem[dstOff : dstOff+dstStride]
		copy(dstRow, srcRow)
		dstOff += dstStride
		srcOff += stride
	}
	op := vk.BuildBufferImageCopy(off, dstStride/4, offset.X, offset.Y, size.X, size.Y)
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_WRITE_BIT,
	)
	vk.CmdCopyBufferToImage(cmdBuf, stage.buf, t.img, t.layout, op)
	// Build mipmaps by repeating linear blits.
	w, h := t.width, t.height
	for i := 1; i < t.mipmaps; i++ {
		nw, nh := w/2, h/2
		if nh < 1 {
			nh = 1
		}
		if nw < 1 {
			nw = 1
		}
		// Transition previous (source) level.
		b := vk.BuildImageMemoryBarrier(
			t.img,
			vk.ACCESS_TRANSFER_WRITE_BIT, vk.ACCESS_TRANSFER_READ_BIT,
			vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
			i-1, 1,
		)
		vk.CmdPipelineBarrier(cmdBuf, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.DEPENDENCY_BY_REGION_BIT, nil, nil, []vk.ImageMemoryBarrier{b})
		// Blit to this mipmap level.
		blit := vk.BuildImageBlit(0, 0, 0, 0, w, h, nw, nh, i-1, i)
		vk.CmdBlitImage(cmdBuf, t.img, vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, t.img, vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, []vk.ImageBlit{blit}, vk.FILTER_LINEAR)
		w, h = nw, nh
	}
	if t.mipmaps > 1 {
		// Add barrier for last blit.
		b := vk.BuildImageMemoryBarrier(
			t.img,
			vk.ACCESS_TRANSFER_WRITE_BIT, vk.ACCESS_TRANSFER_READ_BIT,
			vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
			t.mipmaps-1, 1,
		)
		vk.CmdPipelineBarrier(cmdBuf, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.DEPENDENCY_BY_REGION_BIT, nil, nil, []vk.ImageMemoryBarrier{b})
		t.layout = vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
	}
}

func (t *Texture) Release() {
	if t.foreign {
		panic("external textures cannot be released")
	}
	freet := *t
	t.backend.deferFunc(func(d vk.Device) {
		if freet.fbo != 0 {
			vk.DestroyFramebuffer(d, freet.fbo)
		}
		vk.DestroySampler(d, freet.sampler)
		vk.DestroyImageView(d, freet.view)
		vk.DestroyImage(d, freet.img)
		vk.FreeMemory(d, freet.mem)
	})
	*t = Texture{}
}

func (p *Pipeline) Release() {
	freep := *p
	p.backend.deferFunc(func(d vk.Device) {
		freep.desc.release(d)
		vk.DestroyPipeline(d, freep.pipe)
	})
	*p = Pipeline{}
}

func (p *descPool) release(d vk.Device) {
	if p := p.pool; p != 0 {
		vk.DestroyDescriptorPool(d, p)
	}
	if l := p.descLayout; l != 0 {
		vk.DestroyDescriptorSetLayout(d, l)
	}
	vk.DestroyPipelineLayout(d, p.layout)
}

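// bindDescriptorSet writes the current texture and buffer bindings into
// the next free descriptor set and binds it. When the pool runs out of
// sets, it is replaced by one of twice the capacity; the old pool is
// destroyed at the start of the next frame.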
func (p *descPool) bindDescriptorSet(b *Backend, cmdBuf vk.CommandBuffer, bindPoint vk.PipelineBindPoint, texBinds [texUnits]*Texture, bufBinds [storageUnits]*Buffer) {
	if p.size == len(p.sets) {
		l := p.descLayout
		if l == 0 {
			panic("vulkan: descriptor set is dirty, but pipeline has empty layout")
		}
		newCap := len(p.sets) * 2
		if pool := p.pool; pool != 0 {
			b.deferFunc(func(d vk.Device) {
				vk.DestroyDescriptorPool(d, pool)
			})
		}
		const initialPoolSize = 100
		if newCap < initialPoolSize {
			newCap = initialPoolSize
		}
		var poolSizes []vk.DescriptorPoolSize
		if n := len(p.texBinds); n > 0 {
			poolSizes = append(poolSizes, vk.BuildDescriptorPoolSize(vk.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, newCap*n))
		}
		if n := len(p.imgBinds); n > 0 {
			poolSizes = append(poolSizes, vk.BuildDescriptorPoolSize(vk.DESCRIPTOR_TYPE_STORAGE_IMAGE, newCap*n))
		}
		if n := len(p.bufBinds); n > 0 {
			poolSizes = append(poolSizes, vk.BuildDescriptorPoolSize(vk.DESCRIPTOR_TYPE_STORAGE_BUFFER, newCap*n))
		}
		pool, err := vk.CreateDescriptorPool(b.dev, newCap, poolSizes)
		if err != nil {
			panic(fmt.Errorf("vulkan: failed to allocate descriptor pool with %d descriptors: %v", newCap, err))
		}
		p.pool = pool
		sets, err := vk.AllocateDescriptorSets(b.dev, p.pool, l, newCap)
		if err != nil {
			panic(fmt.Errorf("vulkan: failed to allocate descriptor with %d sets: %v", newCap, err))
		}
		p.sets = sets
		p.size = 0
	}
	descSet := p.sets[p.size]
	p.size++
	for _, bind := range p.texBinds {
		tex := texBinds[bind]
		write := vk.BuildWriteDescriptorSetImage(descSet, bind, vk.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, tex.sampler, tex.view, vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
		vk.UpdateDescriptorSet(b.dev, write)
	}
	for _, bind := range p.imgBinds {
		tex := texBinds[bind]
		write := vk.BuildWriteDescriptorSetImage(descSet, bind, vk.DESCRIPTOR_TYPE_STORAGE_IMAGE, 0, tex.view, vk.IMAGE_LAYOUT_GENERAL)
		vk.UpdateDescriptorSet(b.dev, write)
	}
	for _, bind := range p.bufBinds {
		buf := bufBinds[bind]
		write := vk.BuildWriteDescriptorSetBuffer(descSet, bind, vk.DESCRIPTOR_TYPE_STORAGE_BUFFER, buf.buf)
		vk.UpdateDescriptorSet(b.dev, write)
	}
	vk.CmdBindDescriptorSets(cmdBuf, bindPoint, p.layout, 0, []vk.DescriptorSet{descSet})
}

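// imageBarrier records a pipeline barrier transitioning the image to
// layout and making it available to the given stage and access scope.
// The barrier is elided when no previous scope is recorded and the
// layout already matches.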
func (t *Texture) imageBarrier(cmdBuf vk.CommandBuffer, layout vk.ImageLayout, stage vk.PipelineStageFlags, access vk.AccessFlags) {
	srcStage := t.scope.stage
	if srcStage == 0 && t.layout == layout {
		t.scope.stage = stage
		t.scope.access = access
		return
	}
	if srcStage == 0 {
		srcStage = vk.PIPELINE_STAGE_TOP_OF_PIPE_BIT
	}
	b := vk.BuildImageMemoryBarrier(
		t.img,
		t.scope.access, access,
		t.layout, layout,
		0, vk.REMAINING_MIP_LEVELS,
	)
	vk.CmdPipelineBarrier(cmdBuf, srcStage, stage, vk.DEPENDENCY_BY_REGION_BIT, nil, nil, []vk.ImageMemoryBarrier{b})
	t.layout = layout
	t.scope.stage = stage
	t.scope.access = access
}

func (b *Backend) PrepareTexture(tex driver.Texture) {
	t := tex.(*Texture)
	cmdBuf := b.ensureCmdBuf()
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
		vk.PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
		vk.ACCESS_SHADER_READ_BIT,
	)
}

func (b *Backend) BindTexture(unit int, tex driver.Texture) {
	t := tex.(*Texture)
	b.desc.texBinds[unit] = t
	b.desc.dirty = true
}

func (b *Backend) BindPipeline(pipe driver.Pipeline) {
	b.bindPipeline(pipe.(*Pipeline), vk.PIPELINE_BIND_POINT_GRAPHICS)
}

func (b *Backend) BindProgram(prog driver.Program) {
	b.bindPipeline(prog.(*Pipeline), vk.PIPELINE_BIND_POINT_COMPUTE)
}

func (b *Backend) bindPipeline(p *Pipeline, point vk.PipelineBindPoint) {
	b.pipe = p
	b.desc.dirty = p.desc.descLayout != 0
	cmdBuf := b.currentCmdBuf()
	vk.CmdBindPipeline(cmdBuf, point, p.pipe)
}

func (s *Shader) Release() {
	vk.DestroyShaderModule(s.dev, s.module)
	*s = Shader{}
}

func (b *Backend) BindStorageBuffer(binding int, buffer driver.Buffer) {
	buf := buffer.(*Buffer)
	b.desc.bufBinds[binding] = buf
	b.desc.dirty = true
	buf.barrier(b.currentCmdBuf(),
		vk.PIPELINE_STAGE_COMPUTE_SHADER_BIT,
		vk.ACCESS_SHADER_READ_BIT|vk.ACCESS_SHADER_WRITE_BIT,
	)
}

func (b *Backend) BindUniforms(buffer driver.Buffer) {
	buf := buffer.(*Buffer)
	cmdBuf := b.currentCmdBuf()
	for _, s := range b.pipe.pushRanges {
		off := vk.PushConstantRangeOffset(s)
		vk.CmdPushConstants(cmdBuf, b.pipe.desc.layout, vk.PushConstantRangeStageFlags(s), off, buf.store[off:off+vk.PushConstantRangeSize(s)])
	}
}

func (b *Backend) BindVertexBuffer(buffer driver.Buffer, offset int) {
	buf := buffer.(*Buffer)
	cmdBuf := b.currentCmdBuf()
	b.bindings = b.bindings[:0]
	b.offsets = b.offsets[:0]
	for i := 0; i < b.pipe.ninputs; i++ {
		b.bindings = append(b.bindings, buf.buf)
		b.offsets = append(b.offsets, vk.DeviceSize(offset))
	}
	vk.CmdBindVertexBuffers(cmdBuf, 0, b.bindings, b.offsets)
}

func (b *Backend) BindIndexBuffer(buffer driver.Buffer) {
	buf := buffer.(*Buffer)
	cmdBuf := b.currentCmdBuf()
	vk.CmdBindIndexBuffer(cmdBuf, buf.buf, 0, vk.INDEX_TYPE_UINT16)
}

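// Download synchronously reads the buffer back through the staging
// buffer: it records a copy, submits the command buffer and blocks on
// the backend fence before copying the staging memory into data.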
func (b *Buffer) Download(data []byte) error {
	if b.buf == 0 {
		copy(data, b.store)
		return nil
	}
	stage, mem, off := b.backend.stagingBuffer(len(data))
	cmdBuf := b.backend.ensureCmdBuf()
	b.barrier(cmdBuf,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_READ_BIT,
	)
	vk.CmdCopyBuffer(cmdBuf, b.buf, stage.buf, 0, off, len(data))
	stage.scope.stage = vk.PIPELINE_STAGE_TRANSFER_BIT
	stage.scope.access = vk.ACCESS_TRANSFER_WRITE_BIT
	stage.barrier(cmdBuf,
		vk.PIPELINE_STAGE_HOST_BIT,
		vk.ACCESS_HOST_READ_BIT,
	)
	b.backend.submitCmdBuf(b.backend.fence)
	vk.WaitForFences(b.backend.dev, b.backend.fence)
	vk.ResetFences(b.backend.dev, b.backend.fence)
	copy(data, mem)
	return nil
}

func (b *Buffer) Upload(data []byte) {
	if b.buf == 0 {
		copy(b.store, data)
		return
	}
	stage, mem, off := b.backend.stagingBuffer(len(data))
	copy(mem, data)
	cmdBuf := b.backend.ensureCmdBuf()
	b.barrier(cmdBuf,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_WRITE_BIT,
	)
	vk.CmdCopyBuffer(cmdBuf, stage.buf, b.buf, off, 0, len(data))
	var access vk.AccessFlags
	if b.usage&vk.BUFFER_USAGE_INDEX_BUFFER_BIT != 0 {
		access |= vk.ACCESS_INDEX_READ_BIT
	}
	if b.usage&vk.BUFFER_USAGE_VERTEX_BUFFER_BIT != 0 {
		access |= vk.ACCESS_VERTEX_ATTRIBUTE_READ_BIT
	}
	if access != 0 {
		b.barrier(cmdBuf,
			vk.PIPELINE_STAGE_VERTEX_INPUT_BIT,
			access,
		)
	}
}

func (b *Buffer) barrier(cmdBuf vk.CommandBuffer, stage vk.PipelineStageFlags, access vk.AccessFlags) {
	srcStage := b.scope.stage
	if srcStage == 0 {
		b.scope.stage = stage
		b.scope.access = access
		return
	}
	barrier := vk.BuildBufferMemoryBarrier(
		b.buf,
		b.scope.access, access,
	)
	vk.CmdPipelineBarrier(cmdBuf, srcStage, stage, vk.DEPENDENCY_BY_REGION_BIT, nil, []vk.BufferMemoryBarrier{barrier}, nil)
	b.scope.stage = stage
	b.scope.access = access
}

func (b *Buffer) Release() {
	freeb := *b
	if freeb.buf != 0 {
		b.backend.deferFunc(func(d vk.Device) {
			vk.DestroyBuffer(d, freeb.buf)
			vk.FreeMemory(d, freeb.mem)
		})
	}
	*b = Buffer{}
}

func (t *Texture) ReadPixels(src image.Rectangle, pixels []byte, stride int) error {
	if len(pixels) == 0 {
		return nil
	}
	sz := src.Size()
	stageStride := sz.X * 4
	n := sz.Y * stageStride
	stage, mem, off := t.backend.stagingBuffer(n)
	cmdBuf := t.backend.ensureCmdBuf()
	region := vk.BuildBufferImageCopy(off, stageStride/4, src.Min.X, src.Min.Y, sz.X, sz.Y)
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_READ_BIT,
	)
	vk.CmdCopyImageToBuffer(cmdBuf, t.img, t.layout, stage.buf, []vk.BufferImageCopy{region})
	stage.scope.stage = vk.PIPELINE_STAGE_TRANSFER_BIT
	stage.scope.access = vk.ACCESS_TRANSFER_WRITE_BIT
	stage.barrier(cmdBuf,
		vk.PIPELINE_STAGE_HOST_BIT,
		vk.ACCESS_HOST_READ_BIT,
	)
	t.backend.submitCmdBuf(t.backend.fence)
	vk.WaitForFences(t.backend.dev, t.backend.fence)
	vk.ResetFences(t.backend.dev, t.backend.fence)
	var srcOff, dstOff int
	for y := 0; y < sz.Y; y++ {
		srcRow := mem[srcOff : srcOff+stageStride]
		dstRow := pixels[dstOff : dstOff+stageStride]
		copy(dstRow, srcRow)
		srcOff += stageStride
		dstOff += stride
	}
	return nil
}

func (b *Backend) currentCmdBuf() vk.CommandBuffer {
	cur := b.cmdPool.current
	if cur == nil {
		panic("vulkan: invalid operation outside a render or compute pass")
	}
	return cur
}

// ensureCmdBuf returns the current command buffer, reusing a previously
// allocated buffer or allocating a new one as needed, and begins
// recording into it.
func (b *Backend) ensureCmdBuf() vk.CommandBuffer {
	if b.cmdPool.current != nil {
		return b.cmdPool.current
	}
	if b.cmdPool.used < len(b.cmdPool.buffers) {
		buf := b.cmdPool.buffers[b.cmdPool.used]
		b.cmdPool.current = buf
	} else {
		buf, err := vk.AllocateCommandBuffer(b.dev, b.cmdPool.pool)
		if err != nil {
			panic(err)
		}
		b.cmdPool.buffers = append(b.cmdPool.buffers, buf)
		b.cmdPool.current = buf
	}
	b.cmdPool.used++
	buf := b.cmdPool.current
	if err := vk.BeginCommandBuffer(buf); err != nil {
		panic(err)
	}
	return buf
}

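// BeginRenderPass transitions the target to the color attachment layout
// and begins a render pass looked up (or created) for the target's
// format, load action and layouts.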
func (b *Backend) BeginRenderPass(tex driver.Texture, d driver.LoadDesc) {
	t := tex.(*Texture)
	var vkop vk.AttachmentLoadOp
	switch d.Action {
	case driver.LoadActionClear:
		vkop = vk.ATTACHMENT_LOAD_OP_CLEAR
	case driver.LoadActionInvalidate:
		vkop = vk.ATTACHMENT_LOAD_OP_DONT_CARE
	case driver.LoadActionKeep:
		vkop = vk.ATTACHMENT_LOAD_OP_LOAD
	}
	cmdBuf := b.ensureCmdBuf()
	if sem := t.acquire; sem != 0 {
		// The render pass targets a framebuffer that has an associated acquire semaphore.
		// Wait for it by forming an execution barrier.
		b.waitSems = append(b.waitSems, sem)
		b.waitStages = append(b.waitStages, vk.PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT)
		// But only for the first pass in a frame.
		t.acquire = 0
	}
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
		vk.PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
		vk.ACCESS_COLOR_ATTACHMENT_READ_BIT|vk.ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
	)
	pass := b.lookupPass(t.format, vkop, t.layout, t.passLayout)
	col := d.ClearColor
	vk.CmdBeginRenderPass(cmdBuf, pass, t.fbo, t.width, t.height, [4]float32{col.R, col.G, col.B, col.A})
	t.layout = t.passLayout
	// If the render pass describes an automatic image layout transition to its final layout, there
	// is an implicit image barrier with destination PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT. Make
	// sure any subsequent barrier includes the transition.
	// See also https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#VkSubpassDependency.
	t.scope.stage |= vk.PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
}

func (b *Backend) EndRenderPass() {
	vk.CmdEndRenderPass(b.cmdPool.current)
}

func (b *Backend) BeginCompute() {
	b.ensureCmdBuf()
}

func (b *Backend) EndCompute() {
}

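// lookupPass returns a render pass compatible with the given format,
// load action and layouts, creating and caching it on first use.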
func (b *Backend) lookupPass(fmt vk.Format, loadAct vk.AttachmentLoadOp, initLayout, finalLayout vk.ImageLayout) vk.RenderPass {
	key := passKey{fmt: fmt, loadAct: loadAct, initLayout: initLayout, finalLayout: finalLayout}
	if pass, ok := b.passes[key]; ok {
		return pass
	}
	pass, err := vk.CreateRenderPass(b.dev, fmt, loadAct, initLayout, finalLayout, nil)
	if err != nil {
		panic(err)
	}
	b.passes[key] = pass
	return pass
}

func (b *Backend) submitCmdBuf(fence vk.Fence) {
	buf := b.cmdPool.current
	if buf == nil && fence == 0 {
		return
	}
	buf = b.ensureCmdBuf()
	b.cmdPool.current = nil
	if err := vk.EndCommandBuffer(buf); err != nil {
		panic(err)
	}
	if err := vk.QueueSubmit(b.queue, buf, b.waitSems, b.waitStages, b.sigSems, fence); err != nil {
		panic(err)
	}
	b.waitSems = b.waitSems[:0]
	b.sigSems = b.sigSems[:0]
	b.waitStages = b.waitStages[:0]
}

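// stagingBuffer suballocates size bytes from the persistently mapped,
// host-visible staging buffer, growing it to twice the required size
// when exhausted. It returns the buffer, the mapped slice and the offset
// of the allocation.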
func (b *Backend) stagingBuffer(size int) (*Buffer, []byte, int) {
	if b.staging.size+size > b.staging.cap {
		if b.staging.buf != nil {
			vk.UnmapMemory(b.dev, b.staging.buf.mem)
			b.staging.buf.Release()
			b.staging.cap = 0
		}
		cap := 2 * (b.staging.size + size)
		buf, err := b.newBuffer(cap, vk.BUFFER_USAGE_TRANSFER_SRC_BIT|vk.BUFFER_USAGE_TRANSFER_DST_BIT,
			vk.MEMORY_PROPERTY_HOST_VISIBLE_BIT|vk.MEMORY_PROPERTY_HOST_COHERENT_BIT)
		if err != nil {
			panic(err)
		}
		mem, err := vk.MapMemory(b.dev, buf.mem, 0, cap)
		if err != nil {
			buf.Release()
			panic(err)
		}
		b.staging.buf = buf
		b.staging.mem = mem
		b.staging.size = 0
		b.staging.cap = cap
	}
	off := b.staging.size
	b.staging.size += size
	mem := b.staging.mem[off : off+size]
	return b.staging.buf, mem, off
}

func formatFor(format driver.TextureFormat) vk.Format {
	switch format {
	case driver.TextureFormatRGBA8:
		return vk.FORMAT_R8G8B8A8_UNORM
	case driver.TextureFormatSRGBA:
		return vk.FORMAT_R8G8B8A8_SRGB
	case driver.TextureFormatFloat:
		return vk.FORMAT_R16_SFLOAT
	default:
		panic("unsupported texture format")
	}
}

func mapErr(err error) error {
	var vkErr vk.Error
	if errors.As(err, &vkErr) && vkErr == vk.ERROR_DEVICE_LOST {
		return driver.ErrDeviceLost
	}
	return err
}

func (f *Texture) ImplementsRenderTarget() {}