| <!DOCTYPE html> |
| <html lang="en"> |
| |
| <head> |
| <meta charset="UTF-8" /> |
| <link rel="icon" type="image/svg+xml" href="/vite.svg" /> |
| <meta name="viewport" content="width=device-width, initial-scale=1.0" /> |
| <title>Vite + TS</title> |
| </head> |
| |
| <body> |
| <div id="app"> |
| </div> |
| |
| <script type="module" src="/src/main.ts"></script> |
| </body> |
| |
| </html> |
| { |
| "name": "webgpu_learn_typescript", |
| "private": true, |
| "version": "0.0.0", |
| "type": "module", |
| "scripts": { |
| "dev": "vite", |
| "build": "tsc && vite build", |
| "preview": "vite preview" |
| }, |
| "devDependencies": { |
| "typescript": "^5.0.2", |
| "vite": "^4.3.2" |
| }, |
| "dependencies": { |
| "@types/node": "^20.1.7", |
| "@webgpu/types": "^0.1.32", |
| "ts-shader-loader": "^2.0.2" |
| } |
| } |
| |
| { |
| "compilerOptions": { |
| "target": "ES2020", |
| "useDefineForClassFields": true, |
| "module": "ESNext", |
| "lib": ["ES2020", "DOM", "DOM.Iterable"], |
| "skipLibCheck": true, |
| |
| |
| "moduleResolution": "bundler", |
| "allowImportingTsExtensions": true, |
| "resolveJsonModule": true, |
| "isolatedModules": true, |
| "noEmit": true, |
| |
| |
| "strict": true, |
| "noUnusedLocals": true, |
| "noUnusedParameters": true, |
| "noFallthroughCasesInSwitch": true, |
| |
| "types": ["@webgpu/types"], |
| |
| "allowJs": true |
| }, |
| "include": ["src"] |
| } |
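The "types": ["@webgpu/types"] entry in compilerOptions is what makes navigator.gpu, GPUDevice and the other WebGPU interfaces typecheck project-wide. A per-file alternative (not used in this config, shown only as a sketch) is a triple-slash reference at the top of a .ts file:
| /// <reference types="@webgpu/types" /> |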
| |
/Users/song/Code/webgpu_learn/webgpu-for-beginners/webgpu_learn_typescript/src_04_渐变颜色的矩形/main.ts
| async function main() { |
| const adapter = await navigator.gpu?.requestAdapter(); |
| |
| const device = await adapter?.requestDevice(); |
| |
| if (!device) { |
| console.log("need a browser that supports WebGPU"); |
| return; |
| } |
| |
| |
| const canvas = document.createElement("canvas"); |
| canvas.style.width = "500px"; |
| canvas.style.height = "300px"; |
| canvas.style.border = "1px solid red"; |
| |
| const context = canvas.getContext("webgpu")!; |
| |
| const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); |
| |
| context.configure({ |
| device, |
| format: presentationFormat, |
| }); |
| |
| |
| const module = device.createShaderModule({ |
| label: "our hardcoded rgb triangle shaders", |
| code: ` |
| struct OurVertexShaderOutput { |
| @builtin(position) position: vec4f, |
| @location(0) color: vec4f, |
| }; |
| |
| @vertex fn vs( |
| @builtin(vertex_index) vertexIndex : u32 |
| ) -> OurVertexShaderOutput { |
| // positions |
| var pos = array<vec2f, 6>( |
| // first triangle |
| vec2f(0.0, 0.0), // 0 |
| vec2f(1.0, 0.0), // 1 |
| vec2f(1.0, 1.0), // 2 |
| // second triangle |
| vec2f(1.0, 1.0), // 3 |
| vec2f(0.0, 1.0), // 4 |
| vec2f(0.0, 0.0), // 5 |
| ); |
| // colors: vertices 0, 1, 5 are red, vertices 2, 3, 4 are green |
| var color = array<vec4f, 6>( |
| vec4f(1, 0, 0, 1), |
| vec4f(1, 0, 0, 1), |
| |
| vec4f(0, 1, 0, 1), |
| vec4f(0, 1, 0, 1), |
| vec4f(0, 1, 0, 1), |
| |
| vec4f(1, 0, 0, 1), |
| ); |
| |
| var vsOutput: OurVertexShaderOutput; |
| vsOutput.position = vec4f(pos[vertexIndex], 0.0, 1.0); |
| // gradient-colored rectangle: the color is interpolated across each triangle |
| vsOutput.color = color[vertexIndex]; |
| return vsOutput; |
| } |
| |
| @fragment fn fs(fsInput: OurVertexShaderOutput) -> @location(0) vec4f { |
| return fsInput.color; |
| } |
| `, |
| }); |
| |
| const pipeline = device.createRenderPipeline({ |
| label: "hardcoded rgb triangle pipeline", |
| layout: "auto", |
| vertex: { |
| module, |
| entryPoint: "vs", |
| }, |
| fragment: { |
| module, |
| entryPoint: "fs", |
| targets: [{ format: presentationFormat }], |
| }, |
| }); |
| |
| const renderPassDescriptor = { |
| label: "our basic canvas renderPass", |
| colorAttachments: [ |
| { |
| // view is assigned from the canvas each frame in render() |
| clearValue: [1.0, 1.0, 1.0, 1], |
| loadOp: "clear", |
| storeOp: "store", |
| }, |
| ], |
| }; |
| |
| function render() { |
| |
| |
| renderPassDescriptor.colorAttachments[0].view = context |
| .getCurrentTexture() |
| .createView(); |
| |
| const encoder = device.createCommandEncoder({ |
| label: "render triangle encoder", |
| }); |
| const pass = encoder.beginRenderPass( |
| renderPassDescriptor as GPURenderPassDescriptor |
| ); |
| pass.setPipeline(pipeline); |
| pass.draw(6); |
| pass.end(); |
| |
| const commandBuffer = encoder.finish(); |
| device.queue.submit([commandBuffer]); |
| } |
| |
| render(); |
| |
| document.body.appendChild(canvas); |
| } |
| |
| main(); |
| |
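The quad above is drawn with draw(6), repeating two of the four corners. A sketch of the same quad driven by an index buffer (indexBuffer and indexData are illustrative names): with drawIndexed, @builtin(vertex_index) receives the value read from the index buffer, so the WGSL arrays would only need the four unique corners.
| // Sketch: 4 unique corners + 6 indices instead of 6 hard-coded vertices. |
| const indexData = new Uint16Array([0, 1, 2, 2, 3, 0]); |
| const indexBuffer = device.createBuffer({ |
| label: "quad index buffer", |
| size: indexData.byteLength, |
| usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST, |
| }); |
| device.queue.writeBuffer(indexBuffer, 0, indexData); |
| // inside render(): |
| // pass.setIndexBuffer(indexBuffer, "uint16"); |
| // pass.drawIndexed(6); |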
/Users/song/Code/webgpu_learn/webgpu-for-beginners/webgpu_learn_typescript/src_02_三角形/main.ts
| async function main() { |
| const adapter = await navigator.gpu?.requestAdapter(); |
| |
| const device = await adapter?.requestDevice(); |
| |
| if (!device) { |
| fail("need a browser that supports WebGPU"); |
| return; |
| } |
| |
| |
| const canvas = document.createElement("canvas"); |
| canvas.style.width = "500px"; |
| canvas.style.height = "300px"; |
| |
| |
| const context = canvas.getContext("webgpu")!; |
| |
| const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); |
| |
| context.configure({ |
| device, |
| format: presentationFormat, |
| }); |
| |
| |
| |
| const module = device.createShaderModule({ |
| label: "our hardcoded rgb triangle shaders", |
| code: ` |
| struct OurVertexShaderOutput { |
| @builtin(position) position: vec4f, |
| @location(0) color: vec4f, |
| }; |
| |
| @vertex fn vs( |
| @builtin(vertex_index) vertexIndex : u32 |
| ) -> OurVertexShaderOutput { |
| // positions |
| var pos = array<vec2f, 3>( |
| vec2f(0.0, 0.0), // center |
| vec2f(1.0, 0.0), // center right |
| vec2f(0.5, 1.0) // top middle |
| ); |
| // colors |
| var color = array<vec4f, 3>( |
| vec4f(1, 0, 0, 1), // red |
| vec4f(0, 1, 0, 1), // green |
| vec4f(0, 0, 1, 1), // blue |
| ); |
| |
| var vsOutput: OurVertexShaderOutput; |
| vsOutput.position = vec4f(pos[vertexIndex], 0.0, 1.0); |
| vsOutput.color = color[vertexIndex]; |
| return vsOutput; |
| } |
| |
| @fragment fn fs(fsInput: OurVertexShaderOutput) -> @location(0) vec4f { |
| return fsInput.color; |
| } |
| `, |
| }); |
| |
| const pipeline = device.createRenderPipeline({ |
| label: "hardcoded rgb triangle pipeline", |
| layout: "auto", |
| vertex: { |
| module, |
| entryPoint: "vs", |
| }, |
| fragment: { |
| module, |
| entryPoint: "fs", |
| targets: [{ format: presentationFormat }], |
| }, |
| }); |
| |
| const renderPassDescriptor = { |
| label: "our basic canvas renderPass", |
| colorAttachments: [ |
| { |
| // view is assigned from the canvas each frame in render() |
| clearValue: [0.3, 0.3, 0.3, 1], |
| loadOp: "clear", |
| storeOp: "store", |
| }, |
| ], |
| }; |
| |
| function render() { |
| |
| |
| renderPassDescriptor.colorAttachments[0].view = context |
| .getCurrentTexture() |
| .createView(); |
| |
| const encoder = device.createCommandEncoder({ |
| label: "render triangle encoder", |
| }); |
| |
| const pass = encoder.beginRenderPass( |
| renderPassDescriptor as GPURenderPassDescriptor |
| ); |
| |
| pass.setPipeline(pipeline); |
| pass.draw(3); |
| pass.end(); |
| |
| const commandBuffer = encoder.finish(); |
| device.queue.submit([commandBuffer]); |
| } |
| |
| const observer = new ResizeObserver((entries) => { |
| for (const entry of entries) { |
| const canvas = entry.target; |
| const width = entry.contentBoxSize[0].inlineSize; |
| const height = entry.contentBoxSize[0].blockSize; |
| (canvas as HTMLCanvasElement).width = Math.min( |
| width, |
| device.limits.maxTextureDimension2D |
| ); |
| (canvas as HTMLCanvasElement).height = Math.min( |
| height, |
| device.limits.maxTextureDimension2D |
| ); |
| |
| render(); |
| } |
| }); |
| observer.observe(canvas); |
| |
| document.body.appendChild(canvas); |
| } |
| |
| function fail(msg: string) { |
| |
| alert(msg); |
| } |
| |
| main(); |
| |
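The ResizeObserver above sizes the canvas from contentBoxSize, which is in CSS pixels, so on a high-DPI display the drawing buffer ends up smaller than the area it covers on screen. A sketch of a variant (dprObserver and the fallback path are illustrative; devicePixelContentBoxSize is Chromium-only and needs a reasonably recent DOM lib to typecheck):
| // Sketch: size the canvas in device pixels when the browser reports them, |
| // otherwise scale CSS pixels by devicePixelRatio. |
| const dprObserver = new ResizeObserver((entries) => { |
| for (const entry of entries) { |
| const box = entry.devicePixelContentBoxSize?.[0]; |
| const width = box ? box.inlineSize : entry.contentBoxSize[0].inlineSize * devicePixelRatio; |
| const height = box ? box.blockSize : entry.contentBoxSize[0].blockSize * devicePixelRatio; |
| const cnv = entry.target as HTMLCanvasElement; |
| cnv.width = Math.max(1, Math.min(width, device.limits.maxTextureDimension2D)); |
| cnv.height = Math.max(1, Math.min(height, device.limits.maxTextureDimension2D)); |
| render(); |
| } |
| }); |
| dprObserver.observe(canvas); |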
/Users/song/Code/webgpu_learn/webgpu-for-beginners/webgpu_learn_typescript/src_06_红色三角形_郭隆帮老师/main.ts
| import { vertexShader, fragmentShader } from "./shader"; |
| |
| async function main() { |
| |
| const adapter = await navigator.gpu?.requestAdapter(); |
| |
| const device = await adapter?.requestDevice(); |
| if (!device) { |
| fail("need a browser that supports WebGPU"); |
| return; |
| } |
| |
| |
| |
| const canvas = document.createElement("canvas"); |
| canvas.style.width = "500px"; |
| canvas.style.height = "300px"; |
| |
| const context = canvas.getContext("webgpu")!; |
| const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); |
| context.configure({ |
| device, |
| format: presentationFormat, |
| }); |
| |
| // triangle vertex positions (x, y, z) |
| const vertexArray = new Float32Array([ |
| 0.0, 0.0, 0.0, |
| 1.0, 0.0, 0.0, |
| 0.0, 1.0, 0.0, |
| ]); |
| |
| |
| const vertexBuffer = device.createBuffer({ |
| size: vertexArray.byteLength, |
| |
| usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST, |
| }); |
| |
| device.queue.writeBuffer(vertexBuffer, 0, vertexArray); |
| |
| const pipeline = device.createRenderPipeline({ |
| layout: "auto", |
| vertex: { |
| |
| buffers: [ |
| |
| { |
| |
| arrayStride: 3 * 4, |
| attributes: [ |
| { |
| |
| shaderLocation: 0, |
| format: "float32x3", |
| offset: 0, |
| }, |
| ], |
| }, |
| ], |
| module: device.createShaderModule({ |
| label: "triangle vertex", |
| code: vertexShader, |
| }), |
| entryPoint: "main", |
| }, |
| fragment: { |
| module: device.createShaderModule({ |
| label: "fragment vertex", |
| code: fragmentShader, |
| }), |
| entryPoint: "main", |
| targets: [ |
| { |
| format: presentationFormat, |
| }, |
| ], |
| }, |
| primitive: { |
| topology: "triangle-list", |
| |
| |
| }, |
| }); |
| |
| |
| const commandEncoder = device.createCommandEncoder(); |
| |
| const renderPass = commandEncoder.beginRenderPass({ |
| label: "our basic canvas renderPass", |
| |
| colorAttachments: [ |
| { |
| |
| |
| view: context.getCurrentTexture().createView(), |
| storeOp: "store", |
| loadOp: "clear", |
| clearValue: { r: 0.5, g: 0.5, b: 0.5, a: 1.0 }, |
| }, |
| ], |
| }); |
| |
| |
| renderPass.setVertexBuffer(0, vertexBuffer); |
| renderPass.setPipeline(pipeline); |
| renderPass.draw(3); |
| renderPass.end(); |
| |
| |
| const commandBuffer = commandEncoder.finish(); |
| device.queue.submit([commandBuffer]); |
| |
| document.body.appendChild(canvas); |
| } |
| |
| function fail(msg: string) { |
| |
| alert(msg); |
| } |
| |
| main(); |
| |
| |
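| // shader.ts: the "./shader" module imported by main.ts above |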
| const vertexShader = ` |
| @vertex |
| fn main(@location(0) pos: vec3<f32>) -> @builtin(position) vec4<f32> { |
| return vec4<f32>(pos,1.0); |
| } |
| `; |
| |
| |
| const fragmentShader = ` |
| @fragment |
| fn main() -> @location(0) vec4<f32> { |
| return vec4<f32>(1.0, 0.0, 0.0, 1.0); // set the fragment color to red |
| } |
| `; |
| |
| export { vertexShader, fragmentShader }; |
| |
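main.ts above uploads the vertex data with device.queue.writeBuffer after creating the buffer. An equivalent pattern (a sketch, not what this repo does; mappedVertexBuffer is an illustrative name) is to create the buffer already mapped and fill it before unmapping, which avoids the COPY_DST usage flag:
| // Sketch: upload vertex data via mappedAtCreation instead of writeBuffer. |
| const mappedVertexBuffer = device.createBuffer({ |
| size: vertexArray.byteLength, |
| usage: GPUBufferUsage.VERTEX, |
| mappedAtCreation: true, |
| }); |
| new Float32Array(mappedVertexBuffer.getMappedRange()).set(vertexArray); |
| mappedVertexBuffer.unmap(); |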
/Users/song/Code/webgpu_learn/webgpu-for-beginners/webgpu_learn_typescript/src_01_测试是否支持webgpu/main.ts
| const oApp = document.getElementById("app")!; |
| |
| if (navigator.gpu) { |
| oApp.innerHTML = "web gpu ok"; |
| } else { |
| oApp.innerHTML = "web gpu not ok"; |
| } |
| |
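navigator.gpu can be present while requestAdapter() still resolves to null (for example, when no suitable GPU is available). A slightly stricter check along the same lines (a sketch reusing the #app element above; checkWebGPU is an illustrative name):
| // Sketch: also verify that an adapter can actually be obtained. |
| async function checkWebGPU() { |
| const adapter = await navigator.gpu?.requestAdapter(); |
| oApp.innerHTML = adapter ? "web gpu ok" : "web gpu not ok"; |
| } |
| checkWebGPU(); |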
/Users/song/Code/webgpu_learn/webgpu-for-beginners/webgpu_learn_typescript/src_05_两倍数组/main.ts
| async function main() { |
| const adapter = await navigator.gpu?.requestAdapter(); |
| const device = await adapter?.requestDevice(); |
| if (!device) { |
| console.log("need a browser that supports WebGPU"); |
| return; |
| } |
| |
| const module = device.createShaderModule({ |
| label: "doubling compute module", |
| code: ` |
| @group(0) @binding(0) var<storage, read_write> data: array<f32>; |
| |
| @compute @workgroup_size(1) fn computeSomething( |
| @builtin(global_invocation_id) id: vec3<u32> |
| ) { |
| let i = id.x; |
| data[i] = data[i] * 2.0; |
| } |
| `, |
| }); |
| |
| const pipeline = device.createComputePipeline({ |
| label: "doubling compute pipeline", |
| layout: "auto", |
| compute: { |
| module, |
| entryPoint: "computeSomething", |
| }, |
| }); |
| |
| const input = new Float32Array([1, 3, 5]); |
| |
| |
| |
| const workBuffer = device.createBuffer({ |
| label: "work buffer", |
| size: input.byteLength, |
| usage: |
| GPUBufferUsage.STORAGE | |
| GPUBufferUsage.COPY_SRC | |
| GPUBufferUsage.COPY_DST, |
| }); |
| |
| device.queue.writeBuffer(workBuffer, 0, input); |
| |
| |
| const resultBuffer = device.createBuffer({ |
| label: "result buffer", |
| size: input.byteLength, |
| usage: GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST, |
| }); |
| |
| |
| |
| const bindGroup = device.createBindGroup({ |
| label: "bindGroup for work buffer", |
| layout: pipeline.getBindGroupLayout(0), |
| entries: [{ binding: 0, resource: { buffer: workBuffer } }], |
| }); |
| |
| |
| const encoder = device.createCommandEncoder({ |
| label: "doubling encoder", |
| }); |
| const pass = encoder.beginComputePass({ |
| label: "doubling compute pass", |
| }); |
| pass.setPipeline(pipeline); |
| pass.setBindGroup(0, bindGroup); |
| pass.dispatchWorkgroups(input.length); |
| pass.end(); |
| |
| |
| encoder.copyBufferToBuffer(workBuffer, 0, resultBuffer, 0, resultBuffer.size); |
| |
| |
| const commandBuffer = encoder.finish(); |
| device.queue.submit([commandBuffer]); |
| |
| |
| await resultBuffer.mapAsync(GPUMapMode.READ); |
| |
| |
| |
| |
| const result = new Float32Array(resultBuffer.getMappedRange().slice(0)); |
| resultBuffer.unmap(); |
| |
| console.log("input", input); |
| console.log("result", result); |
| } |
| |
| main(); |
| |
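The compute shader above uses @workgroup_size(1), so one workgroup is dispatched per element and most of the parallelism inside a workgroup goes unused. A sketch of the usual pattern (the workgroup size of 64 and the variable names are assumptions; the bounds check guards the last, partially filled workgroup):
| // Sketch: 64 invocations per workgroup, dispatching ceil(n / 64) workgroups. |
| const WORKGROUP_SIZE = 64; |
| const doublingModule64 = device.createShaderModule({ |
| label: "doubling compute module (workgroup_size 64)", |
| code: ` |
| @group(0) @binding(0) var<storage, read_write> data: array<f32>; |
| |
| @compute @workgroup_size(${WORKGROUP_SIZE}) fn computeSomething( |
| @builtin(global_invocation_id) id: vec3<u32> |
| ) { |
| let i = id.x; |
| if (i < arrayLength(&data)) { |
| data[i] = data[i] * 2.0; |
| } |
| } |
| `, |
| }); |
| // pass.dispatchWorkgroups(Math.ceil(input.length / WORKGROUP_SIZE)); |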
/Users/song/Code/webgpu_learn/webgpu-for-beginners/webgpu_learn_typescript/src_03_纯红色三角形/main.ts
| async function main() { |
| const adapter = await navigator.gpu?.requestAdapter(); |
| |
| const device = await adapter?.requestDevice(); |
| |
| if (!device) { |
| fail("need a browser that supports WebGPU"); |
| return; |
| } |
| |
| |
| const canvas = document.createElement("canvas"); |
| canvas.style.width = "500px"; |
| canvas.style.height = "300px"; |
| canvas.style.border = "1px solid red"; |
| |
| |
| const context = canvas.getContext("webgpu")!; |
| |
| const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); |
| |
| context.configure({ |
| device, |
| format: presentationFormat, |
| }); |
| |
| |
| |
| const module = device.createShaderModule({ |
| label: "our hardcoded rgb triangle shaders", |
| code: ` |
| struct OurVertexShaderOutput { |
| @builtin(position) position: vec4f, |
| @location(0) color: vec4f, |
| }; |
| |
| @vertex fn vs( |
| @builtin(vertex_index) vertexIndex : u32 |
| ) -> OurVertexShaderOutput { |
| // positions |
| var pos = array<vec2f, 3>( |
| vec2f(0.0, 0.0), // center |
| vec2f(1.0, 0.0), // center right |
| vec2f(0.0, 1.0) // top center |
| ); |
| // colors |
| var color = array<vec4f, 3>( |
| vec4f(1, 0, 0, 1), // red |
| vec4f(0, 1, 0, 1), // green |
| vec4f(0, 0, 1, 1), // blue |
| ); |
| |
| var vsOutput: OurVertexShaderOutput; |
| vsOutput.position = vec4f(pos[vertexIndex], 0.0, 1.0); |
| // solid red triangle: ignore the per-vertex colors above |
| // vsOutput.color = color[vertexIndex]; |
| vsOutput.color = vec4f(1, 0, 0, 0.5); |
| return vsOutput; |
| } |
| |
| @fragment fn fs(fsInput: OurVertexShaderOutput) -> @location(0) vec4f { |
| return fsInput.color; |
| } |
| `, |
| }); |
| |
| const pipeline = device.createRenderPipeline({ |
| label: "hardcoded rgb triangle pipeline", |
| layout: "auto", |
| vertex: { |
| module, |
| entryPoint: "vs", |
| }, |
| fragment: { |
| module, |
| entryPoint: "fs", |
| targets: [{ format: presentationFormat }], |
| }, |
| }); |
| |
| const renderPassDescriptor = { |
| label: "our basic canvas renderPass", |
| colorAttachments: [ |
| { |
| // view is assigned from the canvas each frame in render() |
| clearValue: [1.0, 1.0, 1.0, 1], |
| loadOp: "clear", |
| storeOp: "store", |
| }, |
| ], |
| }; |
| |
| function render() { |
| |
| |
| renderPassDescriptor.colorAttachments[0].view = context |
| .getCurrentTexture() |
| .createView(); |
| |
| const encoder = device.createCommandEncoder({ |
| label: "render triangle encoder", |
| }); |
| const pass = encoder.beginRenderPass( |
| renderPassDescriptor as GPURenderPassDescriptor |
| ); |
| pass.setPipeline(pipeline); |
| pass.draw(3); |
| pass.end(); |
| |
| const commandBuffer = encoder.finish(); |
| device.queue.submit([commandBuffer]); |
| } |
| |
| const observer = new ResizeObserver((entries) => { |
| for (const entry of entries) { |
| const canvas = entry.target; |
| const width = entry.contentBoxSize[0].inlineSize; |
| const height = entry.contentBoxSize[0].blockSize; |
| (canvas as HTMLCanvasElement).width = Math.min( |
| width, |
| device.limits.maxTextureDimension2D |
| ); |
| (canvas as HTMLCanvasElement).height = Math.min( |
| height, |
| device.limits.maxTextureDimension2D |
| ); |
| |
| render(); |
| } |
| }); |
| observer.observe(canvas); |
| |
| document.body.appendChild(canvas); |
| } |
| |
| function fail(msg: string) { |
| |
| alert(msg); |
| } |
| |
| main(); |
| |
/Users/song/Code/webgpu_learn/webgpu-for-beginners/webgpu_learn_typescript/src/main.ts
| import { vertexShader, fragmentShader } from "./shader"; |
| |
| async function main() { |
| |
| const adapter = await navigator.gpu?.requestAdapter(); |
| |
| const device = await adapter?.requestDevice(); |
| if (!device) { |
| fail("need a browser that supports WebGPU"); |
| return; |
| } |
| |
| |
| |
| const canvas = document.createElement("canvas"); |
| canvas.style.width = "500px"; |
| canvas.style.height = "300px"; |
| |
| const context = canvas.getContext("webgpu")!; |
| const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); |
| context.configure({ |
| device, |
| format: presentationFormat, |
| }); |
| |
| // triangle vertex positions (x, y, z) |
| const vertexArray = new Float32Array([ |
| 0.0, 0.0, 0.0, |
| 1.0, 0.0, 0.0, |
| 0.0, 1.0, 0.0, |
| ]); |
| |
| |
| const vertexBuffer = device.createBuffer({ |
| size: vertexArray.byteLength, |
| |
| usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST, |
| }); |
| |
| device.queue.writeBuffer(vertexBuffer, 0, vertexArray); |
| |
| const pipeline = device.createRenderPipeline({ |
| layout: "auto", |
| vertex: { |
| |
| buffers: [ |
| |
| { |
| |
| arrayStride: 3 * 4, |
| attributes: [ |
| { |
| |
| shaderLocation: 0, |
| format: "float32x3", |
| offset: 0, |
| }, |
| ], |
| }, |
| ], |
| module: device.createShaderModule({ |
| label: "triangle vertex", |
| code: vertexShader, |
| }), |
| entryPoint: "main", |
| }, |
| fragment: { |
| module: device.createShaderModule({ |
| label: "fragment vertex", |
| code: fragmentShader, |
| }), |
| entryPoint: "main", |
| targets: [ |
| { |
| format: presentationFormat, |
| }, |
| ], |
| }, |
| primitive: { |
| topology: "triangle-list", |
| |
| |
| }, |
| }); |
| |
| |
| const commandEncoder = device.createCommandEncoder(); |
| |
| const renderPass = commandEncoder.beginRenderPass({ |
| label: "our basic canvas renderPass", |
| |
| colorAttachments: [ |
| { |
| |
| |
| view: context.getCurrentTexture().createView(), |
| storeOp: "store", |
| loadOp: "clear", |
| clearValue: { r: 0.5, g: 0.5, b: 0.5, a: 1.0 }, |
| }, |
| ], |
| }); |
| |
| |
| renderPass.setVertexBuffer(0, vertexBuffer); |
| renderPass.setPipeline(pipeline); |
| renderPass.draw(3); |
| renderPass.end(); |
| |
| |
| const commandBuffer = commandEncoder.finish(); |
| device.queue.submit([commandBuffer]); |
| |
| document.body.appendChild(canvas); |
| } |
| |
| function fail(msg: string) { |
| |
| alert(msg); |
| } |
| |
| main(); |
| |
| |
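| // shader.ts: the "./shader" module imported by main.ts above |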
| const vertexShader = ` |
| @vertex |
| fn main(@location(0) pos: vec3<f32>) -> @builtin(position) vec4<f32> { |
| return vec4<f32>(pos,1.0); |
| } |
| `; |
| |
| |
| const fragmentShader = ` |
| @fragment |
| fn main() -> @location(0) vec4<f32> { |
| return vec4<f32>(1.0, 0.0, 0.0, 1.0); // set the fragment color to red |
| } |
| `; |
| |
| export { vertexShader, fragmentShader }; |
| |