Introduction
Here I extend an example from my previous article, "VMR9 Allocator Presenter in C# with Direct3D Video Rendering", by adding support for rendering the scene with pixel shader effects written in HLSL.
Background
This article describes how to add effects support to a video playback application. Before reading, you should review how the GPU processing pipeline works and the basics of HLSL programming. I implement simple pixel shader effects, which can also be used in WPF or XNA applications. As in the previous article, the implementation here uses SlimDX.
Implementing scene presenting
In the previous article I simply copied the video surface into the backbuffer, but here we need to add proper support for rendering the video.
Custom vertexes
First we need to declare vertexes for drawing video (Vertex Declaration):
[ComVisible(false)]
[StructLayout(LayoutKind.Sequential)]
private struct CustomVertex
{
    // Pre-transformed position: x, y, z plus the reciprocal homogeneous w.
    public float x, y, z, rhw;
    // First set of texture coordinates (stage 0).
    public float tu, tv;
    // Second set of texture coordinates (stage 1).
    public float tu2, tv2;

    /// <summary>
    /// Builds a vertex from a position vector, rhw and both UV pairs.
    /// </summary>
    public static CustomVertex Create(Vector3 _position, float rhw, float tu, float tv, float tu2, float tv2)
    {
        return Create(_position.X, _position.Y, _position.Z, rhw, tu, tv, tu2, tv2);
    }

    /// <summary>
    /// Builds a vertex from explicit components.
    /// </summary>
    public static CustomVertex Create(float x, float y, float z, float rhw, float tu, float tv, float tu2, float tv2)
    {
        return new CustomVertex
        {
            x = x,
            y = y,
            z = z,
            rhw = rhw,
            tu = tu,
            tv = tv,
            tu2 = tu2,
            tv2 = tv2
        };
    }

    /// <summary>
    /// Vertex declaration matching the layout above: a float4 transformed
    /// position at offset 0, then two float2 UV sets at offsets 16 and 24.
    /// </summary>
    public static VertexElement[] Decl
    {
        get
        {
            return new VertexElement[] {
                new VertexElement(0, 0, DeclarationType.Float4, DeclarationMethod.Default, DeclarationUsage.PositionTransformed, 0),
                new VertexElement(0, 16, DeclarationType.Float2, DeclarationMethod.Default, DeclarationUsage.TextureCoordinate, 0),
                new VertexElement(0, 24, DeclarationType.Float2, DeclarationMethod.Default, DeclarationUsage.TextureCoordinate, 1),
                VertexElement.VertexDeclarationEnd };
        }
    }
}
Each vertex consists of position coordinates plus source and destination texture mappings. The vertex buffer initialization looks like this:
// Size the quad to the render target so the video fills the whole surface.
SurfaceDescription _desc = m_RenderTarget.Description;
{
// Room for 8 vertices, though only 4 are written below (one quad as a strip).
m_VertexBuffer = new VertexBuffer(m_Device, 8 * Marshal.SizeOf(typeof(CustomVertex)), Usage.WriteOnly | Usage.Dynamic, VertexFormat.None, Pool.Default);
DataStream _stream = m_VertexBuffer.Lock(0, 8 * Marshal.SizeOf(typeof(CustomVertex)), LockFlags.None);
// Four corners of a full-target quad; the -0.5f offset aligns texels with
// pixel centers in D3D9. Both UV sets span the full [0,1] range.
_stream.WriteRange(new[]
{
CustomVertex.Create(-0.5f, -0.5f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f),
CustomVertex.Create((float)_desc.Width-0.5f, -0.5f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f, 0.0f),
CustomVertex.Create(-0.5f, (float)_desc.Height-0.5f, 1.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f),
CustomVertex.Create((float)_desc.Width-0.5f, (float)_desc.Height-0.5f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f)
}
);
m_VertexBuffer.Unlock();
}
{
// Declaration describing the CustomVertex layout to the device.
m_VertexDeclaration = new VertexDeclaration(m_Device, CustomVertex.Decl);
}
Resize handling and swap chains
As we use windowed rendering, we need to adjust the render target size when the rendering window is resized; otherwise we would lose preview quality as the user resizes the main window. To handle that, we use swap chains and recreate the target swap chain whenever the window is resized. First we add a handler for control resizing:
m_Control.Resize += new EventHandler(Control_Resize);
In that handler we perform swap chain recreation:
/// <summary>
/// Recreates the swap chain and render target so the backbuffer matches
/// the new client size of the rendering control.
/// </summary>
private void Control_Resize(object sender, EventArgs e)
{
    lock (m_csLock)
    {
        if (m_Device == null)
        {
            return;
        }
        // Release the old target first: the backbuffer belongs to the swap chain.
        if (m_RenderTarget != null)
        {
            m_RenderTarget.Dispose();
            m_RenderTarget = null;
        }
        if (m_SwapChain != null)
        {
            m_SwapChain.Dispose();
            m_SwapChain = null;
        }
        PresentParameters _params = m_d3dpp.Clone();
        // Zero width/height tells Direct3D to take the size from the window.
        _params.BackBufferWidth = 0;
        _params.BackBufferHeight = 0;
        _params.DeviceWindowHandle = m_Control.Handle;
        m_SwapChain = new SwapChain(m_Device, _params);
        m_RenderTarget = m_SwapChain.GetBackBuffer(0);
    }
}
Note: we use the lock statement for thread safety.
Rendering
Here is how the rendering code looks now:
// Called when a new video frame surface is ready; draws it as a textured
// quad into the current render target, then presents.
public void OnSurfaceReady(ref Surface _surface)
{
// Serialized against Control_Resize, which swaps the render target.
lock (m_csLock)
{
m_Device.SetRenderTarget(0, m_RenderTarget);
m_Device.Clear(ClearFlags.Target, Color.Blue, 1.0f, 0);
m_Device.BeginScene();
m_Device.SetRenderState(RenderState.DitherEnable, true);
m_Device.SetRenderState(RenderState.ZEnable, true);
m_Device.SetSamplerState(0, SamplerState.MagFilter, TextureFilter.Linear);
m_Device.SetSamplerState(0, SamplerState.MinFilter, TextureFilter.Linear);
// 1 = D3DCULL_NONE, i.e. no backface culling for the screen quad.
m_Device.SetRenderState(RenderState.CullMode, 1);
m_Device.SetRenderState(RenderState.Lighting, false);
// The video surface lives inside a texture container; fetch it for drawing.
Texture _texture = _surface.GetContainer<Texture>();
m_Device.VertexDeclaration = m_VertexDeclaration;
m_Device.SetTexture(0, _texture);
m_Device.SetStreamSource(0, m_VertexBuffer, 0, Marshal.SizeOf(typeof(CustomVertex)));
// Two triangles in a strip = one full-target quad.
m_Device.DrawPrimitives(PrimitiveType.TriangleStrip, 0, 2);
m_Device.SetTexture(0, null);
// Releases only our reference to the container, not the underlying surface.
_texture.Dispose();
m_Device.EndScene();
// Prefer the resizable swap chain; fall back to the implicit one.
if (m_SwapChain != null) m_SwapChain.Present(Present.None); else m_Device.Present();
}
}
Creating effects support
In the scene class we add variables that store the effect, the technique handle and the texture parameter handle. We initialize these variables in a method that takes the technique name as a string.
/// <summary>
/// Compiles the effect from the embedded resource string and caches handles
/// for the requested technique and the first texture parameter. On failure
/// the previously installed effect (if any) is left untouched.
/// </summary>
/// <param name="_technique">Technique name; null or empty selects the first technique.</param>
public void CreateEffect(string _technique)
{
    EffectHandle hTexture = null;
    EffectHandle hTechnique = null;
    Effect _effect = null;
    try
    {
        _effect = Effect.FromString(
            m_Device,
            EffectsPlayback.Properties.Resources.effects, ShaderFlags.None
            );
        if (string.IsNullOrEmpty(_technique))
        {
            hTechnique = _effect.GetTechnique(0);
        }
        else
        {
            hTechnique = _effect.GetTechnique(_technique);
        }
        // Scan the top-level parameters for the first texture parameter:
        // the shaders receive the video frame through it.
        int nIndex = 0;
        while (true)
        {
            EffectHandle _handle = _effect.GetParameter(null, nIndex++);
            if (_handle == null)
            {
                break;
            }
            if (hTexture == null)
            {
                ParameterDescription _ParamDesc = _effect.GetParameterDescription(_handle);
                if (_ParamDesc.Type == ParameterType.Texture)
                {
                    hTexture = _handle;
                    continue;
                }
            }
            // Fix: handles we do not keep were leaked before; release them now.
            _handle.Dispose();
        }
    }
    catch
    {
        // Compilation/lookup errors fall through to the failure branch below.
    }
    if (hTexture != null && hTechnique != null && _effect != null)
    {
        lock (m_csLock)
        {
            if (m_Effect != null) m_Effect.Dispose();
            if (m_hTextureHandle != null) m_hTextureHandle.Dispose();
            if (m_hTehniqueHandle != null) m_hTehniqueHandle.Dispose();
            m_Effect = _effect;
            m_hTextureHandle = hTexture;
            m_hTehniqueHandle = hTechnique;
        }
    }
    else
    {
        // Fix: release anything partially created so a failed call does not
        // leak the compiled effect or its handles.
        if (hTexture != null) hTexture.Dispose();
        if (hTechnique != null) hTechnique.Dispose();
        if (_effect != null) _effect.Dispose();
        Sonic.COMHelper.ASSERT(false);
    }
}
Rendering with an effect is a little different:
// Called when a new video frame surface is ready; draws it as a textured
// quad through the currently installed effect, then presents.
public void OnSurfaceReady(ref Surface _surface)
{
// Serialized against Control_Resize and CreateEffect, which swap the
// render target and the effect respectively.
lock (m_csLock)
{
m_Device.SetRenderTarget(0, m_RenderTarget);
m_Device.Clear(ClearFlags.Target, Color.Blue, 1.0f, 0);
m_Device.BeginScene();
m_Device.SetRenderState(RenderState.DitherEnable, true);
m_Device.SetRenderState(RenderState.ZEnable, true);
m_Device.SetSamplerState(0, SamplerState.MagFilter, TextureFilter.Linear);
m_Device.SetSamplerState(0, SamplerState.MinFilter, TextureFilter.Linear);
// 1 = D3DCULL_NONE, i.e. no backface culling for the screen quad.
m_Device.SetRenderState(RenderState.CullMode, 1);
m_Device.SetRenderState(RenderState.Lighting, false);
// The video surface lives inside a texture container; fetch it for drawing.
Texture _texture = _surface.GetContainer<Texture>();
m_Effect.Technique = m_hTehniqueHandle;
m_Device.VertexDeclaration = m_VertexDeclaration;
// Begin returns the number of passes in the selected technique.
int nPasses = m_Effect.Begin();
for (int i = 0; i < nPasses; i++)
{
// Bind the frame to the effect's texture parameter before each pass.
m_Effect.SetTexture(m_hTextureHandle, _texture);
m_Effect.BeginPass(i);
m_Device.SetStreamSource(0, m_VertexBuffer, 0, Marshal.SizeOf(typeof(CustomVertex)));
// Two triangles in a strip = one full-target quad.
m_Device.DrawPrimitives(PrimitiveType.TriangleStrip, 0, 2);
m_Effect.EndPass();
}
m_Effect.End();
// Releases only our reference to the container, not the underlying surface.
_texture.Dispose();
m_Device.EndScene();
// Prefer the resizable swap chain; fall back to the implicit one.
if (m_SwapChain != null) m_SwapChain.Present(Present.None); else m_Device.Present();
}
}
Now let's write a simple pixel shader. First we declare the common parts used by all effects:
// Source video frame, bound by the application through the effect's
// texture parameter handle.
texture _texture;
// Sampler used by all effects: coordinates clamped to [0,1],
// point minification with linear magnification/mip filtering.
sampler2D _sampler =
sampler_state
{
Texture = <_texture>;
AddressU = Clamp;
AddressV = Clamp;
MinFilter = Point;
MagFilter = Linear;
MipFilter = Linear;
};
Effect technique:
// Single-pass technique: no vertex shader (vertices are pre-transformed),
// pixel stage runs Simple_Proc compiled against the ps_2_0 profile.
technique Simple_Technique
{
pass p0
{
VertexShader = null;
PixelShader = compile ps_2_0 Simple_Proc();
}
}
Here we declare a technique with one pass. That pass has only a pixel shader, using the Simple_Proc function compiled against the ps_2_0 profile. A sample effect:
// Pass-through shader: samples the source texture and returns the color
// unchanged.
float4 Simple_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    float4 _sample = tex2D( _sampler, _pos);
    return _sample;
}
This effect does nothing — it just passes each pixel through unchanged.
Effects overview
Effects in the sample application can be changed at runtime. Here is the list of implemented effects and how they look:
Grayscale
This is a simple implementation of the effect using a standard luminance formula.
// Luminance weights for r, g, b (alpha contributes nothing).
const float4 g_cf4Luminance = { 0.2125f, 0.7154f, 0.0721f, 0.0f };

// Grayscale: the weighted luminance scalar is splatted into all channels.
float4 GrayScale_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    float4 _sample = tex2D( _sampler, _pos);
    return dot((float4)_sample, g_cf4Luminance);
}
Sharp
To make this effect we adjust the current pixel using its neighboring pixels.
// Sharpen: boosts local contrast by subtracting a diagonally shifted
// sample and adding the opposite-shifted one, each scaled by 3.
float4 Sharp_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    float4 _result = tex2D( _sampler, _pos);
    _result = _result - tex2D( _sampler, _pos+0.001)*3.0f;
    _result = _result + tex2D( _sampler, _pos-0.001)*3.0f;
    return _result;
}
Sepia
To make the sepia effect we convert the pixel to grayscale and then adjust each color component:
// Luminance weights for r, g, b (alpha contributes nothing).
const float4 g_cf4Luminance = { 0.2125f, 0.7154f, 0.0721f, 0.0f };
// Strength of the sepia tint.
const float g_cfSepiaDepth = 0.15;

// Sepia: grayscale first, then warm the image with a tint that decreases
// from red to blue. Alpha is left as the splatted luminance value.
float4 Sepia_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    float4 _gray = dot( (float4)tex2D( _sampler, _pos ), g_cf4Luminance );
    float3 _tint = float3(g_cfSepiaDepth * 2, g_cfSepiaDepth, g_cfSepiaDepth / 2);
    _gray.xyz += _tint;
    return _gray;
}
Invert
Another simple effect — we just invert the color component values:
// Invert: flips every channel (including alpha) around 1.0.
float4 Invert_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    float4 _sample = tex2D( _sampler, _pos);
    return 1.0f - _sample;
}
Emboss
Here is how we can get an embossed image:
// Emboss: starts from mid-gray, subtracts/adds scaled samples from two
// diagonal offsets (a directional gradient), then flattens to grayscale.
// Note the +/- also affects alpha, since whole float4s are accumulated.
float4 Emboss_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
float4 _color;
_color.a = 1.0f;
_color.rgb = 0.5f;
_color -= tex2D( _sampler, _pos.xy-0.001)*2.0f;
_color += tex2D( _sampler, _pos.xy+0.001)*2.0f;
// Average the channels so the relief reads as grayscale.
_color.rgb = (_color.r+_color.g+_color.b)/3.0f;
return _color;
}
Blur
One of several possible blur implementations — an average over a fixed set of scattered sample offsets.
// Blur: averages 13 taps scattered around the pixel (Poisson-disc style
// offsets scaled by 0.025 in texture space).
float4 Blur_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    const int nSamples = 13;
    // Precomputed offsets inside the unit circle; first tap is the center.
    const float2 cSamples[nSamples] = {
        0.000000, 0.000000,
        -0.326212, -0.405805,
        -0.840144, -0.073580,
        -0.695914, 0.457137,
        -0.203345, 0.620716,
        0.962340, -0.194983,
        0.473434, -0.480026,
        0.519456, 0.767022,
        0.185461, -0.893124,
        0.507431, 0.064425,
        0.896420, 0.412458,
        -0.321940, -0.932615,
        -0.791559, -0.597705,
    };
    float4 sum = 0;
    // Fix: iterate over all nSamples taps. The original loop stopped at
    // nSamples - 1, skipping the last offset while still dividing by
    // nSamples, which slightly darkened the result.
    for (int i = 0; i < nSamples; i++)
    {
        sum += tex2D(_sampler, _pos + 0.025 * cSamples[i]);
    }
    return sum / nSamples;
}
Posterize
Posterization effect implementation.
// Posterize: quantizes each color channel to cColors levels in
// gamma-corrected space; alpha passes through untouched.
float4 Posterize_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    const float cColors = 8.0f;
    const float cGamma = 0.6f;
    float4 _sample = tex2D(_sampler, _pos);
    // Work in gamma space so the quantization steps are perceptually spaced.
    float3 _quantized = pow(_sample.xyz, cGamma);
    _quantized = _quantized * cColors;
    _quantized = floor(_quantized);
    _quantized = _quantized / cColors;
    _quantized = pow(_quantized, 1.0/cGamma);
    return float4(_quantized, _sample.w);
}
Brightness
Brightness adjustment effect.
// Brightness: scales the color channels by a constant factor; alpha is
// preserved.
float4 Brightness_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
    const float cBrightness = 2.0;
    float4 _sample = tex2D( _sampler, _pos);
    return float4(_sample.xyz * cBrightness, _sample.w);
}
Red-Gray
Converts the image to grayscale while keeping (and brightening) strongly red pixels.
// Red-gray: the whole frame goes grayscale, but pixels whose red channel
// clearly dominates green+blue keep a brightened red component.
// Relies on g_cf4Luminance declared earlier in the effect file.
float4 Red_Proc(float2 _pos: TEXCOORD0) : COLOR0
{
const float cRedCutOff = 0.5;
const float cRedBrightness = 1.2;
float4 _color = tex2D( _sampler, _pos);
// Splatted luminance = grayscale base.
float4 _result = dot( _color, g_cf4Luminance );
// "Red dominance" test: red (doubled, minus cutoff) must exceed green+blue.
if (_color.r * 2 - cRedCutOff > _color.g + _color.b)
{
_result.r = _color.r * cRedBrightness;
}
return _result;
}
History
19.11.2012 - initial version.