After playing around with Unity a long time ago, I thought I'd give it another go now that the new UI stuff is out.
This was initially going to be a UI test demo to see how all the data binding worked, but instead I ended up getting distracted messing around with 2D worlds and daydreaming about writing an RPG.
Use the arrow keys to play; hint: you'll have to use UP to jump a lot.
Turns out the Cg shaders in Unity are pretty sophisticated and well thought out.
The dynamic, plasma-like shader in this example is implemented like so:
Shader "Components/flare/Flare" {
Properties {
_Vector1 ("Direction", Vector) = (0, 0, 0, 0)
_Color1 ("Start Color", Color) = (1, 0, 1, 1)
_Color2 ("End Color", Color) = (0, 0, 1, 1)
_Size ("Size", Float) = 1.0
_Base ("Base Opacity", Float) = 0.2
}
SubShader {
Blend SrcAlpha OneMinusSrcAlpha
Pass {
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma target 3.0
#include "UnityCG.cginc"
#define PI 3.141516
uniform float4 _Vector1;
uniform float4 _Color1;
uniform float4 _Color2;
uniform float _Size;
uniform float _Base;
struct vertexInput {
float4 vertex : POSITION;
float4 texcoord0 : TEXCOORD0;
};
struct fragmentInput{
float4 position : SV_POSITION;
float4 texcoord0 : TEXCOORD0;
};
fragmentInput vert(vertexInput i){
fragmentInput o;
o.position = mul (UNITY_MATRIX_MVP, i.vertex);
o.texcoord0 = i.texcoord0;
return o;
}
float delta(float2 p1, float2 p2) {
return sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y));
}
float2 projected(float2 point) {
float m = (_Vector1.w - _Vector1.y) / (_Vector1.z - _Vector1.x);
float b = _Vector1.y - (m * _Vector1.x);
float x = (m * point.y + point.x - m * b) / (m * m + 1);
float y = (m * m * point.y + m * point.x + b) / (m * m + 1);
return float2(x, y);
}
float magn(float2 point) {
float max = delta(_Vector1.xy, _Vector1.zw);
float value = delta(_Vector1.xy, point) - delta(_Vector1.zw, point);
float factor = 1.0 - value / max;
return factor * (
factor * sin(point.x * PI * _Size + _Time[1]) * sin(point.y * PI * _Size + _Time[1]) +
cos((1.0 - point.x) * PI * _Size + _Time[1]) * cos((1.0 - point.y) * PI * _Size + _Time[1]) +
(1.0 - _Base) * factor * sin(point.x * PI * _Size - _Time[1]) * cos(point.y * PI * _Size + _Time[1])
);
}
float map(float c1, float c2, float factor) {
return c1 + factor * (c2 - c1);
}
float4 frag(fragmentInput i) : COLOR {
float2 point = projected(i.texcoord0.xy);
float n = magn(point);
return float4(
map(_Color1.r, _Color2.r, n * sin(i.texcoord0.x * PI * _Size + _Time[1]) * sin(i.texcoord0.y * PI * _Size + _Time[1])),
map(_Color1.g, _Color2.g, n * cos((1.0 - i.texcoord0.x) * PI * _Size + _Time[1]) * cos((1.0 - i.texcoord0.y) * PI * _Size + _Time[1])),
map(_Color1.b, _Color2.b, n * sin(i.texcoord0.x * PI * _Size - _Time[1]) * cos(i.texcoord0.y * PI * _Size + _Time[1])),
2 * magn(i.texcoord0.xy)
);
}
ENDCG
}
}
}
Notice how the properties declared in the Properties block are bound to the shader and exposed as editable fields in the editor interface, so you can tweak them live. Very cool.
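The same properties can also be driven from a script at runtime through the standard Material setters. Here's a minimal sketch, assuming the shader above is on the object's material; the component name and the animation applied to _Vector1 are just illustrative:

using UnityEngine;

// Illustrative example: animates the Flare shader's properties at runtime.
// SetColor/SetVector/SetFloat address the same _Color1, _Vector1, etc.
// fields declared in the shader's Properties block.
public class FlareDriver : MonoBehaviour {
    private Material material;

    void Start() {
        // Per-instance material of the renderer this script is attached to.
        material = GetComponent<Renderer>().material;
        material.SetColor("_Color1", Color.magenta);
        material.SetColor("_Color2", Color.blue);
        material.SetFloat("_Size", 2.0f);
    }

    void Update() {
        // Slowly rotate the direction line the shader projects onto.
        float a = Time.time * 0.25f;
        material.SetVector("_Vector1", new Vector4(0, 0, Mathf.Cos(a), Mathf.Sin(a)));
    }
}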
Now if only Unity would release its web-native HTML5 exporter so we could stop relying on this terrible web plugin.