Alright! Here's a complete example:
#include <acknex.h>
#include <default.c>

BMAP *targetBmap = "#256x256x32"; // blank 256x256, 32-bit bitmap used as the render target

ENTITY *blob, *earth, *writeTarget; // writeTarget: the entity whose skin gets baked

// Material render event: only let the entity that the baking view points at
// (its genius) through; everything else is skipped.
function skinChangeEvent() {
    if (me == render_view.genius)
        return 0; // render this entity
    else
        return 1; // don't render anything else
}

MATERIAL *skinChangeMat = {
    effect = "
        texture entSkin1;
        sampler SkinSampler = sampler_state { Texture = <entSkin1>; };

        float4x4 matWorld;
        float4x4 matWorldViewProj;
        float4 vecSunDir;

        // Unwrap the mesh: each vertex is placed at its UV coordinate in clip
        // space instead of its world position, so the skin is rendered flat
        // into the render target.
        void VS(in float3 inNormal: NORMAL,
                in float2 inTex: TEXCOORD0,
                out float4 outPos: POSITION,
                out float2 outTex: TEXCOORD0,
                out float3 outNormal: TEXCOORD1) {
            outPos = float4(inTex.x * 2.0 - 1.0, -inTex.y * 2.0 + 1.0, 1.0, 1.0);
            outTex = inTex;
            outNormal = mul(inNormal, (float3x3)matWorld);
        }

        // Sample the original skin and bake simple diffuse sun lighting into it.
        float4 PS(in float2 inTex: TEXCOORD0,
                  in float3 inNormal: TEXCOORD1): COLOR0 {
            float4 colour = tex2D(SkinSampler, inTex);
            colour.rgb *= saturate(dot(-vecSunDir, normalize(inNormal)));
            return colour;
        }

        technique t {
            pass p {
                CullMode = None;
                VertexShader = compile vs_2_0 VS();
                PixelShader = compile ps_2_0 PS();
            }
        }
    ";
    flags = ENABLE_RENDER;
    event = skinChangeEvent;
}

// [SPACE]: create a temporary view that renders writeTarget's baked skin into
// targetBmap for one frame, then remove it again.
function updateView() {
    beep();
    VIEW *v = view_create(1);
    v.bmap = targetBmap;        // render into the bitmap instead of the screen
    v.size_x = v.size_y = 256;
    vec_set(v.x, camera.x);
    vec_set(v.pan, camera.pan);
    v.flags |= SHOW;
    v.genius = writeTarget;     // tells skinChangeEvent which entity to keep
    v.material = skinChangeMat; // render this view with the baking material
    wait(1);                    // give it one frame to render
    ptr_remove(v);
}

// [ENTER]: toggle the bake target between the two entities.
function switchTarget() {
    beep();
    if (writeTarget == blob)
        writeTarget = earth;
    else
        writeTarget = blob;
}

function main() {
    level_load(NULL); // empty level
    writeTarget = blob = ent_create("blob.MDL", vector(0, -20, 0), NULL);
    earth = ent_create("earth.MDL", vector(0, 20, 0), NULL);
    camera.x -= 100;

    on_space = updateView;
    on_enter = switchTarget;

    while (1) {
        DEBUG_BMAP(targetBmap, 0, 1); // show the render target at the top left
        wait(1);
    }
}
Copy earth.mdl and blob.mdl from your samples folder to whatever folder you test this code in, and then it should be good to go. Hit [SPACE] to render the target's skin to a bmap that'll be displayed on-screen at the top-left. Hit [ENTER] to toggle between targets (earth or the blob). To demonstrate the use of an actual shader, simple diffuse lighting is baked into the skin.
These skins aren't applied to the models themselves, but they could be. In fact, a relatively well-known yet rarely used effect is to render shadows to a texture and post-process them from there, for example blurring the red channel for a sub-surface scattering effect, which (if I recall correctly) was first utilised for realistically rendering skin in the Matrix sequels (and I mean the movies, not the games!). It's probably hardly ever used because each instance of the entity would require its own skin, making it memory-intensive.
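If you did want to put the baked result onto the model, something along these lines should do it after updateView has finished rendering. This is an untested sketch, and I'm assuming bmap_for_entity and bmap_blit behave the way I remember them:

// untested sketch: copy the baked bitmap over the target's first skin
function applyBakedSkin() {
    BMAP *skin = bmap_for_entity(writeTarget, 1); // bitmap of the entity's first skin
    if (skin != NULL)
        bmap_blit(skin, targetBmap, NULL, NULL);  // stretch the whole render target over the skin
}

Keep in mind that the shader samples entSkin1, so blitting the result back into that same skin bakes the lighting in permanently (and it accumulates if you do it more than once).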
Let me know if anything doesn't appear to make sense straight away. I'm getting back to work, but I'll check in every now and then (like I usually do).
It's worth noting that if the objects aren't inside the temporary view's frustum, they get culled automatically (that's out of my hands, as far as I know), so in practice you'll want to place the view such that the entity in question definitely won't get culled, along the lines of the sketch below.
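For example, in updateView you could replace the two vec_set(...) lines that copy the camera with something like this (untested sketch; the 200-unit offset is arbitrary):

// untested sketch: park the temporary view a fixed distance from the target
// and aim it straight at the entity so it can't end up outside the frustum
vec_set(v.x, writeTarget.x);
v.x -= 200;                        // back off along the world x axis
VECTOR dir;
vec_diff(dir, writeTarget.x, v.x); // direction from the view to the entity
vec_to_angle(v.pan, dir);          // turn the view to face it

The baked skin itself doesn't care where the view sits, since the vertex shader ignores the world position anyway; the placement only matters for getting past the culling.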
EDIT: The weird artifacts on blob's baked skin are there because blob's UV coordinates overlap each other.
EDIT2: Changed the shader a little so the y axis isn't mirrored.