How to correctly export UV coordinates from Blender

| | August 8, 2015

Alright, so I’m just now getting around to texturing some assets. After much trial and error I feel I’m pretty good at UV unwrapping now and my work looks good in Blender.

However, either I’m using the UV data incorrectly (I really doubt it) or Blender doesn’t seem to export the correct UV coordinates into the obj file, because the texture is mapped differently in my game engine. And in Blender I’ve played with the texture panel and its mapping options, and have noticed it doesn’t appear to affect the exported obj file’s UV coordinates.

So I guess my question is, is there something I need to do prior to exporting in order to bake the correct UV coordinates into the obj file? Or something else that needs to be done to massage the texture coordinates for sampling. Or any thoughts at all of what could be going wrong?

(Also, here is a screenshot of my diffuse texture in Blender and in the game engine. As you can see in the image, I have the same problem with a simple test cube not getting correct UVs either.)

Blender screenshot

Game screenshot

Edit –

Added my geometry pass shader source code to show how I’m rendering and sampling the diffuse texture. I’m simply using the UV coordinates provided by the obj file and an anisotropic sampler.

// Diffuse (albedo) texture bound by the application for the current material.
Texture2D    diffuseTexture : register(t0);
// Sampler state bound by the application (an anisotropic sampler, per the post above).
SamplerState textureSampler : register(s0);

// Per-object transforms, updated once per draw call.
cbuffer ObjectTransformBuffer : register(b0) {
    float4x4 worldTransformMatrix,  // Translates to world space.
             cameraTransformMatrix; // Translates to camera space. (not including rotation)
}; // NOTE(review): closing brace restored — it was lost when the code was pasted into the post.

// Per-frame camera transform, shared by every object drawn this frame.
cbuffer ScreenTransformBuffer : register(b1) {
    float4x4 viewProjectionMatrix; // Rotates to camera space and then projects to screen space.
}; // NOTE(review): closing brace restored — it was lost when the code was pasted into the post.

// Per-material surface properties.
cbuffer MaterialBuffer : register(b2) {
    float3 materialDiffuseAlbedo;    // RGB diffuse reflectance.
    float  materialSpecularExponent; // Shininess; the PS packs this into the normal target's alpha.
    float3 materialSpecularAlbedo;   // RGB specular reflectance.
    bool isTextured,                 // True if the PS should sample diffuseTexture.
         isLighted;                  // True if the surface is affected by the light pass.
}; // NOTE(review): the paste truncated this buffer after "isTextured," — the PS reads
   // `isLighted`, so the trailing comma almost certainly introduced that member; restored here.

// Vertex-shader input, matching the layout of the mesh loaded from the obj file.
struct vsInput {
    float3 positionLS : POSITION;
    float3 textureLS  : TEXTURE; // Input signature uses a uvw coordinate but w is 0 and not needed for any geo pass textures.
    float3 normalLS   : NORMAL;
}; // NOTE(review): closing brace restored — it was lost when the code was pasted into the post.

// Vertex-shader output / pixel-shader input.
struct vsOutput {
    float4 positionCS : SV_POSITION; // Clip-space position.
    float2 textureLS  : TEXTURE;     // UV coordinates (w dropped by the VS).
    float3 normalWS   : NORMAL;      // World-space normal.
    float3 positionWS : POSITION;    // World-space position, forwarded to the G-buffer.
}; // NOTE(review): closing brace restored — it was lost when the code was pasted into the post.

// Geometry-pass G-buffer layout: one render target per surface attribute.
struct psOutput {
    float4 positionWS : SV_Target0;     // Surface positions.
    float4 normalWS   : SV_Target1;     // Surface normals.
    float4 diffuseAlbedo : SV_Target2;  // Surface diffuse albedo.
    float4 specularAlbedo : SV_Target3; // Surface specular albedo.
}; // NOTE(review): closing brace restored — it was lost when the code was pasted into the post.

// Geometry-pass vertex shader: transforms the vertex into clip space and
// forwards world-space position/normal plus the UV to the pixel shader.
vsOutput VS( in const vsInput _in ) {
    vsOutput _out;

    // NOTE(review): positionCS is built from the local-space position without
    // worldTransformMatrix — presumably cameraTransformMatrix already accounts
    // for the object's placement; confirm against the CPU-side matrix setup.
    _out.positionCS = mul(float4(_in.positionLS, 1.0f), mul(cameraTransformMatrix, viewProjectionMatrix));
    _out.positionWS = mul(float4(_in.positionLS, 1.0f), worldTransformMatrix).xyz;
    // w = 0 so the normal is rotated but not translated. Assumes worldTransformMatrix
    // has no non-uniform scale (otherwise an inverse-transpose would be required).
    _out.normalWS   = mul(float4(_in.normalLS, 0.0f), worldTransformMatrix).xyz;
    _out.textureLS  = _in.textureLS.xy; // w coordinate is 0 and not needed.

    return _out;
} // NOTE(review): closing brace restored — it was lost when the code was pasted into the post.

// Geometry-pass pixel shader: writes surface attributes into the G-buffer
// for the deferred light pass to consume.
psOutput PS( in vsOutput _in ) {
    psOutput _out;

    // Use the alpha channel to indicate specular light intensity.
    // Re-normalize because interpolation across the triangle denormalizes the normal.
    _out.normalWS = float4(normalize(_in.normalWS), materialSpecularExponent);

    float _lightEffectModifier;
    if (isLighted)
        _lightEffectModifier = 1.0f;
    else _lightEffectModifier = 0.0f;
    // Use the alpha channel to indicate whether the surface is affected by light for the light pass.
    _out.positionWS = float4(_in.positionWS, _lightEffectModifier);

    // Material color, modulated by the texture sample when the material is textured.
    _out.diffuseAlbedo = float4(materialDiffuseAlbedo, 1.0f);
    if (isTextured)
        _out.diffuseAlbedo *= diffuseTexture.Sample(textureSampler, _in.textureLS.xy);

    _out.specularAlbedo = float4(materialSpecularAlbedo, 1.0f);

    return _out;
} // NOTE(review): closing brace restored — it was lost when the code was pasted into the post.

One Response to “How to correctly export UV coordinates from Blender”

  1. Alright, found my issue. Not sure if this applies to OpenGL but for my DirectX application I needed to invert the v coordinate for sampling the texture.

    u = u;
    v = 1 - v;

Leave a Reply