Atmospheric Scattering Shader

Just found this rather nice atmospheric scattering C++ rendering code over at scratchapixel.com and thought I’d do a quick conversion to a GLSL shader as a test for the Timeline software I’m working on. Works rather nicely…

[Screenshot: 2014-08-20]

My (non-optimised) fragment shader conversion is:

#version 150

#define M_PI 3.1415926535897932384626433832795

uniform float TimeOfDay; // range 0.0 -> 1.0 (0.0 = Midnight, 0.5 = Midday, etc)

const float RADIUS_EARTH          = 6360e3;    // metres
const float RADIUS_ATMOSPHERE     = 6420e3;    // metres
const float RAYLEIGH_SCALE_HEIGHT = 7994;      // metres
const float MIE_SCALE_HEIGHT      = 1200;      // metres
const float SUN_INTENSITY         = 20;

const float g = 0.76;

const vec3 betaR = vec3( 5.5e-6, 13.0e-6, 22.4e-6 );    // Rayleigh scattering coefficients at sea level
const vec3 betaM = vec3( 21e-6 );                       // Mie scattering coefficients at sea level

out vec4 fragColor;    // explicit output; gl_FragColor is unavailable in #version 150 core

vec3 sunDirection = vec3( 0, 1, 0 );

const int numSamples = 16;
const int numSamplesLight = 8;

struct Ray
{
    vec3 o; //origin
    vec3 d; //direction (should always be normalized)
};

struct Sphere
{
    vec3 pos;   //center of sphere position
    float rad;  //radius
};

const Sphere SPHERE_EARTH      = Sphere( vec3( 0 ), RADIUS_EARTH );
const Sphere SPHERE_ATMOSPHERE = Sphere( vec3( 0 ), RADIUS_ATMOSPHERE );

// ray-sphere intersection: with ray.d normalised, this reduces to solving t^2 + b*t + c = 0
bool intersect( in Ray ray, in Sphere sphere, out float t0, out float t1 )
{
    vec3 oc = ray.o - sphere.pos;
    float b = 2.0 * dot(ray.d, oc);
    float c = dot(oc, oc) - sphere.rad*sphere.rad;
    float disc = b * b - 4.0 * c;

    if (disc < 0.0)
        return false;

    // numerically stable quadratic root: pick the sign that avoids catastrophic cancellation
    float q;

    if (b > 0.0)
        q = (-b - sqrt(disc)) / 2.0;
    else
        q = (-b + sqrt(disc)) / 2.0;

    t0 = q;
    t1 = c / q;

    // make sure t0 is smaller than t1
    if (t0 > t1)
    {
        // if t0 is bigger than t1 swap them around
        float temp = t0;
        t0 = t1;
        t1 = temp;
    }

    // if t1 is less than zero, the object is in the ray's negative direction
    // and consequently the ray misses the sphere
    if (t1 < 0.0)
        return false;

    if( t0 < 0.0 )
    {
        t0 = 0;
    }

    return( true );
}

vec3 computeIncidentLight( in Ray r )
{
    float       t0, t1;

    if( !intersect( r, SPHERE_ATMOSPHERE, t0, t1 ) )
    {
        return vec3( 1 );
    }

    float segmentLength = ( t1 - t0 ) / numSamples;
    float tCurrent = t0;

    vec3 sumR = vec3( 0 );
    vec3 sumM = vec3( 0 );

    float opticalDepthR = 0;
    float opticalDepthM = 0;

    // mu is the cosine of the angle between the view ray and the sun direction
    float mu = dot( r.d, sunDirection );

    // Rayleigh phase function: 3/(16*pi) * (1 + mu^2)
    float phaseR = 3 / ( 16 * M_PI ) * ( 1 + mu * mu );

    // Mie phase function (a Cornette-Shanks-style approximation with anisotropy g)
    float phaseM = 3 / (  8 * M_PI ) * ( ( 1 - g * g ) * ( 1 + mu * mu ) ) / ( ( 2 + g * g ) * pow( 1 + g * g - 2 * g * mu, 1.5 ) );

    for( int i = 0; i < numSamples ; i++ )
    {
        vec3    samplePosition = r.o + r.d * ( tCurrent + 0.5 * segmentLength );
        float   height = length( samplePosition ) - RADIUS_EARTH;

        // compute optical depth for light

        float hr = exp( -height / RAYLEIGH_SCALE_HEIGHT ) * segmentLength;
        float hm = exp( -height / MIE_SCALE_HEIGHT      ) * segmentLength;

        opticalDepthR += hr;
        opticalDepthM += hm;

        // light optical depth

        Ray lightRay = Ray( samplePosition, sunDirection );

        float lmin, lmax;

        intersect( lightRay, SPHERE_ATMOSPHERE, lmin, lmax );

        float segmentLengthLight = lmax / numSamplesLight;
        float tCurrentLight = 0;
        float opticalDepthLightR = 0;
        float opticalDepthLightM = 0;
        
        int j = 0;

        for( ; j < numSamplesLight ; j++ )
        {
            vec3 samplePositionLight = lightRay.o + lightRay.d * ( tCurrentLight + 0.5 * segmentLengthLight );

            float heightLight = length( samplePositionLight ) - RADIUS_EARTH;

            // the light ray has dipped below the Earth's surface: this sample is shadowed
            if( heightLight < 0 )
            {
                break;
            }

            opticalDepthLightR += exp( -heightLight / RAYLEIGH_SCALE_HEIGHT ) * segmentLengthLight;
            opticalDepthLightM += exp( -heightLight / MIE_SCALE_HEIGHT      ) * segmentLengthLight;

            tCurrentLight += segmentLengthLight;
        }

        // only accumulate if the light ray reached the top of the atmosphere unoccluded
        if( j == numSamplesLight )
        {
            vec3 tau = betaR * ( opticalDepthR + opticalDepthLightR ) + betaM * 1.1 * ( opticalDepthM + opticalDepthLightM );
            vec3 attenuation = exp( -tau );

            sumR += hr * attenuation;
            sumM += hm * attenuation;
        }

        tCurrent += segmentLength;
    }

    // combine the Rayleigh and Mie contributions, weighted by their phase functions and scattering coefficients
    return( SUN_INTENSITY * ( sumR * phaseR * betaR + sumM * phaseM * betaM ) );
}

void main()
{
    // hardcoded output resolution for this test
    const int width  = 512;
    const int height = 512;

    // map TimeOfDay onto a sun angle: 0.5 (midday) puts the sun directly
    // overhead, 0.0 (midnight) puts it directly below the horizon
    float a = mod( TimeOfDay - 0.5, 1.0 ) * 2.0 * M_PI;

    sunDirection = normalize( vec3( 0, cos( a ), sin( a ) ) );

    // fisheye projection: map the fragment to [-1,1]^2 and treat points inside
    // the unit disc as view directions over the upper hemisphere
    float x = 2 * ( gl_FragCoord.x + 0.5 ) / ( width  - 1 ) - 1;
    float y = 2 * ( gl_FragCoord.y + 0.5 ) / ( height - 1 ) - 1;

    float z2 = x * x + y * y;

    if( z2 <= 1 )
    {
        float phi   = atan( y, x );
        float theta = acos( 1 - z2 );

        vec3 dir = vec3( sin( theta ) * cos( phi ), cos( theta ), sin( theta ) * sin( phi ) );

        // camera sits one metre above the Earth's surface
        vec3 pos = vec3( 0, RADIUS_EARTH + 1, 0 );

        fragColor = vec4( computeIncidentLight( Ray( pos, normalize( dir ) ) ), 1 );
    }
    else
    {
        fragColor = vec4( 0 );
    }
}
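
The shader expects just one input from the host: the TimeOfDay uniform. As a rough illustration (not the actual Timeline code), a host application might update it each frame something like this, assuming a loader such as GLEW is already initialised and prog is the linked shader program:

#include <GL/glew.h>
#include <chrono>
#include <cmath>

// Hypothetical host-side update: advance TimeOfDay over a 60-second day/night cycle.
void updateTimeOfDay( GLuint prog )
{
    using clock = std::chrono::steady_clock;
    static const clock::time_point start = clock::now();

    // seconds since start, wrapped into the shader's 0.0 -> 1.0 day cycle
    float seconds   = std::chrono::duration<float>( clock::now() - start ).count();
    float timeOfDay = std::fmod( seconds / 60.0f, 1.0f );

    glUseProgram( prog );
    glUniform1f( glGetUniformLocation( prog, "TimeOfDay" ), timeOfDay );
}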

Timeline Development – 3rd August 2014

[Screenshot: 2014-08-03 12.13.08]

It’s been a while since my last update, though not from lack of action; rather, I’ve been struggling with my latest project for the past few months, and I feel it’s time to pull back the curtain a bit and show what I’ve been working on.

My original design for the Timeline software was a nice open-ended sequencer that could manipulate all manner of data, from single values (for MIDI or OSC control of parameters) to colours, audio, and even video, combined with flexible (possibly too flexible) control over how each track played back, with repeating sections, random markers, and all manner of tricks that I was getting really excited about using.

I’d spent almost a year working on it and had a pretty nice media playback engine, and everything seemed to be heading towards a 1.0 release back in June 2014. But then I hit a wall, which I have to say is pretty rare for me in my software development experience, as I’ve always had a clear idea of the role and function of each system I’m developing.

The problem was the growing complexity of visually managing the relationships between the different tracks of data, and how these related to other applications and devices through the various input and output interfaces. I was also toying with the idea of being able to apply real-time effects to video and audio (also data), and these did not comfortably fit into the design I had come up with.

I’ve also slowly been working on another application called PatchBox that uses a node based interface to visually build connections between blocks of functionality, so I took a deep breath and ripped the code apart and put in a new interface:

[Screenshot: 2014-06-25 21.49.30]

The node interface went some way towards solving the problem of presenting the relationship between tracks and devices, but there was a major problem: the core code for the node system (it’s actually the code that drives several of my art installations, such as Shadows of Light) was rather incompatible with the core code of the Timeline application, and a hard decision had to be made:

  1. Release Timeline and PatchBox separately and fix the interface issue over time.
  2. Combine the two applications, which would require taking a massive step back equivalent to months of development time.

Not an easy decision to make, compounded by the fact that, as a freelance artist, until I get a product on sale I’m basically paying for all the development time out of my own pocket, so the latter option was not to be taken lightly.

After a couple of weeks of chin stroking, frantic diagrams scratched in notebooks, false starts, and thinking about what configuration would be most commercially viable, I came to a final thought:

“Make the tool that you need for your art”

It’s not that I don’t want it to be a useful tool that other people will want to use and buy at some point (that would be lovely), but I’m not a software design company, and this is primarily an “art platform” for my own work, so I have to listen to what feels right to me.

So, I chose the latter (of course) and I’ve been working on it for at least a few hours a day, pretty much every day, for the past few months. The screenshot at the top of this post is the latest, showing a colour timeline track feeding into an OpenGL shader.

There is still much to be done and it’s pretty gruelling at times as I’m having to go over old ground repeatedly, but I feel like it’s heading in the right direction, and I’m already creating new artworks using it that wouldn’t have previously been possible.

Realistically, a 1.0 release isn’t now going to happen until 2015. With a long solo project like this it is easy to find yourself on the long slide into a quiet madness of complexity and introspection, so I’m planning more regular updates to at least keep my progress in check by “real people”. To this end, if you have any comments, questions, or general messages of encouragement, I’d be happy to hear them.

Timeline Update

[Screenshot: Timeline, 2014-02-17]

I managed to spend a few hours working on my OSC and MIDI sequencer project today and completely rewrote the audio side of it.

It now uses FFmpeg to load audio and PortAudio for playback.
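
As a rough sketch of how PortAudio playback can be wired up (illustrative names only — the DecodedAudio struct is my own, the FFmpeg decoding step is omitted, and the samples are assumed to already be decoded as interleaved stereo floats):

#include <portaudio.h>
#include <algorithm>
#include <cstring>

// Hypothetical container for samples already decoded (e.g. via FFmpeg)
struct DecodedAudio
{
    const float*  samples;    // interleaved stereo floats at 44.1kHz
    unsigned long frames;     // total number of frames
    unsigned long position;   // current playback position in frames
};

static int playCallback( const void*, void* output, unsigned long frameCount,
                         const PaStreamCallbackTimeInfo*, PaStreamCallbackFlags,
                         void* userData )
{
    DecodedAudio* audio = static_cast<DecodedAudio*>( userData );
    float*        out   = static_cast<float*>( output );

    // copy as many frames as we have left, then pad the rest with silence
    unsigned long toCopy = std::min( frameCount, audio->frames - audio->position );

    std::memcpy( out, audio->samples + audio->position * 2, toCopy * 2 * sizeof( float ) );
    std::memset( out + toCopy * 2, 0, ( frameCount - toCopy ) * 2 * sizeof( float ) );

    audio->position += toCopy;

    return ( audio->position < audio->frames ) ? paContinue : paComplete;
}

bool playAudio( DecodedAudio& audio )
{
    if( Pa_Initialize() != paNoError )
        return( false );

    PaStream* stream = 0;

    // stereo output, float32 samples, 44.1kHz, let PortAudio pick the buffer size
    if( Pa_OpenDefaultStream( &stream, 0, 2, paFloat32, 44100,
                              paFramesPerBufferUnspecified, playCallback, &audio ) != paNoError )
        return( false );

    return( Pa_StartStream( stream ) == paNoError );
}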

I’m slowly developing the interface into something usable, though the design is pretty simplistic right now.

At some point I’ll get around to doing a release for Windows, OSX, and Linux – probably to coincide with the developing MIDI and OSC support in Painting With Light.

Window Capture Video Streaming

fugScreenCapture is a new utility that allows you to capture all or part of the window of an application running on your computer, and send it as a video stream to other applications that support the bigfug video streaming system.

NOTE: fugScreenCapture is currently in development, which means:

  • It’s currently only available for Windows – OSX and Linux versions are planned
  • It uses only the most basic window capture method (see the sketch after this list), so:
    • The speed of capture might not be that fast on your system
    • It might not be able to capture all windows
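
For the technically curious, the “most basic” capture on Windows generally means a plain GDI BitBlt copy, along these lines (an illustrative sketch of the general technique, not fugScreenCapture’s actual code):

#include <windows.h>
#include <vector>

// Capture a window's client area into a 32-bit BGRA pixel buffer using plain GDI
std::vector<unsigned char> captureWindow( HWND hwnd, int& width, int& height )
{
    RECT rc;
    GetClientRect( hwnd, &rc );

    width  = rc.right - rc.left;
    height = rc.bottom - rc.top;

    HDC     windowDC = GetDC( hwnd );
    HDC     memDC    = CreateCompatibleDC( windowDC );
    HBITMAP bitmap   = CreateCompatibleBitmap( windowDC, width, height );

    // blit the window contents into our bitmap (basic, and potentially slow)
    HGDIOBJ oldBitmap = SelectObject( memDC, bitmap );
    BitBlt( memDC, 0, 0, width, height, windowDC, 0, 0, SRCCOPY );
    SelectObject( memDC, oldBitmap );   // deselect before reading the pixels back

    // read the pixels back as top-down 32-bit BGRA
    BITMAPINFO bmi = {};
    bmi.bmiHeader.biSize        = sizeof( BITMAPINFOHEADER );
    bmi.bmiHeader.biWidth       = width;
    bmi.bmiHeader.biHeight      = -height;   // negative height = top-down row order
    bmi.bmiHeader.biPlanes      = 1;
    bmi.bmiHeader.biBitCount    = 32;
    bmi.bmiHeader.biCompression = BI_RGB;

    std::vector<unsigned char> pixels( width * height * 4 );
    GetDIBits( windowDC, bitmap, 0, height, pixels.data(), &bmi, DIB_RGB_COLORS );

    DeleteObject( bitmap );
    DeleteDC( memDC );
    ReleaseDC( hwnd, windowDC );

    return pixels;
}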

You can download a demo version to try the capture performance out on your system and see if it meets your requirements.

Download Demo

fugFeedbackGL 1.1 update

[Screenshot: fugFeedbackGL green screen]

Laurent Smadja and Scott Baker informed me of a couple of problems with fugFeedbackGL: alpha wasn’t working quite as expected, and on some Apple computers the resulting image was stretched incorrectly.

There is now a 1.1 update for fugFeedbackGL for both Windows and Apple OSX that fixes these issues.

Existing customers can download the update by visiting the My Account page.

Reorganising Websites

It’s been long overdue, but I’m slowly getting around to reorganising my various websites to be more focussed on specific topics, so:

www.alexmayarts.co.uk – is now the web site for my art, talks, and workshops.

www.bigfug.com – will now be for my FreeFrame, video streaming, and other software tools

paintingwithlight.bigfug.com – is now pwl.bigfug.com and is the place for my video mapping software Painting With Light

mantis.bigfug.com – is the place to report bugs for any of my software projects

I’m moving these about at the moment so expect pages to come and go.