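// Raymarches a distorted ground plane and outputs a per-pixel screen-space velocity vector
// (via get_velocity) rather than a colour. `cursor` is not declared here; it is assumed to be
// a vec4 supplied by the host, with its zw components controlling the view angles.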
const float EPSILON = 0.01;
const float MAX_DISTANCE = 100.;
const float SCREEN_DISTANCE = 2.;
const int MAX_ITER = 100;
// why oh why does WebGL not support inverse(mat3)?
mat3 inv(mat3 m) {
    float a00 = m[0][0], a01 = m[0][1], a02 = m[0][2];
    float a10 = m[1][0], a11 = m[1][1], a12 = m[1][2];
    float a20 = m[2][0], a21 = m[2][1], a22 = m[2][2];

    float b01 = a22 * a11 - a12 * a21;
    float b11 = -a22 * a10 + a12 * a20;
    float b21 = a21 * a10 - a11 * a20;

    float det = a00 * b01 + a01 * b11 + a02 * b21;

    // inverse via the adjugate divided by the determinant
    return mat3(b01, (-a22 * a01 + a02 * a21), (a12 * a01 - a02 * a11),
                b11, (a22 * a00 - a02 * a20), (-a12 * a00 + a02 * a10),
                b21, (-a21 * a00 + a01 * a20), (a11 * a00 - a01 * a10)) / det;
}
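// signed distance from point p to the plane with normal d.xyz (assumed unit length) and offset d.w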
float plane(vec3 p, vec4 d) {
    return dot(p, d.xyz) - d.w;
}
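// cosine ripple (amplitude 3) used to displace the ground plane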
float distort(vec3 p) {
    return cos(p.x) * cos(p.y + p.x * 0.5) * cos(p.y - p.x * 0.5) * 3.;
}
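// Scene distance estimate: below z = 4.5 the ground plane is displaced by distort(), and `reduce`
// shrinks the march step there since displacement breaks the exact distance bound. Above that
// height the distance to the plane z = 3 (the ripple's maximum) serves as a conservative bound,
// so full-size steps are taken.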
float map(vec3 p, out float reduce) {
    if (p.z < 4.5) {
        reduce = .3;
        p.z -= distort(p);
        return plane(p, vec4(0.0, 0.0, 1.0, 0.0));
    } else {
        reduce = 1.0;
        return plane(p, vec4(0.0, 0.0, 1.0, 3.0));
    }
}
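// March a ray from the viewpoint through screenpos; on a hit, return the screen-space projection
// of the surface velocity field, or vec2(0.0) if nothing is hit.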
vec2 mainImage(vec2 screenpos) {
    float a = cursor.z * 0.2 - 0.505; // iTime / 10.;
    float b = cursor.w * 0.2 - 0.7;
    vec3 viewpoint_location = vec3(0., 0., 10.);
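    // the view direction is built from azimuth a and elevation b, the cursor-driven angles above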
    vec3 viewpoint_direction = normalize(vec3(sin(a) * cos(b), cos(a) * cos(b), sin(b)));

    // create the upwards and sideways components of the reference frame.
    vec3 upwards = vec3(0., 0., 1.);
    vec3 viewpoint_sideways = normalize(cross(viewpoint_direction, upwards));
    vec3 viewpoint_upwards = cross(viewpoint_sideways, viewpoint_direction);

    vec3 ray_relative_start = SCREEN_DISTANCE * viewpoint_direction +
                              screenpos.x * viewpoint_sideways +
                              screenpos.y * viewpoint_upwards;

    // calculate initial values
    vec3 o = viewpoint_location;
    vec3 d = normalize(ray_relative_start);
    vec3 p = vec3(0., 0., 0.);
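    // o: ray origin, d: unit ray direction, p: offset marched along the ray so far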
    // raymarching loop
    for (int i = 0; i < MAX_ITER; i++) {
        float reduce;
        float r = map(o + p, reduce);
        float e = (EPSILON * length(p) / SCREEN_DISTANCE);
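        // the hit tolerance grows linearly with distance, keeping precision roughly constant in screen space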
        if (r < e) {
            // normals
            e = 0.01;
            float dr_dx = map(o + p + viewpoint_sideways * e, reduce) - map(o + p - viewpoint_sideways * e, reduce);
            float dr_dy = map(o + p + viewpoint_upwards * e, reduce) - map(o + p - viewpoint_upwards * e, reduce);
            float dr_dz = map(o + p + viewpoint_direction * e, reduce) - map(o + p - viewpoint_direction * e, reduce);
            vec3 normal = normalize(vec3(dr_dx, dr_dy, dr_dz));
            vec3 normal_world = normal.x * viewpoint_sideways + normal.y * viewpoint_upwards + normal.z * viewpoint_direction;
            vec3 velocity_field = cross(vec3(0.0, 0.0, 1.0), normal_world);
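            // velocity_field is horizontal and tangent to the surface (perpendicular to both world-up and the normal)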
            // screen normal reference frame is -> = viewpoint_sideways, ^ = viewpoint_upwards, x = d
            // this is not an orthogonal reference frame, mind you
            mat3 screen_ref_frame = mat3(viewpoint_sideways, viewpoint_upwards, d);
            vec3 screen_proj_normals = inv(screen_ref_frame) * velocity_field;
            return screen_proj_normals.xy * 0.5;
        }
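        // step forward by the distance bound, scaled down by `reduce` where the bound is not reliable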
        p += d * r * reduce;
        if (length(p) > MAX_DISTANCE) {
            break;
        }
    }
    // didn't converge within stated limits
    return vec2(0.0);
}
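// get_velocity simply forwards to mainImage, exposing the result as a per-pixel velocity lookup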
vec2 get_velocity(vec2 p) {
    return mainImage(p);
}