Skip to content
This repository has been archived by the owner on Aug 28, 2024. It is now read-only.

Axis3 creation errors #97

Open
SebastianM-C opened this issue May 5, 2021 · 2 comments
Open

Axis3 creation errors #97

SebastianM-C opened this issue May 5, 2021 · 2 comments

Comments

@SebastianM-C
Copy link
Member

MWE (minimal working example):

julia> fig = Figure()

julia> ax = Axis3(fig[1,1])
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["11020546761748311111",[{"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\n\nin vec2 frag_uv;\nin vec4 frag_color;\n\nin vec3 o_normal;\nin vec3 o_camdir;\nin vec3 o_lightdir;\n\nvec3 blinnphong(vec3 N, vec3 V, vec3 L, vec3 color){\n    float diff_coeff = max(dot(L, N), 0.0);\n\n    // specular coefficient\n    vec3 H = normalize(L + V);\n\n    float spec_coeff = pow(max(dot(H, N), 0.0), get_shininess());\n\n    // final lighting model\n    return vec3(\n        get_ambient() * color +\n        get_diffuse() * diff_coeff * color +\n        get_specular() * spec_coeff\n    );\n}\n\nvec4 get_color(vec3 color, vec2 uv, bool colorrange, bool colormap){\n    return vec4(color, 1.0); // we must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(vec4 color, vec2 uv, bool colorrange, bool colormap){\n    return color; // we 
must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(bool color, vec2 uv, bool colorrange, bool colormap){\n    return frag_color;  // color not in uniform\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, bool colormap){\n    return texture(color, uv);\n}\n\nfloat _normalize(float val, float from, float to){return (val-from) / (to - from);}\n\nvec4 get_color(sampler2D color, vec2 uv, vec2 colorrange, sampler2D colormap){\n    float value = texture(color, uv).x;\n    float normed = _normalize(value, colorrange.x, colorrange.y);\n    return texture(colormap, vec2(normed, 0.0));\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, sampler2D colormap){\n    return texture(color, uv);\n}\n\nvoid main() {\n    vec4 real_color = get_color(uniform_color, frag_uv, get_colorrange(), colormap);\n    vec3 shaded_color = real_color.xyz;\n\n    if(get_shading()){\n        vec3 L = normalize(o_lightdir);\n        vec3 N = normalize(o_normal);\n        shaded_color = blinnphong(N, o_camdir, L, real_color.rgb);\n    }\n    fragment_color = vec4(shaded_color, 
real_color.a);\n}\n","faces":[0,1,2,2,3,0],"uniform_updater":"6112322790391812303","vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,0,-0.5,0,100,-0.5,100,100,-0.5,100,0,-0.5]},"type_length":3},"normals":{"flat":{"type":"Float32Array","data":[0,0,-1,0,0,-1,0,0,-1,0,0,-1]},"type_length":3}},"name":"mesh-4338530744799629452","visible":"3961733811311831387","uuid":"4338530744799629452","uniforms":{"specular":{"__javascript_type__":"TypedVector","payload":[0.20000000298023224,0.20000000298023224,0.20000000298023224]},"colormap":true,"ambient":{"__javascript_type__":"TypedVector","payload":[0.550000011920929,0.550000011920929,0.550000011920929]},"normalmatrix":[1,0,0,0,1,0,0,0,1],"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"shininess":32,"uv":{"__javascript_type__":"TypedVector","payload":[0,0]},"colorrange":true,"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1],"color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"uniform_color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"shading":false,"diffuse":{"__javascript_type__":"TypedVector","payload":[0.4000000059604645,0.4000000059604645,0.4000000059604645]}},"vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec3 position;\nvec3 get_position(){return position;}\nin vec3 normals;\nvec3 get_normals(){return normals;}\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return 
colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\n\n\n\nout vec2 frag_uv;\nout vec3 o_normal;\nout vec3 o_camdir;\nout vec3 o_lightdir;\n\nout vec4 frag_color;\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\n\n\nvoid main(){\n    // get_* gets the global inputs (uniform, sampler, position array)\n    // those functions will get inserted by the shader creation pipeline\n    vec3 vertex_position = tovec3(get_position());\n    vec4 position_world = model * vec4(vertex_position, 1);\n\n    // normal in world space\n    o_normal = get_normalmatrix() * get_normals();\n    // position in view space (as seen from camera)\n    vec4 view_pos = view * position_world;\n    // position in clip space (w/ depth)\n    gl_Position = projection * view_pos;\n    // direction to light\n    o_lightdir = normalize(view*vec4(get_lightposition(), 1.0) - view_pos).xyz;\n    // direction to camera\n    // This is equivalent to\n    // normalize(view*vec4(eyeposition, 1.0) - view_pos).xyz\n    // (by definition `view * eyeposition = 0`)\n    o_camdir = normalize(-view_pos).xyz;\n\n    frag_uv = get_uv();\n    frag_uv = vec2(1.0 - frag_uv.y, frag_uv.x);\n    frag_color = tovec4(get_color());\n}\n\n","attribute_updater":"2462282561326717060"}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/jfc9q/src/wglmakie.js:64)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/jfc9q/src/wglmakie.js:49
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/jfc9q/src/wglmakie.js:48)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/bo3OG/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/bo3OG/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/bo3OG/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/bo3OG/js_dependencies/JSServe.js:451)
@SimonDanisch
Copy link
Member

I think this is fixed in the newest version:
image

@SebastianM-C
Copy link
Member Author

I'm still having this issue with WGLMakie v0.3.9, Makie v0.13.9, and JSServe v1.2.2.

image

An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec4 highclip;\nvec4 get_highclip(){return highclip;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform vec4 nan_color;\nvec4 get_nan_color(){return nan_color;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\nuniform vec4 lowclip;\nvec4 get_lowclip(){return lowclip;}\n\nin vec2 frag_uv;\nin vec4 frag_color;\n\nin vec3 o_normal;\nin vec3 o_camdir;\nin vec3 o_lightdir;\n\nvec3 blinnphong(vec3 N, vec3 V, vec3 L, vec3 color){\n    float diff_coeff = max(dot(L, N), 0.0);\n\n    // specular coefficient\n    vec3 H = normalize(L + V);\n\n    float spec_coeff = pow(max(dot(H, N), 0.0), get_shininess());\n\n    // final lighting model\n    return vec3(\n        get_ambient() * color +\n        get_diffuse() * diff_coeff * color +\n        get_specular() * spec_coeff\n    );\n}\n\nvec4 get_color(vec3 color, vec2 uv, bool colorrange, bool colormap){\n    
return vec4(color, 1.0); // we must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(vec4 color, vec2 uv, bool colorrange, bool colormap){\n    return color; // we must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(bool color, vec2 uv, bool colorrange, bool colormap){\n    return frag_color;  // color not in uniform\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, bool colormap){\n    return texture(color, uv);\n}\n\nfloat _normalize(float val, float from, float to){return (val-from) / (to - from);}\n\nvec4 get_color(sampler2D color, vec2 uv, vec2 colorrange, sampler2D colormap){\n    float value = texture(color, uv).x;\n    float normed = _normalize(value, colorrange.x, colorrange.y);\n    vec4 c = texture(colormap, vec2(normed, 0.0));\n\n    if (isnan(value)) {\n        c = get_nan_color();\n    } else if (value < colorrange.x) {\n        c = get_lowclip();\n    } else if (value > colorrange.y) {\n        c = get_highclip();\n    }\n    return c;\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, sampler2D colormap){\n    return texture(color, uv);\n}\n\nvoid main() {\n    vec4 real_color = get_color(uniform_color, frag_uv, get_colorrange(), colormap);\n    vec3 shaded_color = real_color.xyz;\n\n    if(get_shading()){\n        vec3 L = normalize(o_lightdir);\n        vec3 N = normalize(o_normal);\n        shaded_color = blinnphong(N, o_camdir, L, real_color.rgb);\n    }\n    fragment_color = vec4(shaded_color, 
real_color.a);\n}\n","faces":[0,1,2,2,3,0],"uniform_updater":"2741849011602753516","vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,0,-0.5,0,100,-0.5,100,100,-0.5,100,0,-0.5]},"type_length":3},"normals":{"flat":{"type":"Float32Array","data":[0,0,-1,0,0,-1,0,0,-1,0,0,-1]},"type_length":3}},"name":"mesh-16020861128831355060","visible":"3627271117804668370","uuid":"16020861128831355060","uniforms":{"specular":{"__javascript_type__":"TypedVector","payload":[0.20000000298023224,0.20000000298023224,0.20000000298023224]},"colormap":true,"highclip":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"ambient":{"__javascript_type__":"TypedVector","payload":[0.550000011920929,0.550000011920929,0.550000011920929]},"normalmatrix":[1,0,0,0,1,0,0,0,1],"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"nan_color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"shininess":32,"uv":{"__javascript_type__":"TypedVector","payload":[0,0]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1],"colorrange":true,"color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"uniform_color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"shading":false,"diffuse":{"__javascript_type__":"TypedVector","payload":[0.4000000059604645,0.4000000059604645,0.4000000059604645]},"lowclip":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]}},"vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec3 position;\nvec3 get_position(){return position;}\nin vec3 normals;\nvec3 get_normals(){return normals;}\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec4 highclip;\nvec4 get_highclip(){return highclip;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return 
normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform vec4 nan_color;\nvec4 get_nan_color(){return nan_color;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\nuniform vec4 lowclip;\nvec4 get_lowclip(){return lowclip;}\n\n\n\nout vec2 frag_uv;\nout vec3 o_normal;\nout vec3 o_camdir;\nout vec3 o_lightdir;\n\nout vec4 frag_color;\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\n\n\nvoid main(){\n    // get_* gets the global inputs (uniform, sampler, position array)\n    // those functions will get inserted by the shader creation pipeline\n    vec3 vertex_position = tovec3(get_position());\n    if (isnan(vertex_position.z)) {\n        vertex_position.z = 0.0;\n    }\n    vec4 position_world = model * vec4(vertex_position, 1);\n\n    // normal in world space\n    o_normal = get_normalmatrix() * get_normals();\n    // position in view space (as seen from camera)\n    vec4 view_pos = view * position_world;\n    // position in clip space (w/ depth)\n    gl_Position = projection * view_pos;\n    // direction to light\n    o_lightdir = normalize(view*vec4(get_lightposition(), 1.0) - view_pos).xyz;\n    // direction to camera\n    // This is equivalent to\n    // normalize(view*vec4(eyeposition, 1.0) - view_pos).xyz\n    // (by definition `view * eyeposition = 0`)\n    o_camdir = normalize(-view_pos).xyz;\n\n    frag_uv = get_uv();\n    
frag_uv = vec2(1.0 - frag_uv.y, frag_uv.x);\n    frag_color = tovec4(get_color());\n}\n\n","attribute_updater":"14760972518310517822"}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec4 highclip;\nvec4 get_highclip(){return highclip;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform vec4 nan_color;\nvec4 get_nan_color(){return nan_color;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\nuniform vec4 lowclip;\nvec4 get_lowclip(){return lowclip;}\n\nin vec2 frag_uv;\nin vec4 frag_color;\n\nin vec3 o_normal;\nin vec3 o_camdir;\nin vec3 o_lightdir;\n\nvec3 blinnphong(vec3 N, vec3 V, vec3 L, vec3 color){\n    float diff_coeff = max(dot(L, N), 0.0);\n\n    // specular coefficient\n    vec3 H = normalize(L + V);\n\n    float spec_coeff = pow(max(dot(H, N), 0.0), get_shininess());\n\n    // final lighting model\n    return vec3(\n        get_ambient() * color +\n        get_diffuse() * diff_coeff * color +\n        get_specular() * spec_coeff\n    );\n}\n\nvec4 get_color(vec3 color, vec2 uv, bool colorrange, bool colormap){\n    
return vec4(color, 1.0); // we must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(vec4 color, vec2 uv, bool colorrange, bool colormap){\n    return color; // we must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(bool color, vec2 uv, bool colorrange, bool colormap){\n    return frag_color;  // color not in uniform\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, bool colormap){\n    return texture(color, uv);\n}\n\nfloat _normalize(float val, float from, float to){return (val-from) / (to - from);}\n\nvec4 get_color(sampler2D color, vec2 uv, vec2 colorrange, sampler2D colormap){\n    float value = texture(color, uv).x;\n    float normed = _normalize(value, colorrange.x, colorrange.y);\n    vec4 c = texture(colormap, vec2(normed, 0.0));\n\n    if (isnan(value)) {\n        c = get_nan_color();\n    } else if (value < colorrange.x) {\n        c = get_lowclip();\n    } else if (value > colorrange.y) {\n        c = get_highclip();\n    }\n    return c;\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, sampler2D colormap){\n    return texture(color, uv);\n}\n\nvoid main() {\n    vec4 real_color = get_color(uniform_color, frag_uv, get_colorrange(), colormap);\n    vec3 shaded_color = real_color.xyz;\n\n    if(get_shading()){\n        vec3 L = normalize(o_lightdir);\n        vec3 N = normalize(o_normal);\n        shaded_color = blinnphong(N, o_camdir, L, real_color.rgb);\n    }\n    fragment_color = vec4(shaded_color, 
real_color.a);\n}\n","faces":[0,1,2,2,3,0],"uniform_updater":"4739581195115134776","vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[100.5,0,0,100.5,0,100,100.5,100,100,100.5,100,0]},"type_length":3},"normals":{"flat":{"type":"Float32Array","data":[-1,0,0,-1,0,0,-1,0,0,-1,0,0]},"type_length":3}},"name":"mesh-2529233409374058725","visible":"1966897753754113256","uuid":"2529233409374058725","uniforms":{"specular":{"__javascript_type__":"TypedVector","payload":[0.20000000298023224,0.20000000298023224,0.20000000298023224]},"colormap":true,"highclip":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"ambient":{"__javascript_type__":"TypedVector","payload":[0.550000011920929,0.550000011920929,0.550000011920929]},"normalmatrix":[1,0,0,0,1,0,0,0,1],"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"nan_color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"shininess":32,"uv":{"__javascript_type__":"TypedVector","payload":[0,0]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1],"colorrange":true,"color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"uniform_color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"shading":false,"diffuse":{"__javascript_type__":"TypedVector","payload":[0.4000000059604645,0.4000000059604645,0.4000000059604645]},"lowclip":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]}},"vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec3 position;\nvec3 get_position(){return position;}\nin vec3 normals;\nvec3 get_normals(){return normals;}\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec4 highclip;\nvec4 get_highclip(){return highclip;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return 
normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform vec4 nan_color;\nvec4 get_nan_color(){return nan_color;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\nuniform vec4 lowclip;\nvec4 get_lowclip(){return lowclip;}\n\n\n\nout vec2 frag_uv;\nout vec3 o_normal;\nout vec3 o_camdir;\nout vec3 o_lightdir;\n\nout vec4 frag_color;\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\n\n\nvoid main(){\n    // get_* gets the global inputs (uniform, sampler, position array)\n    // those functions will get inserted by the shader creation pipeline\n    vec3 vertex_position = tovec3(get_position());\n    if (isnan(vertex_position.z)) {\n        vertex_position.z = 0.0;\n    }\n    vec4 position_world = model * vec4(vertex_position, 1);\n\n    // normal in world space\n    o_normal = get_normalmatrix() * get_normals();\n    // position in view space (as seen from camera)\n    vec4 view_pos = view * position_world;\n    // position in clip space (w/ depth)\n    gl_Position = projection * view_pos;\n    // direction to light\n    o_lightdir = normalize(view*vec4(get_lightposition(), 1.0) - view_pos).xyz;\n    // direction to camera\n    // This is equivalent to\n    // normalize(view*vec4(eyeposition, 1.0) - view_pos).xyz\n    // (by definition `view * eyeposition = 0`)\n    o_camdir = normalize(-view_pos).xyz;\n\n    frag_uv = get_uv();\n    
frag_uv = vec2(1.0 - frag_uv.y, frag_uv.x);\n    frag_color = tovec4(get_color());\n}\n\n","attribute_updater":"11814718665919899601"}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec4 highclip;\nvec4 get_highclip(){return highclip;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform vec4 nan_color;\nvec4 get_nan_color(){return nan_color;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\nuniform vec4 lowclip;\nvec4 get_lowclip(){return lowclip;}\n\nin vec2 frag_uv;\nin vec4 frag_color;\n\nin vec3 o_normal;\nin vec3 o_camdir;\nin vec3 o_lightdir;\n\nvec3 blinnphong(vec3 N, vec3 V, vec3 L, vec3 color){\n    float diff_coeff = max(dot(L, N), 0.0);\n\n    // specular coefficient\n    vec3 H = normalize(L + V);\n\n    float spec_coeff = pow(max(dot(H, N), 0.0), get_shininess());\n\n    // final lighting model\n    return vec3(\n        get_ambient() * color +\n        get_diffuse() * diff_coeff * color +\n        get_specular() * spec_coeff\n    );\n}\n\nvec4 get_color(vec3 color, vec2 uv, bool colorrange, bool colormap){\n    
return vec4(color, 1.0); // we must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(vec4 color, vec2 uv, bool colorrange, bool colormap){\n    return color; // we must prohibit uv from getting into dead variable removal\n}\n\nvec4 get_color(bool color, vec2 uv, bool colorrange, bool colormap){\n    return frag_color;  // color not in uniform\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, bool colormap){\n    return texture(color, uv);\n}\n\nfloat _normalize(float val, float from, float to){return (val-from) / (to - from);}\n\nvec4 get_color(sampler2D color, vec2 uv, vec2 colorrange, sampler2D colormap){\n    float value = texture(color, uv).x;\n    float normed = _normalize(value, colorrange.x, colorrange.y);\n    vec4 c = texture(colormap, vec2(normed, 0.0));\n\n    if (isnan(value)) {\n        c = get_nan_color();\n    } else if (value < colorrange.x) {\n        c = get_lowclip();\n    } else if (value > colorrange.y) {\n        c = get_highclip();\n    }\n    return c;\n}\n\nvec4 get_color(sampler2D color, vec2 uv, bool colorrange, sampler2D colormap){\n    return texture(color, uv);\n}\n\nvoid main() {\n    vec4 real_color = get_color(uniform_color, frag_uv, get_colorrange(), colormap);\n    vec3 shaded_color = real_color.xyz;\n\n    if(get_shading()){\n        vec3 L = normalize(o_lightdir);\n        vec3 N = normalize(o_normal);\n        shaded_color = blinnphong(N, o_camdir, L, real_color.rgb);\n    }\n    fragment_color = vec4(shaded_color, 
real_color.a);\n}\n","faces":[0,1,2,2,3,0],"uniform_updater":"14830999005178962582","vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,100.5,0,0,100.5,100,100,100.5,100,100,100.5,0]},"type_length":3},"normals":{"flat":{"type":"Float32Array","data":[0,1,0,0,1,0,0,1,0,0,1,0]},"type_length":3}},"name":"mesh-11712152090100918226","visible":"1722422644578893962","uuid":"11712152090100918226","uniforms":{"specular":{"__javascript_type__":"TypedVector","payload":[0.20000000298023224,0.20000000298023224,0.20000000298023224]},"colormap":true,"highclip":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"ambient":{"__javascript_type__":"TypedVector","payload":[0.550000011920929,0.550000011920929,0.550000011920929]},"normalmatrix":[1,0,0,0,1,0,0,0,1],"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"nan_color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"shininess":32,"uv":{"__javascript_type__":"TypedVector","payload":[0,0]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1],"colorrange":true,"color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"uniform_color":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]},"shading":false,"diffuse":{"__javascript_type__":"TypedVector","payload":[0.4000000059604645,0.4000000059604645,0.4000000059604645]},"lowclip":{"__javascript_type__":"TypedVector","payload":[0,0,0,0]}},"vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec3 position;\nvec3 get_position(){return position;}\nin vec3 normals;\nvec3 get_normals(){return normals;}\n\n// Uniforms: \nuniform vec3 specular;\nvec3 get_specular(){return specular;}\nuniform bool colormap;\nbool get_colormap(){return colormap;}\nuniform vec4 highclip;\nvec4 get_highclip(){return highclip;}\nuniform vec3 ambient;\nvec3 get_ambient(){return ambient;}\nuniform mat3 normalmatrix;\nmat3 get_normalmatrix(){return 
normalmatrix;}\nuniform vec3 lightposition;\nvec3 get_lightposition(){return lightposition;}\nuniform vec4 nan_color;\nvec4 get_nan_color(){return nan_color;}\nuniform float shininess;\nfloat get_shininess(){return shininess;}\nuniform vec2 uv;\nvec2 get_uv(){return uv;}\nuniform bool colorrange;\nbool get_colorrange(){return colorrange;}\nuniform mat4 model;\nmat4 get_model(){return model;}\nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform vec4 uniform_color;\nvec4 get_uniform_color(){return uniform_color;}\nuniform bool shading;\nbool get_shading(){return shading;}\nuniform vec3 diffuse;\nvec3 get_diffuse(){return diffuse;}\nuniform vec4 lowclip;\nvec4 get_lowclip(){return lowclip;}\n\n\n\nout vec2 frag_uv;\nout vec3 o_normal;\nout vec3 o_camdir;\nout vec3 o_lightdir;\n\nout vec4 frag_color;\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\n\n\nvoid main(){\n    // get_* gets the global inputs (uniform, sampler, position array)\n    // those functions will get inserted by the shader creation pipeline\n    vec3 vertex_position = tovec3(get_position());\n    if (isnan(vertex_position.z)) {\n        vertex_position.z = 0.0;\n    }\n    vec4 position_world = model * vec4(vertex_position, 1);\n\n    // normal in world space\n    o_normal = get_normalmatrix() * get_normals();\n    // position in view space (as seen from camera)\n    vec4 view_pos = view * position_world;\n    // position in clip space (w/ depth)\n    gl_Position = projection * view_pos;\n    // direction to light\n    o_lightdir = normalize(view*vec4(get_lightposition(), 1.0) - view_pos).xyz;\n    // direction to camera\n    // This is equivalent to\n    // normalize(view*vec4(eyeposition, 1.0) - view_pos).xyz\n    // (by definition `view * eyeposition = 0`)\n    o_camdir = normalize(-view_pos).xyz;\n\n    frag_uv = get_uv();\n    
frag_uv = vec2(1.0 - frag_uv.y, frag_uv.x);\n    frag_color = tovec4(get_color());\n}\n\n","attribute_updater":"15138701543423319330"}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,-1,0,1,1,-1,1,1]},"type_length":2},"uv":{"flat":{"type":"Float32Array","data":[0,0,0,0,0,0,0,0]},"type_length":2}},"visible":"755152619170014049","uniform_updater":"7093234165909756344","attribute_updater":"16703940574874477647","faces":[0,1,2,1,3,2],"name":"linesegments-6704490737697746484","uuid":"6704490737697746484","uniforms":{"color":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_start":1.5,"color_end":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_end":1.5,"color_start":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"resolution":{"__javascript_type__":"TypedVector","payload":[100,100]},"linewidth":1.5,"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]},"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\nin vec4 frag_color;\n\nvoid main() {\n    
fragment_color = frag_color;\n}\n","vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec2 position;\nvec2 get_position(){return position;}\nin vec2 uv;\nvec2 get_uv(){return uv;}\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\n\n\n// Per instance attributes: \nin vec3 segment_start;\nvec3 get_segment_start(){return segment_start;}\nin vec3 segment_end;\nvec3 get_segment_end(){return segment_end;}\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec2 screen_space(vec4 position)\n{\n    return vec2(position.xy / position.w) * get_resolution();\n}\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\nout vec4 frag_color;\n\nvoid main()\n{\n    mat4 pvm = projection * view * get_model();\n    vec4 point1_clip = pvm * vec4(tovec3(get_segment_start()), 1);\n    vec4 point2_clip = pvm * vec4(tovec3(get_segment_end()), 1);\n    vec2 point1_screen = screen_space(point1_clip);\n    vec2 point2_screen = screen_space(point2_clip);\n    vec2 dir = normalize(point2_screen - point1_screen);\n    vec2 normal = vec2(-dir.y, dir.x);\n    vec4 anchor;\n    float thickness;\n\n    if(position.x == 0.0){\n        anchor = point1_clip;\n        frag_color = tovec4(get_color_start());\n        thickness = get_linewidth_start();\n    }else{\n        anchor = 
point2_clip;\n        frag_color = tovec4(get_color_end());\n        thickness = get_linewidth_end();\n    }\n    frag_color.a = frag_color.a * min(1.0, thickness * 2.0);\n    // I think GLMakie is drawing the lines too thick...\n    // untill we figure out who is right, we need to add 1.0 to linewidth\n    thickness = thickness > 0.0 ? thickness + 1.0 : 0.0;\n    normal *= (((thickness) / 2.0) / get_resolution()) * anchor.w;\n    // quadpos y (position.y) gives us the direction to expand the line\n    vec4 offset = vec4(normal * position.y, 0.0, 0.0);\n    // start, or end of quad, need to use current or next point as anchor\n    gl_Position = anchor + offset;\n\n}\n\n","instance_attributes":{"segment_start":{"flat":{"type":"Float32Array","data":[50,100,0]},"type_length":3},"segment_end":{"flat":{"type":"Float32Array","data":[50,100,100]},"type_length":3}}}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,-1,0,1,1,-1,1,1]},"type_length":2},"uv":{"flat":{"type":"Float32Array","data":[0,0,0,0,0,0,0,0]},"type_length":2}},"visible":"12010878752011511519","uniform_updater":"10469563115738943380","attribute_updater":"16471438189915849652","faces":[0,1,2,1,3,2],"name":"linesegments-16977061433485667660","uuid":"16977061433485667660","uniforms":{"color":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_start":1.5,"color_end":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_end":1.5,"color_start":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"resolution":{"__javascript_type__":"TypedVector","payload":[100,100]},"linewidth":1.5,"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]},"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\nin vec4 frag_color;\n\nvoid main() {\n    
fragment_color = frag_color;\n}\n","vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec2 position;\nvec2 get_position(){return position;}\nin vec2 uv;\nvec2 get_uv(){return uv;}\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\n\n\n// Per instance attributes: \nin vec3 segment_start;\nvec3 get_segment_start(){return segment_start;}\nin vec3 segment_end;\nvec3 get_segment_end(){return segment_end;}\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec2 screen_space(vec4 position)\n{\n    return vec2(position.xy / position.w) * get_resolution();\n}\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\nout vec4 frag_color;\n\nvoid main()\n{\n    mat4 pvm = projection * view * get_model();\n    vec4 point1_clip = pvm * vec4(tovec3(get_segment_start()), 1);\n    vec4 point2_clip = pvm * vec4(tovec3(get_segment_end()), 1);\n    vec2 point1_screen = screen_space(point1_clip);\n    vec2 point2_screen = screen_space(point2_clip);\n    vec2 dir = normalize(point2_screen - point1_screen);\n    vec2 normal = vec2(-dir.y, dir.x);\n    vec4 anchor;\n    float thickness;\n\n    if(position.x == 0.0){\n        anchor = point1_clip;\n        frag_color = tovec4(get_color_start());\n        thickness = get_linewidth_start();\n    }else{\n        anchor = 
point2_clip;\n        frag_color = tovec4(get_color_end());\n        thickness = get_linewidth_end();\n    }\n    frag_color.a = frag_color.a * min(1.0, thickness * 2.0);\n    // I think GLMakie is drawing the lines too thick...\n    // untill we figure out who is right, we need to add 1.0 to linewidth\n    thickness = thickness > 0.0 ? thickness + 1.0 : 0.0;\n    normal *= (((thickness) / 2.0) / get_resolution()) * anchor.w;\n    // quadpos y (position.y) gives us the direction to expand the line\n    vec4 offset = vec4(normal * position.y, 0.0, 0.0);\n    // start, or end of quad, need to use current or next point as anchor\n    gl_Position = anchor + offset;\n\n}\n\n","instance_attributes":{"segment_start":{"flat":{"type":"Float32Array","data":[50,0,0]},"type_length":3},"segment_end":{"flat":{"type":"Float32Array","data":[50,100,0]},"type_length":3}}}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,-1,0,1,1,-1,1,1]},"type_length":2},"uv":{"flat":{"type":"Float32Array","data":[0,0,0,0,0,0,0,0]},"type_length":2}},"visible":"16290910745821115606","uniform_updater":"8397999855680794238","attribute_updater":"4593160458210820783","faces":[0,1,2,1,3,2],"name":"linesegments-16430222308285393006","uuid":"16430222308285393006","uniforms":{"color":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_start":1.5,"color_end":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_end":1.5,"color_start":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"resolution":{"__javascript_type__":"TypedVector","payload":[100,100]},"linewidth":1.5,"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]},"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\nin vec4 frag_color;\n\nvoid main() {\n    
fragment_color = frag_color;\n}\n","vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec2 position;\nvec2 get_position(){return position;}\nin vec2 uv;\nvec2 get_uv(){return uv;}\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\n\n\n// Per instance attributes: \nin vec3 segment_start;\nvec3 get_segment_start(){return segment_start;}\nin vec3 segment_end;\nvec3 get_segment_end(){return segment_end;}\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec2 screen_space(vec4 position)\n{\n    return vec2(position.xy / position.w) * get_resolution();\n}\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\nout vec4 frag_color;\n\nvoid main()\n{\n    mat4 pvm = projection * view * get_model();\n    vec4 point1_clip = pvm * vec4(tovec3(get_segment_start()), 1);\n    vec4 point2_clip = pvm * vec4(tovec3(get_segment_end()), 1);\n    vec2 point1_screen = screen_space(point1_clip);\n    vec2 point2_screen = screen_space(point2_clip);\n    vec2 dir = normalize(point2_screen - point1_screen);\n    vec2 normal = vec2(-dir.y, dir.x);\n    vec4 anchor;\n    float thickness;\n\n    if(position.x == 0.0){\n        anchor = point1_clip;\n        frag_color = tovec4(get_color_start());\n        thickness = get_linewidth_start();\n    }else{\n        anchor = 
point2_clip;\n        frag_color = tovec4(get_color_end());\n        thickness = get_linewidth_end();\n    }\n    frag_color.a = frag_color.a * min(1.0, thickness * 2.0);\n    // I think GLMakie is drawing the lines too thick...\n    // untill we figure out who is right, we need to add 1.0 to linewidth\n    thickness = thickness > 0.0 ? thickness + 1.0 : 0.0;\n    normal *= (((thickness) / 2.0) / get_resolution()) * anchor.w;\n    // quadpos y (position.y) gives us the direction to expand the line\n    vec4 offset = vec4(normal * position.y, 0.0, 0.0);\n    // start, or end of quad, need to use current or next point as anchor\n    gl_Position = anchor + offset;\n\n}\n\n","instance_attributes":{"segment_start":{"flat":{"type":"Float32Array","data":[100,50,0]},"type_length":3},"segment_end":{"flat":{"type":"Float32Array","data":[100,50,100]},"type_length":3}}}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,-1,0,1,1,-1,1,1]},"type_length":2},"uv":{"flat":{"type":"Float32Array","data":[0,0,0,0,0,0,0,0]},"type_length":2}},"visible":"17573378169158573917","uniform_updater":"15403762454889646731","attribute_updater":"10388967762402854712","faces":[0,1,2,1,3,2],"name":"linesegments-16174496116228334602","uuid":"16174496116228334602","uniforms":{"color":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_start":1.5,"color_end":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_end":1.5,"color_start":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"resolution":{"__javascript_type__":"TypedVector","payload":[100,100]},"linewidth":1.5,"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]},"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\nin vec4 frag_color;\n\nvoid main() {\n    
fragment_color = frag_color;\n}\n","vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec2 position;\nvec2 get_position(){return position;}\nin vec2 uv;\nvec2 get_uv(){return uv;}\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\n\n\n// Per instance attributes: \nin vec3 segment_start;\nvec3 get_segment_start(){return segment_start;}\nin vec3 segment_end;\nvec3 get_segment_end(){return segment_end;}\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec2 screen_space(vec4 position)\n{\n    return vec2(position.xy / position.w) * get_resolution();\n}\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\nout vec4 frag_color;\n\nvoid main()\n{\n    mat4 pvm = projection * view * get_model();\n    vec4 point1_clip = pvm * vec4(tovec3(get_segment_start()), 1);\n    vec4 point2_clip = pvm * vec4(tovec3(get_segment_end()), 1);\n    vec2 point1_screen = screen_space(point1_clip);\n    vec2 point2_screen = screen_space(point2_clip);\n    vec2 dir = normalize(point2_screen - point1_screen);\n    vec2 normal = vec2(-dir.y, dir.x);\n    vec4 anchor;\n    float thickness;\n\n    if(position.x == 0.0){\n        anchor = point1_clip;\n        frag_color = tovec4(get_color_start());\n        thickness = get_linewidth_start();\n    }else{\n        anchor = 
point2_clip;\n        frag_color = tovec4(get_color_end());\n        thickness = get_linewidth_end();\n    }\n    frag_color.a = frag_color.a * min(1.0, thickness * 2.0);\n    // I think GLMakie is drawing the lines too thick...\n    // untill we figure out who is right, we need to add 1.0 to linewidth\n    thickness = thickness > 0.0 ? thickness + 1.0 : 0.0;\n    normal *= (((thickness) / 2.0) / get_resolution()) * anchor.w;\n    // quadpos y (position.y) gives us the direction to expand the line\n    vec4 offset = vec4(normal * position.y, 0.0, 0.0);\n    // start, or end of quad, need to use current or next point as anchor\n    gl_Position = anchor + offset;\n\n}\n\n","instance_attributes":{"segment_start":{"flat":{"type":"Float32Array","data":[0,50,0]},"type_length":3},"segment_end":{"flat":{"type":"Float32Array","data":[100,50,0]},"type_length":3}}}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,-1,0,1,1,-1,1,1]},"type_length":2},"uv":{"flat":{"type":"Float32Array","data":[0,0,0,0,0,0,0,0]},"type_length":2}},"visible":"16935688732157034601","uniform_updater":"45197744301434032","attribute_updater":"2507415237541678019","faces":[0,1,2,1,3,2],"name":"linesegments-2514884535658223582","uuid":"2514884535658223582","uniforms":{"color":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_start":1.5,"color_end":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_end":1.5,"color_start":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"resolution":{"__javascript_type__":"TypedVector","payload":[100,100]},"linewidth":1.5,"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]},"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\nin vec4 frag_color;\n\nvoid main() {\n    
fragment_color = frag_color;\n}\n","vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec2 position;\nvec2 get_position(){return position;}\nin vec2 uv;\nvec2 get_uv(){return uv;}\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\n\n\n// Per instance attributes: \nin vec3 segment_start;\nvec3 get_segment_start(){return segment_start;}\nin vec3 segment_end;\nvec3 get_segment_end(){return segment_end;}\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec2 screen_space(vec4 position)\n{\n    return vec2(position.xy / position.w) * get_resolution();\n}\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\nout vec4 frag_color;\n\nvoid main()\n{\n    mat4 pvm = projection * view * get_model();\n    vec4 point1_clip = pvm * vec4(tovec3(get_segment_start()), 1);\n    vec4 point2_clip = pvm * vec4(tovec3(get_segment_end()), 1);\n    vec2 point1_screen = screen_space(point1_clip);\n    vec2 point2_screen = screen_space(point2_clip);\n    vec2 dir = normalize(point2_screen - point1_screen);\n    vec2 normal = vec2(-dir.y, dir.x);\n    vec4 anchor;\n    float thickness;\n\n    if(position.x == 0.0){\n        anchor = point1_clip;\n        frag_color = tovec4(get_color_start());\n        thickness = get_linewidth_start();\n    }else{\n        anchor = 
point2_clip;\n        frag_color = tovec4(get_color_end());\n        thickness = get_linewidth_end();\n    }\n    frag_color.a = frag_color.a * min(1.0, thickness * 2.0);\n    // I think GLMakie is drawing the lines too thick...\n    // untill we figure out who is right, we need to add 1.0 to linewidth\n    thickness = thickness > 0.0 ? thickness + 1.0 : 0.0;\n    normal *= (((thickness) / 2.0) / get_resolution()) * anchor.w;\n    // quadpos y (position.y) gives us the direction to expand the line\n    vec4 offset = vec4(normal * position.y, 0.0, 0.0);\n    // start, or end of quad, need to use current or next point as anchor\n    gl_Position = anchor + offset;\n\n}\n\n","instance_attributes":{"segment_start":{"flat":{"type":"Float32Array","data":[100,0,50]},"type_length":3},"segment_end":{"flat":{"type":"Float32Array","data":[100,100,50]},"type_length":3}}}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)
An exception was thrown in JS: TypeError: Cannot read property 'wgl_camera' of undefined
Additional message: Error while processing message {"payload":{"__javascript_type__":"JSCode","payload":{"source":"    WGLMakie.insert_plot(...__eval_context__[0])\n","context":[["14856892172797685956",[{"vertexarrays":{"position":{"flat":{"type":"Float32Array","data":[0,-1,0,1,1,-1,1,1]},"type_length":2},"uv":{"flat":{"type":"Float32Array","data":[0,0,0,0,0,0,0,0]},"type_length":2}},"visible":"12082864394710572322","uniform_updater":"5277899941245884872","attribute_updater":"1970944605490436150","faces":[0,1,2,1,3,2],"name":"linesegments-4007616765876304453","uuid":"4007616765876304453","uniforms":{"color":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_start":1.5,"color_end":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"linewidth_end":1.5,"color_start":{"__javascript_type__":"TypedVector","payload":[0.800000011920929,0.800000011920929,0.800000011920929,1]},"resolution":{"__javascript_type__":"TypedVector","payload":[100,100]},"linewidth":1.5,"lightposition":{"__javascript_type__":"TypedVector","payload":[1,1,1]},"model":[1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1]},"fragment_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n\nout vec4 fragment_color;\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\nin vec4 frag_color;\n\nvoid main() {\n    
fragment_color = frag_color;\n}\n","vertex_source":"#version 300 es\nprecision mediump int;\nprecision mediump float;\nprecision mediump sampler2D;\nprecision mediump sampler3D;\n// Instance inputs: \nin vec2 position;\nvec2 get_position(){return position;}\nin vec2 uv;\nvec2 get_uv(){return uv;}\n\n// Uniforms: \nuniform vec4 color;\nvec4 get_color(){return color;}\nuniform float linewidth_start;\nfloat get_linewidth_start(){return linewidth_start;}\nuniform vec4 color_end;\nvec4 get_color_end(){return color_end;}\nuniform float linewidth_end;\nfloat get_linewidth_end(){return linewidth_end;}\nuniform vec4 color_start;\nvec4 get_color_start(){return color_start;}\nuniform vec2 resolution;\nvec2 get_resolution(){return resolution;}\nuniform float linewidth;\nfloat get_linewidth(){return linewidth;}\nuniform mat4 model;\nmat4 get_model(){return model;}\n\n\n\n\n// Per instance attributes: \nin vec3 segment_start;\nvec3 get_segment_start(){return segment_start;}\nin vec3 segment_end;\nvec3 get_segment_end(){return segment_end;}\n\nuniform mat4 projection;\nuniform mat4 view;\n\nvec2 screen_space(vec4 position)\n{\n    return vec2(position.xy / position.w) * get_resolution();\n}\nvec3 tovec3(vec2 v){return vec3(v, 0.0);}\nvec3 tovec3(vec3 v){return v;}\n\nvec4 tovec4(vec3 v){return vec4(v, 1.0);}\nvec4 tovec4(vec4 v){return v;}\n\nout vec4 frag_color;\n\nvoid main()\n{\n    mat4 pvm = projection * view * get_model();\n    vec4 point1_clip = pvm * vec4(tovec3(get_segment_start()), 1);\n    vec4 point2_clip = pvm * vec4(tovec3(get_segment_end()), 1);\n    vec2 point1_screen = screen_space(point1_clip);\n    vec2 point2_screen = screen_space(point2_clip);\n    vec2 dir = normalize(point2_screen - point1_screen);\n    vec2 normal = vec2(-dir.y, dir.x);\n    vec4 anchor;\n    float thickness;\n\n    if(position.x == 0.0){\n        anchor = point1_clip;\n        frag_color = tovec4(get_color_start());\n        thickness = get_linewidth_start();\n    }else{\n        anchor = 
point2_clip;\n        frag_color = tovec4(get_color_end());\n        thickness = get_linewidth_end();\n    }\n    frag_color.a = frag_color.a * min(1.0, thickness * 2.0);\n    // I think GLMakie is drawing the lines too thick...\n    // untill we figure out who is right, we need to add 1.0 to linewidth\n    thickness = thickness > 0.0 ? thickness + 1.0 : 0.0;\n    normal *= (((thickness) / 2.0) / get_resolution()) * anchor.w;\n    // quadpos y (position.y) gives us the direction to expand the line\n    vec4 offset = vec4(normal * position.y, 0.0, 0.0);\n    // start, or end of quad, need to use current or next point as anchor\n    gl_Position = anchor + offset;\n\n}\n\n","instance_attributes":{"segment_start":{"flat":{"type":"Float32Array","data":[0,100,50]},"type_length":3},"segment_end":{"flat":{"type":"Float32Array","data":[100,100,50]},"type_length":3}}}]]]}},"msg_type":"2"}
Stack trace:
    TypeError: Cannot read property 'wgl_camera' of undefined
        at add_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:65)
        at /mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:50
        at Array.forEach (<anonymous>)
        at Object.insert_plot (/mnt/storage/sebastian/.julia/packages/WGLMakie/owBBW/src/wglmakie.js:49)
        at eval (eval at deserialize_js (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:162), <anonymous>:3:14)
        at /mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:168
        at process_message (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:365)
        at WebSocket.websocket.onmessage (/mnt/storage/sebastian/.julia/packages/JSServe/ZM5F2/js_dependencies/JSServe.js:451)

This is the full error.

@SebastianM-C SebastianM-C reopened this May 29, 2021
Sign up for free to subscribe to this conversation on GitHub. Already have an account? Sign in.
Labels
None yet
Projects
None yet
Development

No branches or pull requests

2 participants