rendering – Adding lighting to custom vertex shader in Unity

What’s the simplest way to add shadow functionality based on the modified vertices? Nothing I’ve found online seems to work.

 Shader "Unlit/HyperUnlit"
 {
     Properties
     {
         _MainTex ("Texture", 2D) = "white" {}
         _Color("Color", Color) = (1,1,1,1)
     }
     SubShader
     {
         Tags { "RenderType"="Opaque" }
         LOD 100

         Pass
         {
             CGPROGRAM
             #pragma vertex vert
             #pragma fragment frag
             #include "UnityCG.cginc"
             #include "Complex.cginc"

             struct appdata
             {
                 float4 vertex : POSITION;
                 float2 uv : TEXCOORD0;
             };

             struct v2f
             {
                 float2 uv : TEXCOORD0;
                 float4 vertex : SV_POSITION;
             };

             sampler2D _MainTex;
             float4 _MainTex_ST;
             fixed4 _Color;
             float Tscale;
             float3 Tpos;
             float klein;
             float Rscale;
             // Mobius Transformation coefficients (a,b,c,d), set from script
             float2 a;
             float2 b;
             float2 c;
             float2 d;

             v2f vert (appdata v)
             {
                 v2f o;
                 // 1. Object -> world space, reduced to float3
                 //    (BUG FIX: the vertex argument was missing here)
                 float3 worldPos = mul(unity_ObjectToWorld, float4(v.vertex.xyz, 1)).xyz;
                 // 2. Apply linear transformations
                 worldPos *= Tscale;
                 worldPos += Tpos;
                 // 3. Apply Mobius transformation (from Complex.cginc)
                 worldPos = MobiusXZ(a, b, c, d, worldPos);
                 // 4. Optionally map the Poincare model to the Klein model
                 if (klein == 1)
                     worldPos = PoincareToKlein(worldPos);
                 // 5. Scale to disk radius
                 worldPos *= Rscale;
                 // World -> clip space; object-to-world was already applied above,
                 // so only the view-projection matrix is used here.
                 o.vertex = mul(UNITY_MATRIX_VP, float4(worldPos, 1));
                 //o.vertex = UnityObjectToClipPos(v.vertex);
                 o.uv = TRANSFORM_TEX(v.uv, _MainTex);
                 return o;
             }

             fixed4 frag (v2f i) : SV_Target
             {
                 // sample the texture, tinted by _Color
                 fixed4 col = tex2D(_MainTex, i.uv) * _Color;
                 return col;
             }
             ENDCG
         }

         // NOTE(review): this stock shadow-caster pass transforms the ORIGINAL
         // vertices, so shadows will not follow the hyperbolic deformation done
         // in vert() above — a custom ShadowCaster pass sharing the same vertex
         // math is needed for matching shadows.
         UsePass "Legacy Shaders/VertexLit/SHADOWCASTER"
     }
 }

CUDA API to render using Physically Based Rendering (PBR) in an OpenGL-ES based project

Thanks for contributing an answer to Game Development Stack Exchange!

  • Please be sure to answer the question. Provide details and share your research!

But avoid

  • Asking for help, clarification, or responding to other answers.
  • Making statements based on opinion; back them up with references or personal experience.

Use MathJax to format equations. MathJax reference.

To learn more, see our tips on writing great answers.

Django CreateView without template rendering

I have a problem with saving a form in my CreateView, I found
this solution and it worked for me:

class ScheduleDocumentView(CreateView):

    def post(self, request, pk, *args, **kwargs):
        """Handle a document upload posted from the schedule detail page.

        Validates the form, attaches the document to the Schedule identified
        by ``pk``, persists it, and redirects back to the detail view.
        """
        form = ScheduleDocumentForm(request.POST, request.FILES)
        if form.is_valid():
            form.instance.relates_to = Schedule.objects.get(pk=pk)
            # BUG FIX: the validated instance was never persisted.
            form.save()
        # Redirect regardless; invalid uploads are silently dropped
        # (NOTE(review): consider surfacing form.errors to the user).
        return redirect('planning:schedule-detail', pk=pk)

However my goal is to save a form using form_valid() and get_success_url() without a template in CreateView. I tried something like this(doesn’t work):

class ScheduleDocumentView(CreateView):
    model = ScheduleDocument
    form_class = ScheduleDocumentForm

    def form_valid(self, form):
        """Attach the parent Schedule, then let CreateView save and redirect.

        BUG FIX: ``pk`` is not a local name inside class-based-view methods;
        URL keyword arguments live in ``self.kwargs``. Also, returning
        ``super().form_valid(form)`` is what actually saves the object and
        redirects to ``get_success_url()`` — the original returned a redirect
        directly, so nothing was ever saved.
        """
        form.instance.schedule = Schedule.objects.get(pk=self.kwargs['pk'])
        return super().form_valid(form)

    def get_success_url(self):
        # Send the user back to the schedule they uploaded the document for.
        return reverse('planning:schedule-detail',
                       kwargs={'pk': self.kwargs['pk']})

It requires a template, is there any other way to handle my post request in DetailView, process it in separate CreateView and redirect it to my DetailView page?

Here’s my template for DetailView:

<!-- BUG FIX: the url tag was missing the pk argument that the
     'planning:upload-document' route requires (object is the DetailView's
     default context name — confirm against the view's context). Also the
     <form> tag was never closed. -->
<form enctype="multipart/form-data" action="{% url 'planning:upload-document' pk=object.pk %}" method="post">
{% csrf_token %}
{{ form.as_p }}
<button type="submit" class="button button--secondary">Attach</button>
</form>


path('schedules/<int:pk>/', ScheduleDetailView.as_view(), name='schedule-detail'),
path('schedules/<int:pk>/upload-document/', ScheduleDocumentView.as_view(), name='upload-document'),

views – HOOK_views_pre_render() not rendering the updated results arraycorrectly

I have a D8 view with the main page display and created an embed display to get another dataset. I want to merge the result of both displays and make the embed display result sit at the very bottom, but, I don’t want to create an attachment instead of the embed display. So I wrote this piece of code to achieve this task:

/**
 * Implements hook_views_pre_render().
 *
 * Appends the result rows of the 'embed_1' display of the 'courses' view to
 * the bottom of the 'page_1' display's result set.
 */
function test_views_pre_render(ViewExecutable $view) {
  if ($view->id() == 'courses' && $view->current_display == 'page_1') {
    $view2 = views_get_view_result('courses', 'embed_1');
    $view->result = array_merge($view->result, $view2);
    // BUG FIX: merged rows carry their original ->index values, which now
    // collide with the main display's rows; the row renderer keys off
    // ->index, so duplicates render as empty placeholders (the "3 dots").
    // Re-index sequentially after merging.
    foreach ($view->result as $index => $row) {
      $row->index = $index;
    }
  }
}

When I check the result through xDebug I can see the view output contains the merged data. But, in the preview, I can only see 3 dots, which represent 3 items merged from the embed display onto the main display.

enter image description here

Surely a problem with rendering. Can anyone see if I’m doing anything wrong here? Thanks in advance for your help, and please excuse the question title.

An ECS rendering system – Game Development Stack Exchange

Thanks for contributing an answer to Game Development Stack Exchange!

  • Please be sure to answer the question. Provide details and share your research!

But avoid

  • Asking for help, clarification, or responding to other answers.
  • Making statements based on opinion; back them up with references or personal experience.

Use MathJax to format equations. MathJax reference.

To learn more, see our tips on writing great answers.

rendering – Java AWT drawing tiles leaves gaps

I am trying to make a tile based game in Java and I have noticed a problem with the drawing. There is about a one pixel gap between some of the tiles and I am not sure what is causing it.

Here is a screenshot of it:

Here is my code so far:

Main class:

package Game.main;

import javax.swing.JFrame;

public class Main {
    public static JFrame window;
    public static void main(String() args) {
        window = new JFrame();
        window.setSize(1280, 720);
        Game game = new Game();

Game class:

package Game.main;

import javax.swing.JPanel;

import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Color;

import java.util.ArrayList;

import Game.tile.Tile;

public class Game extends JPanel implements Runnable {

    public Game() {

    boolean running = false;

    int width,height;

    ArrayList<Tile> tiles = new ArrayList<>();

    public void paint(Graphics g1) {
        //cast to graphics2d
        Graphics2D g = (Graphics2D)g1;
        //fill background
        g.fillRect(0, 0, width, height);
        //draw tiles
        for(int i = 0; i < tiles.size(); i++) {
            tiles.get(i).render(g, width, height);

    private void start() {
        Thread gameThread = new Thread(this);
        //create start tiles
        for(int x = 0; x < 1000; x+=10) {
            for(int y = 700; y < 1000; y+=20) {
                Tile tile = new Tile(x, y);

    public void run() {
        running = true;
        while(running) {
            //get window width and height
            width = Main.window.getWidth();
            height = Main.window.getHeight();
            //redraw current frame

Tile class:

package Game.tile;

import java.awt.Graphics2D;
import java.awt.Color;

public class Tile {
    public int x,y;
    public int w,h;
    public Tile(int x, int y) {
        this.x = x;
        this.y = y;
        this.w = 10;
        this.h = this.w*2;
    public void render(Graphics2D g, int width, int height) {
        g.fillRect(this.x*width/1000, this.y*height/1000, this.w*width/1000, this.h*height/1000);


Unity Project Settings: What is the difference between “Graphics -> SRP settings” and “Quality -> Rendering -> URP Asset”

As the title says, I am a bit confused about the aspect that the URP asset is referenced from two different settings:

  • Project Settings -> Graphics -> SRP settings
  • Project Settings -> Quality -> Rendering -> URP Asset

In a blank URP Project, “Project Settings -> Graphics -> SRP settings” references the HighQuality URP asset, and “Project Settings -> Quality -> Rendering -> URP Asset” has three different tiers, all of which reference their respective URP asset (i.e., either LowQuality, MediumQuality, or HighQuality).

Switching from one quality tier to another applies the respective URP asset, i.e., if I switch the quality tier to Low, the LowQuality URP asset is applied, even though the HighQuality one is still set in “Project Settings -> Graphics -> SRP settings”.

So now the question: Why do I have to set a URP asset in “Project Settings -> Graphics -> SRP settings” if it seems that it is overridden by the URP asset defined in the quality tab?

(Screenshot: the “Graphics -> SRP settings” and “Quality -> Rendering -> URP Asset” settings panels.)

How to use conditional rendering in Tsx file (React +Typescript)

I want to apply a condition to my task status: if the value is 0, show "Not-Started"; else if the value is 50, show "In-progress"; otherwise show "Completed".

Now I am using TaskStatus with value like 0,50,100

I am fetching the data through Graph API

enter image description here

Here is my code "item.percentcomplete" is having the value of tasks-status as 0,50,100

<h2>All Planner Tasks </h2>
    <table id="customers">
        <td>Start Date</td>
         <td>End Date</td>

     , key) =>
            <td id = "status">{item.percentComplete}</td>
            <td><a href =' '>{item.title}</a></td>
            <td>{moment(item.startDateTime).format('MM/DD/YYYY ')}</td>
          </tr> )


directx – Rendering a ID3D11Texture2D into a SkImage (SkiaSharp/Avalonia)

I’m currently trying to create an interop layer to render my render target texture into a Skia SkImage. This is being done to facilitate rendering from my graphics API into Avalonia.

I’ve managed to piece together enough code to get everything running without any errors (at least, none that I can see), but when I draw the SkImage I see nothing but a black image.

Of course, these things are easier to describe with code:

private EglPlatformOpenGlInterface _platform;
private AngleWin32EglDisplay _angleDisplay;
// BUG FIX: was `new int() _glTexHandle = new int(1)` — extraction garbled
// the C# array syntax; this is a one-element array to receive the GL handle.
private readonly int[] _glTexHandle = new int[1];

// IDrawingContextImpl context  <-- From Avalonia

_platform = (EglPlatformOpenGlInterface)platform;
_angleDisplay = (AngleWin32EglDisplay)_platform.Display;

IntPtr d3dDevicePtr = _angleDisplay.GetDirect3DDevice();

// Device5 is from SharpDX.
_d3dDevice = new Device5(d3dDevicePtr);

// Texture.GetSharedHandle() is the shared handle of my render target.
_eglTarget = _d3dDevice.OpenSharedResource<Texture2D>(_target.Texture.GetSharedHandle());

// WrapDirect3D11Texture calls eglCreatePbufferFromClientBuffer.
_glSurface = _angleDisplay.WrapDirect3D11Texture(_platform, _eglTarget.NativePointer);

using (_platform.PrimaryEglContext.MakeCurrent())
{
    _platform.PrimaryEglContext.GlInterface.GenTextures(1, _glTexHandle);
}

var fbInfo = new GRGlTextureInfo(GlConsts.GL_TEXTURE_2D, (uint)_glTexHandle[0], GlConsts.GL_RGBA8);
_backendTarget = new GRBackendTexture(_target.Width, _target.Height, false, fbInfo);

using (_platform.PrimaryEglContext.MakeCurrent())
{
    // Here's where we bind the gl surface to our texture object apparently.
    _platform.PrimaryEglContext.GlInterface.BindTexture(GlConsts.GL_TEXTURE_2D, _glTexHandle[0]);

    EglBindTexImage(_angleDisplay.Handle, _glSurface.DangerousGetHandle(), EglConsts.EGL_BACK_BUFFER);

    _platform.PrimaryEglContext.GlInterface.BindTexture(GlConsts.GL_TEXTURE_2D, 0);
}

// context is a GRContext
_skiaSurface = SKImage.FromTexture(context, _backendTarget, GRSurfaceOrigin.BottomLeft, SKColorType.Rgba8888, SKAlphaType.Premul);

// This clears my render target (obviously). I should be seeing this when I draw the image right?

canvas.DrawImage(_skiaSurface, new SKPoint(320, 240));

So, as far as I can tell, this should be working. But as I said before, it’s only showing me a black image. It’s supposed to be cornflower blue. I’ve tried calling Flush on the ID3D11DeviceContext, but I’m still getting the black image.

Anyone have any idea what I could be doing wrong?

rendering – In Unity, why does DrawProceduralNow work in OnRenderObject but DrawProcedural does not?

In Unity, if I call DrawProcedural (or one of its variants) from Update() it works fine, but if I make the exact same call from OnRenderObject() it draws nothing. (It also doesn’t work from OnPreRender or OnPostRender).

HOWEVER, DrawProceduralNow() works fine from all of these functions.

Why is that so? Is it documented somewhere? What can I do to make DrawProcedural and variants work in OnRenderObject? What is different about DrawProceduralNow that makes it work? (Yes, I have tried using material.SetPass(0) in the DrawProcedural version and it still renders nothing.)

I understand that the OnRender functions are called later in the frame and some graphics state is different at that point, but it seems like DrawProcedural is exactly the kind of thing we should be doing in OnRenderObject. I feel like there is some fundamental thing about the Unity rendering pipeline that I am misunderstanding that would clarify all this. (Tested in 2019 LTS and 2020.2.3)

For example:


public class TestRender : MonoBehaviour {

    public Material material;
    private ComputeBuffer vertexBuffer;
    private GraphicsBuffer indexBuffer;
    private ComputeBuffer paramsBuffer;

    private Bounds _bounds;

    void Start() {
        // Instance the material so SetBuffer doesn't mutate a shared asset.
        material = new Material(material);
        // Copy geometry from MeshFilter (you can just use a cube for testing)
        Mesh mesh = GetComponent<MeshFilter>().sharedMesh;
        vertexBuffer = new ComputeBuffer(mesh.vertices.Length, Marshal.SizeOf<Vector3>());
        indexBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Index, mesh.triangles.Length, Marshal.SizeOf<int>());
        // NOTE(review): the buffers are never filled here — presumably
        // SetData calls were lost from the snippet; confirm against the
        // original project.

        paramsBuffer = new ComputeBuffer(5, sizeof(int), ComputeBufferType.IndirectArguments);
        // BUG FIX: was `new int() { ... }` — garbled C# array literal.
        paramsBuffer.SetData(new int[] { mesh.triangles.Length, 1, 0, 0, 0 });
        material.SetBuffer("_VertexBuffer", vertexBuffer);
        _bounds = new Bounds(transform.position, new Vector3(5, 5, 5));
    }

    private void Update() {
        // This works!
        Graphics.DrawProcedural(material, _bounds, MeshTopology.Triangles, indexBuffer, indexBuffer.count);
        // So does this!
        Graphics.DrawProceduralNow(MeshTopology.Triangles, indexBuffer, indexBuffer.count);
    }

    private void OnRenderObject() {
        // This does NOT work
        Graphics.DrawProcedural(material, _bounds, MeshTopology.Triangles, indexBuffer, indexBuffer.count);
        // But this does!
        Graphics.DrawProceduralNow(MeshTopology.Triangles, indexBuffer, indexBuffer.count);
    }
}

Simple shader:

Shader "Test Render" {
     SubShader {
         Pass {
         #include "UnityCG.cginc"
         #pragma target 4.5  
         #pragma vertex vertex_shader
         #pragma fragment fragment_shader
         StructuredBuffer<float3> _VertexBuffer;
         struct v2f {
             float4 pos : SV_POSITION;
         v2f vertex_shader (uint id : SV_VertexID, uint inst : SV_InstanceID)
             v2f o;
             float4 vertex_position =  float4(_VertexBuffer(id),1.0f);
             o.pos = UnityObjectToClipPos(vertex_position);
             return o;
         fixed4 fragment_shader (v2f i) : SV_Target
            fixed4 final = fixed4(0, 0, 1, 1);
            return final;