diff --git a/Packages/com.unity.render-pipelines.core/Editor/MaterialUpgrader.cs b/Packages/com.unity.render-pipelines.core/Editor/MaterialUpgrader.cs
index 777a76b1121..f6349692102 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/MaterialUpgrader.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/MaterialUpgrader.cs
@@ -72,6 +72,7 @@ class KeywordFloatRename
             public float setVal, unsetVal;
         }
         List<KeywordFloatRename> m_KeywordFloatRename = new List<KeywordFloatRename>();
+        Dictionary<string, (string, System.Func<float, bool>)> m_ConditionalFloatRename;
         /// <summary>
         /// Type of property to rename.
         /// </summary>
@@ -220,6 +221,20 @@ public virtual void Convert(Material srcMaterial, Material dstMaterial)
                 dstMaterial.SetFloat(t.property, srcMaterial.IsKeywordEnabled(t.keyword) ? t.setVal : t.unsetVal);
             }
+
+            // Handle conditional float renaming
+            if (m_ConditionalFloatRename != null)
+            {
+                foreach (var (oldName, (newName, condition)) in m_ConditionalFloatRename)
+                {
+                    if (srcMaterial.HasProperty(oldName) &&
+                        condition(srcMaterial.GetFloat(oldName)) &&
+                        dstMaterial.HasProperty(newName))
+                    {
+                        dstMaterial.SetFloat(newName, 1.0f);
+                    }
+                }
+            }
         }
 
         /// <summary>
@@ -316,6 +331,17 @@ public void RenameKeywordToFloat(string oldName, string newName, float setVal, f
             m_KeywordFloatRename.Add(new KeywordFloatRename { keyword = oldName, property = newName, setVal = setVal, unsetVal = unsetVal });
         }
 
+        /// <summary>
+        /// Rename a float property conditionally based on its value
+        /// </summary>
+        /// <param name="oldName">Old property name</param>
+        /// <param name="newName">New property name</param>
+        /// <param name="condition">Condition function that takes the float value and returns true if renaming should occur</param>
+        protected void RenameFloat(string oldName, string newName, System.Func<float, bool> condition)
+        {
+            (m_ConditionalFloatRename ??= new Dictionary<string, (string, System.Func<float, bool>)>())[oldName] = (newName, condition);
+        }
+
         static MaterialUpgrader GetUpgrader(List<MaterialUpgrader> upgraders, Material material)
         {
             if (material == null || material.shader == null)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Atmospheric-Scattering.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Atmospheric-Scattering.md
index 5cd9f6b88d1..8a77f351437 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Atmospheric-Scattering.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Atmospheric-Scattering.md
@@ -11,3 +11,5 @@ HDRP implements an exponential fog, where density varies exponentially with dist
 Instead of using a constant color, fog can use the background sky as a source for color. In this case, HDRP samples the color from different mipmaps of the cubemap generated from the current sky settings. The chosen mip varies linearly between the lowest resolution and the highest resolution mipmaps, depending on the distance from the Camera and the values in the fog component’s **Mip Fog** properties. You can also choose to limit the resolution of the highest mip that HDRP uses. Doing this adds a volumetric effect to the fog and is less resource intensive to use than actual volumetric fog.
 
 Optionally, you can enable volumetric fog for GameObjects close to the camera. It realistically simulates the interaction of lights with fog, which allows for physically plausible rendering of glow and crepuscular rays, which are beams of light that stream through gaps in objects like clouds and trees from a central point.
+
+**Note:** Volumetric fog doesn't support [light rendering layers](Light-Layers).
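The conditional `RenameFloat(string, string, System.Func<float, bool>)` overload added to `MaterialUpgrader` above registers a rename that `Convert` only applies when the source material has the old float property, the condition on its value passes, and the destination material has the new property; in that case the new property is set to 1.0f. The sketch below shows how a pipeline-specific upgrader might call it from its constructor in an editor assembly. The shader name and the meaning of the `_Mode` values are illustrative assumptions, not taken from an actual package, but the `_Mode`/`_AlphaClip` pairing mirrors the `StandardUpgrader` change in UniversalRenderPipelineMaterialUpgrader.cs further down.

```csharp
using UnityEditor.Rendering;

// Illustrative upgrader: the shader and property names are hypothetical stand-ins,
// not an upgrader shipped with a render pipeline package.
class ExampleLitUpgrader : MaterialUpgrader
{
    public ExampleLitUpgrader(string oldShaderName)
    {
        // Point materials that use the legacy shader at its replacement.
        RenameShader(oldShaderName, "Example/Lit");

        // Unconditional rename: the legacy _Mode float always maps onto _Surface.
        RenameFloat("_Mode", "_Surface");

        // Conditional rename: only when _Mode equals 1 (the alpha-tested mode in this
        // hypothetical legacy shader) does Convert set the new _AlphaClip property to 1.0f.
        RenameFloat("_Mode", "_AlphaClip", mode => mode == 1.0f);
    }
}
```

Because the conditional renames are stored in a dictionary keyed on the old property name, registering a second condition for the same source property replaces the first, so each legacy float drives at most one conditional rename.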
diff --git a/Packages/com.unity.render-pipelines.universal/Editor/2D/LightBatchingDebugger/LightBatchingDebugger.cs b/Packages/com.unity.render-pipelines.universal/Editor/2D/LightBatchingDebugger/LightBatchingDebugger.cs index 67b44df185c..f98c64c2e4a 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/2D/LightBatchingDebugger/LightBatchingDebugger.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/2D/LightBatchingDebugger/LightBatchingDebugger.cs @@ -457,18 +457,17 @@ private void Update() private bool IsDirty() { + if (lightCullResult == null) + return false; + bool isDirty = false; // Refresh if layers are added or removed isDirty |= Light2DManager.GetCachedSortingLayer().Count() != batchList.Sum(x => x.LayerNames.Count()); isDirty |= cachedSceneHandle != SceneManager.GetActiveScene().handle; isDirty |= cachedCamPos != Camera.main?.transform.position; - - if (lightCullResult != null) - { - isDirty |= totalLightCount != lightCullResult.visibleLights.Count(); - isDirty |= totalShadowCount != lightCullResult.visibleShadows.Count(); - } + isDirty |= totalLightCount != lightCullResult.visibleLights.Count(); + isDirty |= totalShadowCount != lightCullResult.visibleShadows.Count(); return isDirty; } diff --git a/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs b/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs index 15f93c2d219..c6e8dd58ca6 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs @@ -551,6 +551,7 @@ public StandardUpgrader(string oldShaderName) } RenameFloat("_Mode", "_Surface"); + RenameFloat("_Mode", "_AlphaClip", renderingMode => renderingMode == 1.0f); RenameTexture("_MainTex", "_BaseMap"); RenameColor("_Color", "_BaseColor"); RenameFloat("_GlossyReflections", "_EnvironmentReflections"); diff --git a/Packages/com.unity.shadergraph/Documentation~/Branch-Node.md b/Packages/com.unity.shadergraph/Documentation~/Branch-Node.md index 10152860c73..460dd770d6b 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Branch-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Branch-Node.md @@ -1,17 +1,17 @@ -# Branch Node +# Branch node -## Description +The Branch node adds a dynamic branch to the shader, which outputs a different value depending on whether the input is true or false. -Provides a dynamic branch to the shader. If input **Predicate** is true, this node returns input **True**, otherwise it returns input **False**. The **Branch Node** evaluates the **Predicate** per vertex or per pixel depending on shader stage. Both sides of the branch are evaluated in the shader, and the branch not used is discarded. +Both sides of the branch are evaluated in the shader, and the output from the unused path is discarded. ## Ports -| Name | Direction | Type | Binding | Description | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | |:----------|:----------|:---------------|:--------|:------------| -| Predicate | Input | Boolean | None | Determines which input to return. | -| True | Input | Dynamic Vector | None | Returned if **Predicate** is true. | -| False | Input | Dynamic Vector | None | Returned if **Predicate** is false. | -| Out | Output | Dynamic Vector | None | Output value | +| **Predicate** | Input | Boolean | None | The input to test the value of. 
If you input a float, all values are evaluated as `true` except `0`. | +| **True** | Input | Dynamic Vector | None | The value to output as **Out** if **Predicate** is true. | +| **False** | Input | Dynamic Vector | None | The value to output as **Out** if **Predicate** is false. | +| **Out** | Output | Dynamic Vector | None | Outputs either **True** or **False**. | ## Generated Code Example diff --git a/Packages/com.unity.shadergraph/Documentation~/Circle-Pupil-Animation-Node.md b/Packages/com.unity.shadergraph/Documentation~/Circle-Pupil-Animation-Node.md index db4e37329f3..953c09123af 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Circle-Pupil-Animation-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Circle-Pupil-Animation-Node.md @@ -12,9 +12,8 @@ This node applies a deformation to a normalized IrisUV coordinate to simulate th | name | **Direction** | type | description | | -------------------------- | ------------- | ------- | ------------------------------------------------------------ | -| **IrisUV** | Input | Vector2 | Position of the fragment to shade in object space. | -| **Pupil Radius** | Input | float | Direction of the incident ray in object space. Either from the camera in rasterization or from the previous bounce in ray tracing. | -| **Maximal Pupil Aperture** | Input | float | The normal of the eye surface in object space. | -| **Minimal Pupil Aperture** | Input | float | The index of refraction of the eye (**1.333** by default). | -| **Pupil Apertur** | Input | float | Distance between the end of the cornea and the iris plane. For the default model, this value should be **0.02** | -| **IrisUV** | Output | Vector2 | Position of the refracted point on the iris plane in object space. | +| **Iris UV** | Input | Vector2 | Normalized UV coordinates that can be used to sample either a texture or procedurally generate an Iris Texture. | +| **Pupil Radius** | Input | float | Radius of the pupil in the iris texture as a percentage. | +| **Pupil Aperture** | Input | float | Set the current diameter of the pupil opening. | +| **Maximal Pupil Aperture** | Input | float | Define the largest size the pupil opening can reach. | +| **Minimal Pupil Aperture** | Input | float | Define the smallest size the pupil opening can reach. | \ No newline at end of file diff --git a/Packages/com.unity.shadergraph/Documentation~/Custom-Interpolators.md b/Packages/com.unity.shadergraph/Documentation~/Custom-Interpolators.md index 91ed8318d32..eabcb1a1885 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Custom-Interpolators.md +++ b/Packages/com.unity.shadergraph/Documentation~/Custom-Interpolators.md @@ -1,65 +1,53 @@ -# Custom Interpolators +# Add a custom interpolator -## Description +To pass custom data from the vertex shader to the fragment shader, add a custom interpolator to the vertex context of the Master Stack. -The Custom Interpolator feature provides fine-grained control over the specific calculations Shader Graph uses to bring data from the vertex stage to the pixel stage. - -There are two target audiences for Custom Interpolators: +There are two target audiences for custom interpolators: * Technical Directors and Lead Technical Artists setting up environments for their teams. * Graphics programmers helping artists to optimize content performance. +**Note:** If you use the Built-In Render Pipeline, refer to [Input vertex data into a shader](https://docs.unity3d.com/Manual/SL-VertexProgramInputs.html) instead. 
## Supported data types -Custom interpolators support float, vec2, vec3, and vec4 options. + +Custom interpolators support float, vector 2, vector 3, and vector 4 types. ## Channel limits -The Custom Interpolator feature supports a maximum of 32 channels. A channel is equivalent to four floats. Each float is an interpolator variable. -Different platforms and GPUs have different interpolator variable limits. Exceeding the interpolator limitations of your target platform prevents your shaders from compiling. For detailed information about the number of interpolators supported by common interfaces, see the Unity documentation on [Shader semantics](https://docs.unity3d.com/Manual/SL-ShaderSemantics.html), and view the section **Interpolator count limits**. Test your Custom Interpolators on your target configuration to ensure that your content compiles properly. -Technical directors can set warnings and errors to help their team members avoid creating graphs with too many channels to be compatible with their target pipeline, platform, or GPU. See **Creating channel warnings and errors** below. -## How to use -To use this feature, create a Custom Interpolator block in the Vertex context of the Master Stack and set a name and a data type. Create a vertex node to write data to that interpolator. Use the interpolator in your graph, then connect your graph to the relevant block in the Fragment context. -These instructions include a contextual example illustrating the process of using a Custom Interpolator to fetch per-vertex data from a texture. -To read the HLSL you use to replicate this behavior with the Built In Render Pipeline, see the Unity documentation on [Shader semantics](https://docs.unity3d.com/Manual/SL-ShaderSemantics.html) and view the section **Vertex ID: SV_VertexID**. +A custom interpolator supports a maximum of 32 channels. A channel is equivalent to four floats. Each float is an interpolator variable. -### Creating channel warnings and errors +Different platforms and GPUs may have different limits, which might prevent your shaders compiling. Test your custom interpolators on your build targets to make sure your shaders compile properly. For more information, refer to the **Interpolator count limits** section in [Input vertex data into a shader](https://docs.unity3d.com/Manual/SL-VertexProgramInputs.html). -It is not possible to limit the number of channels a user can create in a Shader Graph. However, it is possible to create alerts to let users know when they are close to or exceeding a certain number of channels. -The **Warning Threshold** lets users know that they are approaching the channel limit, and the **Error Threshold** informs them if they have reached or surpassed that limit. The **Warning Threshold** value must be between 8 and 32 channels. The **Error Threshold** value must be higher than the **Warning Threshold**, and has a minimum value of 8 channels. -To configure these parameters, go to the Unity Editor [Project Settings](https://docs.unity3d.com/Manual/comp-ManagerGroup.html) menu and open the **Custom Interpolator Channel Settings**. +You can't limit the number of channels another user creates in a shader graph. However, to warn users about the limits, go to [Project Settings](https://docs.unity3d.com/Manual/comp-ManagerGroup.html) and set the following: -### Adding a Custom Interpolator block to the Master Stack +- **Warning Threshold** to tell users when they approach the channel limit. The range is 8 to 32 channels. 
+- **Error Threshold** to tell users when they reach or exceed the channel limit. The minimum value is 8 channels, and it must be higher than the **Warning Threshold**. -![](images/custom-interpolators-3.gif) ![](images/custom-interpolators-2.png) +## Add a custom interpolator block to the Master Stack -1. Right-click in the **Vertex** contex to create a block node. +1. Right-click in the **Vertex** context to create a block node. 2. Select **Custom Interpolator**. -3. Select a data type. -4. Enter a name for this interpolator. +3. In the **Node Settings** tab of the **Graph Inspector** window, select a data type, for example **Vector 4**. +4. In the same tab, enter a name for the interpolator. -In the illustrated example, you use the Vector 4 (vec4) data type. +## Write data to the interpolator -### Writing data to the interpolator +1. Right-click in your graph to create a node. +2. Select the type, for example **Vertex ID**. - ![](images/custom-interpolators-1.png) + Custom interpolator blocks support many types of data, so you can connect the data from many other nodes including UV nodes and color nodes. -1. Right-click in your graph to create a node. -2. Select the type **Vertex ID**. -3. Connect this node to the Custom Interpolator block. +3. Connect the node to the custom interpolator block. -In the example, you write Vertex ID values from your graph into the Custom Interpolator. +The graph now writes Vertex ID values into the custom interpolator. -### Reading data from the interpolator +## Read data from the interpolator 1. Right-click in your graph to create a node. 2. Select **Custom Interpolator**. -3. Connect the Custom Interpolator node to the relevant block in the Fragment context. - - ![](images/custom-interpolators-4.png) - -In this example, you connect to the **Base Color** block in order to pass the Vertex ID from the vertex shader to the fragment shader and use it as color output. +3. Connect the **Custom Interpolator** node to the relevant block in the **Fragment** context, for example **Base Color** to use the Vertex ID as color output. -### Deleting the block from the Master Stack +## Delete a custom interpolator -If you delete a Custom Interpolator which is associated with nodes that are still in your graph, Unity displays an alert. If you want to keep using these nodes, you can create a new Custom Interpolator and associate them with it. This prevents the alert from appearing. +If you delete a custom interpolator that's associated with nodes that are still in your graph, Unity displays an alert. If you want to keep using these nodes, you can create a new custom interpolator and associate the nodes with it. This prevents the alert from appearing. diff --git a/Packages/com.unity.shadergraph/Documentation~/Dither-Node.md b/Packages/com.unity.shadergraph/Documentation~/Dither-Node.md index d82a83adfa8..63b8a9e161f 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Dither-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Dither-Node.md @@ -1,20 +1,20 @@ -# Dither Node +# Dither node -## Description +The Dither node adds a structured form of noise to the input. Use the Dither node to reduce the color bands that might appear if you move from a high number of colors to a low number (quantizing), or to simulate transparency by adding random alpha pixels to an opaque object. -Dither is an intentional form of noise used to randomize quantization error. It is used to prevent large-scale patterns such as color banding in images. 
The **Dither** node applies dithering in screen-space to ensure a uniform distribution of the pattern. This can be adjusted by connecting another node to input **Screen Position**. +The Dither node applies dithering in screen space to ensure a uniform distribution of the pattern. To change the space the node uses, connect another node to the **Screen Position** input, such as a [UV node](UV-Nodes.md). -This [Node](Node.md) is commonly used as an input to **Alpha Clip Threshold** on the [Master Node](Master-Stack.md) to give the appearance of transparency to an opaque item. This is useful for creating geometry that appears to be transparent but has the advantages of rendering as opaque, such as writing depth or being rendered in deferred. +To use a dither pattern for transparency, connect the Dither node to the **Alpha Clip Threshold** input in the [Master Stack](Master-Stack.md). As a result, when you adjust the overall alpha value of the material, some pixels are discarded because the alpha value is lower than their alpha clip threshold. This technique is useful for creating geometry that appears to be transparent but has the advantages of rendering as opaque, such as writing to the depth buffer or rendering using a deferred [rendering path](https://docs.unity3d.com/Manual/built-in-rendering-paths.html). ## Ports -| Name | Direction | Type | Binding | Description | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | |:------------ |:-------------|:-----|:---|:---| -| In | Input | Dynamic Vector | None | Input value | -| Screen Position | Input | Vector 4 | Screen Position | Coordinates used to apply dither pattern | -| Out | Output | Dynamic Vector | None | Output value | +| **In** | Input | Dynamic vector | None | The input to dither. The noise stays within the overall minimum and maximum range of the input values. | +| **Screen Position** | Input | Vector 4 | Screen Position | The coordinates Unity uses to calculate the dither pattern. For more information about the options, refer to the [Screen Position node](Screen-Position-Node.md). | +| **Out** | Output | Dynamic vector | None | The dithered output. | -## Generated Code Example +## Generated code example The following example code represents one possible outcome of this node. @@ -33,3 +33,4 @@ void Unity_Dither_float4(float4 In, float4 ScreenPosition, out float4 Out) Out = In - DITHER_THRESHOLDS[index]; } ``` + diff --git a/Packages/com.unity.shadergraph/Documentation~/Emission-Node.md b/Packages/com.unity.shadergraph/Documentation~/Emission-Node.md index bf108042bec..94c2c1f3537 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Emission-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Emission-Node.md @@ -1,30 +1,42 @@ -# Emission Node +# Emission node -The Emission Node allows you to apply emission in your Shader Graph. +The Emission node outputs a color that makes a material appear as a visible source of light. ## Render pipeline compatibility -| **Node** | **Universal Render Pipeline (URP)** | **High Definition Render Pipeline (HDRP)** | -| -------- | ----------------------------------- | ------------------------------------------ | -| Emission | No | Yes | +The Emission node is compatible only with the High Definition Render Pipeline (HDRP). 
## Ports -| Name | Direction | Type | Description | -| :------------ | :-------- | :------------- | :----------------------------------------------------------- | -| **color** | Input | LDR Color(RGB) | Sets the low dynamic range (LDR) color of the emission. | -| **intensity** | Input | Float | Sets the intensity of the emission color. | -| **output** | Output | HDR Color(RGB) | Outputs the high dynamic range (HDR) color that this Node produces. | +| **Name** | **Direction** | **Type** | **Description** | +|-|-|-|-| +| **Color** | Input | Low dynamic range (LDR) RGB color | Sets the low dynamic range (LDR) color to make emissive. | +| **Intensity** | Input | Float | Sets the intensity of the emission of the output color. | +| **Exposure Weight** | Input | Float | Sets how much the [exposure](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest/index.html?subfolder=/manual/Override-Exposure.html) of the scene affects emission. The range is between 0 and 1. A value of 0 means that exposure does not affect this part of the emission. A value of 1 means that exposure fully affects this part of the emission. | +| **Output** | Output | High dynamic range (HDR) RGB color | The emissive color. | -## Notes +## Properties -### Emission Unit +| **Name** | **Description** | +|-|-| +| **Intensity Unit** | Sets the unit of the **Intensity** property. For more information, refer to [Understand physical light units](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest/index.html?subfolder=/manual/Physical-Light-Units.html). The options are:
  • **Nits**: Sets the units as nits, which is a measure of luminance, the surface power of a light source.
  • **EV100**: Sets the units as EV100, which is a measure of exposure value.
| +| **Normalize Color** | Adjusts the intensity of the input color so the red, green, and blue values look similar. As a result, colors are more balanced in the **Output**. | -You can use two [physical light units](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest/index.html?subfolder=/manual/Physical-Light-Units.html) to control the strength of the emission: +## Generated code example -* [Nits](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest/index.html?subfolder=/manual/Physical-Light-Units.html%23Nits). -* [EV100](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest/index.html?subfolder=/manual/Physical-Light-Units.html%23EV). +```hlsl +float3 Unity_HDRP_GetEmissionHDRColor_float(float3 ldrColor, float luminanceIntensity, float exposureWeight) +{ + // Convert the LDR color. This line is generated only if Normalize Color is enabled. + ldrColor = ldrColor * rcp(max(Luminance(ldrColor), 1e-6)); + float3 hdrColor = ldrColor * luminanceIntensity; + float inverseExposureMultiplier = GetInverseCurrentExposureMultiplier(); + hdrColor = lerp(hdrColor * inverseExposureMultiplier, hdrColor, exposureWeight); + return hdrColor; +} +``` -### Exposure Weight -You can use Exposure Weight to determine how exposure affects emission. It is a value between **0** and **1** where. A value of **0** means that exposure does not effect this part of the emission. A value of **1** means that exposure fully affects this part of the emission. +## Additional resources + +- [Add light emission to a material](https://docs.unity3d.com/Manual/StandardShaderMaterialParameterEmission.html) diff --git a/Packages/com.unity.shadergraph/Documentation~/Exposure-Node.md b/Packages/com.unity.shadergraph/Documentation~/Exposure-Node.md index d387e5b4bf3..d754dbe716c 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Exposure-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Exposure-Node.md @@ -1,25 +1,24 @@ -# Exposure Node +# Exposure node -The Exposure Node allows you to get the Camera's exposure value from the current or previous frame. +The Exposure node outputs the exposure value of the current camera. You can output the value from the current frame or the previous frame. ## Render pipeline compatibility -| **Node** | **Universal Render Pipeline (URP)** | **High Definition Render Pipeline (HDRP)** | -| -------- | ----------------------------------- | ------------------------------------------ | -| Exposure | No | Yes | +The Exposure node is compatible only with the High Definition Render Pipeline (HDRP). + +For more information about exposure, refer to [Control exposure](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest/index.html?subfolder=/manual/Override-Exposure.html) in the HDRP manual. ## Ports -| name | **Direction** | type | description | +| **Name** | **Direction** | **Type** | **Description** | |--- | --- | --- | --- | -|**Output** |Output | float | The exposure value.| +| **Output** | Output | Float | The exposure value of the camera. | + +## Type -## Exposure Type +Use the **Type** property to select which exposure value to get. The options are: -You can use Exposure Type to select which exposure value to get. -| name | description | -|--- | ---| -| **CurrentMultiplier** | Gets the Camera's exposure value from the current frame. | -| **InverseCurrentMultiplier** | Gets the inverse of the Camera's exposure value from the current frame. 
| -| **PreviousMultiplier** | Gets the Camera's exposure value from the previous frame. | -| **InversePreviousMultiplier** | Gets the inverse of the Camera's exposure value from the previous frame. | +- **Current Multiplier**: Gets the camera's exposure value from the current frame. +- **Inverse Current Multiplier**: Gets the inverse of the camera's exposure value from the current frame. +- **Previous Multiplier**: Gets the camera's exposure value from the previous frame. +- **Inverse Previous Multiplier**: Gets the inverse of the camera's exposure value from the previous frame. diff --git a/Packages/com.unity.shadergraph/Documentation~/Normal-Vector-Node.md b/Packages/com.unity.shadergraph/Documentation~/Normal-Vector-Node.md index d5cb972bacf..cf6c4c9dca8 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Normal-Vector-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Normal-Vector-Node.md @@ -1,15 +1,22 @@ -## Description +# Normal Vector node -Provides access to the mesh vertex or fragment's **Normal Vector**. The coordinate space of the output value can be selected with the **Space** dropdown parameter. +The Normal Vector node outputs the normal of a vertex or fragment of a mesh. + +For more information about normals, refer to [Normal maps](https://docs.unity3d.com/Manual/StandardShaderMaterialParameterNormalMapLanding.html). ## Ports -| Name | Direction | Type | Binding | Description | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | |:------------ |:-------------|:-----|:---|:---| -| Out | Output | Vector 3 | None | Mesh's **Normal Vector**. | +| **Out** | Output | Vector 3 | None | The normal of the vertex or fragment of the mesh, depending on the [shader stage](Shader-Stage.md) of the graph section. | + +## Space -## Parameters +The **Space** dropdown determines the coordinate space of the normal vector. -| Name | Type | Options | Description | -|:------------ |:-------------|:-----|:---| -| Space | Dropdown | Object, View, World, Tangent | Selects coordinate space of **Normal Vector** to output. | +| **Option** | **Description** | +|-|-| +| **Object** | Returns the vertex or fragment normal in object space, where up is the up axis of local space. | +| **View** | Returns the vertex or fragment normal in view space, where up is the up direction of the camera. | +| **World** | Returns the vertex or fragment normal in world space, where up is the up direction of the scene. | +| **Tangent** | Returns the vertex or fragment normal in tangent space, where up is away from the surface of the mesh. | diff --git a/Packages/com.unity.shadergraph/Documentation~/Not-Node.md b/Packages/com.unity.shadergraph/Documentation~/Not-Node.md index 600c17a6837..bd098259e76 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Not-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Not-Node.md @@ -1,23 +1,18 @@ -# Not Node +# Not node -## Description - -Returns the opposite of input **In**. If **In** is true the output will be false, otherwise it will be true. This is useful for [Branching](Branch-Node.md). +The Not node outputs the opposite of an input. If the input is true the output is false, otherwise the output is true. This node is useful for [branching](Branch-Node.md). 
## Ports -| Name | Direction | Type | Binding | Description | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | |:------------ |:-------------|:-----|:---|:---| -| In | Input | Boolean | None | Input value | -| Out | Output | Boolean | None | Output value | +| **In** | Input | Boolean | None | The input value. | +| **Out** | Output | Boolean | None | The opposite of **In**. | -## Generated Code Example +## Generated code example The following example code represents one possible outcome of this node. ``` -void Unity_NormalUnpack_float(float In, out float Out) -{ - Out = !In; -} +Out = !In; ``` diff --git a/Packages/com.unity.shadergraph/Documentation~/Object-Node.md b/Packages/com.unity.shadergraph/Documentation~/Object-Node.md index fdad51f00db..c01fb9d4348 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Object-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Object-Node.md @@ -1,32 +1,33 @@ -# Object Node +# Object node -## Description +The Object node outputs the position, scale, or bounds of the overall GameObject that Unity is currently rendering. -Provides access to various parameters of the currently rendering **Object**. +## Render pipeline compatibility -Note: The behaviour of the Position [Port](Port.md) can be defined per Render Pipeline. Different Render Pipelines may produce different results. If you're building a shader in one Render Pipeline that you want to use in both, try checking it in both pipelines before production. +The Object node is compatible with the following render pipelines: -#### Unity Render Pipelines Support -- Universal Render Pipeline -- High Definition Render Pipeline +- Universal Render Pipeline (URP) +- High Definition Render Pipeline (HDRP) + +**Note:** The output of the **Position** port might depend on the render pipeline you use. If you use your shader in both URP and HDRP, check the results in both pipelines before you use the shader in production. ## Ports -| Name | Direction | Type | Binding | Description | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | |:------------ |:-------------|:-----|:---|:---| -| Position | Output | Vector 3 | None | Object position in world space | -| Scale | Output | Vector 3 | None | Object scale in world space | -| World Bounds Min | Output | Vector 3 | None | Minimum value of the renderer bounds in world space | -| World Bounds Max | Output | Vector 3 | None | Maximum value of the renderer bounds in world space | -| Bounds Size | Output | Vector 3 | None | Size of the renderer bounds | +| **Position** | Output | Vector 3 | None | The position of the overall GameObject in world space. | +| **Scale** | Output | Vector 3 | None | The scale of the overall GameObject in world space | +| **World Bounds Min** | Output | Vector 3 | None | The minimum position of the axis-aligned bounding box that fully encloses the GameObject in world space. | +| **World Bounds Max** | Output | Vector 3 | None | The maximum position of the bounding box. | +| **Bounds Size** | Output | Vector 3 | None | The total size of the bounding box. | -Note: the bounds values are the equivalent of [the bounds in the renderer component](https://docs.unity3d.com/ScriptReference/Renderer-bounds.html). This means that vertex deformation done in ShaderGraph doesn't affect these values. +**Note:** The bounds values are equivalent to the [bounds in the Renderer component](https://docs.unity3d.com/ScriptReference/Renderer-bounds.html) of the GameObject. 
If you deform the vertices in the shader graph, the bounds in the Renderer component don't change. -## Generated Code Example +## Generated code example The following example code represents one possible outcome of this node. -``` +```hlsl float3 _Object_Position = SHADERGRAPH_OBJECT_POSITION; float3 _Object_Scale = float3(length(float3(UNITY_MATRIX_M[0].x, UNITY_MATRIX_M[1].x, UNITY_MATRIX_M[2].x)), length(float3(UNITY_MATRIX_M[0].y, UNITY_MATRIX_M[1].y, UNITY_MATRIX_M[2].y)), diff --git a/Packages/com.unity.shadergraph/Documentation~/Replace-Color-Node.md b/Packages/com.unity.shadergraph/Documentation~/Replace-Color-Node.md index 5534cfe1ffe..e85467cf9c0 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Replace-Color-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Replace-Color-Node.md @@ -1,21 +1,19 @@ -# Replace Color Node +# Replace Color node -## Description - -Replaces values in input **In** equal to input **From** to the value of input **To**. Input **Range** can be used to define a wider range of values around input **From** to replace. Input **Fuzziness** can be used to soften the edges around the selection similar to anti-aliasing. +The Replace Color node replaces a color in the input with another color. ## Ports -| Name | Direction | Type | Binding | Description | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | |:------------ |:-------------|:-----|:---|:---| -| In | Input | Vector 3 | None | Input value | -| From | Input | Vector 3 | Color | Color to replace | -| To | Input | Vector 3 | Color | Color to replace with | -| Range | Input | Float | None | Replace colors within this range from input **From** | -| Fuzziness | Input | Float | None | Soften edges around selection | -| Out | Output | Vector 3 | None | Output value | +| **In** | Input | Vector 3 | None | Sets the input you want to replace a color in. For example, a texture. | +| **From** | Input | Vector 3 | Color | Sets the color to replace. | +| **To** | Input | Vector 3 | Color | Sets the color to replace **From** with. | +| **Range** | Input | Float | None | Sets the range around **From** to replace. For example, if you set **From** to (0, 0, 0) and **Range** to 0.1, Unity replaces colors from (0, 0, 0) to (0.1, 0.1, 0.1) with **To**. | +| **Fuzziness** | Input | Float | None | Sets how much to soften the boundary between the replaced color and the rest of the colors. | +| **Out** | Output | Vector 3 | None | The **In** input, with the **From** color replaced with the **To** color. | -## Generated Code Example +## Generated code example The following example code represents one possible outcome of this node. @@ -23,6 +21,8 @@ The following example code represents one possible outcome of this node. 
void Unity_ReplaceColor_float(float3 In, float3 From, float3 To, float Range, float Fuzziness, out float3 Out) { float Distance = distance(From, In); + + // Use max to avoid division by zero Out = lerp(To, In, saturate((Distance - Range) / max(Fuzziness, 1e-5f))); } ``` diff --git a/Packages/com.unity.shadergraph/Documentation~/Scene-Color-Node.md b/Packages/com.unity.shadergraph/Documentation~/Scene-Color-Node.md index a33f6540be0..953f99047f4 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Scene-Color-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Scene-Color-Node.md @@ -1,34 +1,33 @@ -# Scene Color Node +# Scene Color node -## Description +The Scene Color node samples the color buffer of the current camera, using the screen space coordinates you input. -Provides access to the current **Camera**'s color buffer using input **UV**, which is expected to be normalized screen coordinates. +If you use the Universal Render Pipeline (URP), the node samples the opaque texture, which is a copy of the color buffer before Unity renders transparent objects. For more information, refer to [Universal Render Pipeline asset reference](https://docs.unity3d.com/Manual/urp/universalrp-asset.html). -The behavior of the Scene Color node isn't defined globally. The executed HLSL code for the Scene Color node is defined per **Render Pipeline**, and different **Render Pipelines** can produce different results. Custom **Render Pipelines** that wish to support the Scene Color node need to explicitly define the behavior for it. If the behavior is undefined, the Scene Color node returns 0 (black). +To make sure the Scene Color node outputs the correct values, follow these steps: -In the **Universal Render Pipeline** the Scene Color node returns the value of the **Camera Opaque Texture**. Refer to the **Universal Render Pipeline** for more documentation on this feature. The contents of this texture are only available for **Transparent** objects. Set the **Surface Type** dropdown on the [**Graph Settings** tab](Graph-Settings-Tab.md) of the [**Graph Inspector**](Internal-inspector.md) to **Transparent** to receive the correct values from this node. +1. Connect the node to the fragment [shader stage](Shader-Stage.md). The Scene Color node doesn't support the vertex shader stage. +2. In the **Graph Settings** tab of the [**Graph Inspector**](Internal-inspector.md) window, set **Surface Type** to **Transparent**. Otherwise, the node samples the color buffer before Unity renders all the opaque contents in the scene. ->[!NOTE] ->You can only use the Scene Color node in the **Fragment** [Shader Stage](Shader-Stage.md). +## Render pipeline support -#### Supported Unity render pipelines +The Scene Color node supports the following render pipelines: -The following table indicates which render pipelines support the Scene Color node. When used with unsupported render pipelines, the Scene Color node returns 0 (black). +- Universal Render Pipeline (URP) +- High Definition Render Pipeline (HDRP) -|Pipeline | Supported | -|:--------------------------------|:----------| -| Built-in Render Pipeline | No | -| Universal Render Pipeline | Yes | -| High Definition Render Pipeline | Yes | +If you use the Scene Color node with an unsupported pipeline, it returns 0 (black). 
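In URP, the opaque texture the Scene Color node samples is only generated when the active URP asset (or the camera's override) has **Opaque Texture** enabled. The following editor- or runtime-side sketch shows one way to check and enable that setting from C#. It assumes the active pipeline asset is a `UniversalRenderPipelineAsset` and that `supportsCameraOpaqueTexture` is the scripting property backing the **Opaque Texture** toggle, which is the case in current URP versions.

```csharp
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

public static class SceneColorSetupCheck
{
    // Enables the Opaque Texture on the active URP asset so the Scene Color
    // node has data to sample. Returns false if URP is not the active pipeline.
    public static bool EnsureOpaqueTextureEnabled()
    {
        var urpAsset = GraphicsSettings.currentRenderPipeline as UniversalRenderPipelineAsset;
        if (urpAsset == null)
            return false;

        if (!urpAsset.supportsCameraOpaqueTexture)
            urpAsset.supportsCameraOpaqueTexture = true;

        return true;
    }
}
```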
## Ports -| Name | Direction | Type | Binding | Description | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | |:-----|:----------|:---------|:----------------|:------------| -| UV | Input | Vector 4 | Screen Position | Normalized screen coordinates | -| Out | Output | Vector 3 | None | Output value | +| **UV** | Input | Vector 4 | Screen position | The normalized screen space coordinates to sample from. | +| **Out** | Output | Vector 3 | None | The color value from the color buffer at the **UV** coordinates. | -## Generated Code Example +## Generated code example + +The HLSL code this node generates depends on the render pipeline you use. If you use your own custom render pipeline, you must define the behavior of the node yourself. Otherwise, the node returns a value of 0 (black). The following example code represents one possible outcome of this node. diff --git a/Packages/com.unity.shadergraph/Documentation~/Scene-Depth-Difference-Node.md b/Packages/com.unity.shadergraph/Documentation~/Scene-Depth-Difference-Node.md index 165dabe773e..ee7ed98b9dc 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Scene-Depth-Difference-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Scene-Depth-Difference-Node.md @@ -1,19 +1,19 @@ -# Scene Depth Difference +# Scene Depth Difference node -## Description - -Provide a difference between a World Space Position and a Depth value for a given UV. +The Scene Depth Difference node returns the difference in depth between a world space position and a value from the depth buffer. ## Ports -| Name | Direction | Type | Binding | Description | -|:-------|:-----------|:------|:--------|:------------| -| Scene UV | Input | Vector4 | None | UV where to sample the depth. | -| Position WS | Input | Vector3 | None | The world space position to compare with scene depth. | -| Out | Output | Float | None | The difference between PositionWS and the depth. The difference is given relative to camera with **Eye** mode, in depth-buffer-value with **Raw** mode and in Linear value remap between 0 and 1 with the **Linear01** Mode. | +| **Name** | **Direction** | **Type** | **Binding** | **Description** | +|-|-|-|-|-| +| **Scene UV** | Input | Vector 4 | None | Sets the normalized coordinates at which to fetch the scene depth from the depth buffer. The default is the normalized x, y coordinates of the fragment in screen space. For more information about the options, refer to the [Screen Position node](Screen-Position-Node.md). | +| **Position WS** | Input | Vector 3 | None | Sets the world space position to compare the depth value at **Scene UV** to. The default is the x, y, z position of the fragment in world space. | +| **Out** | Output | Float | None | The difference in depth between **Scene UV** and **Position WS**. The value depends on the **Sampling mode** property. The distance is negative if the depth value from **Scene UV** is closer to the camera than the depth from **Position WS**. | -## Controls +## Sampling modes -| Name | Type | Options | Description | -|:------------ |:-------------|:-----|:---| -| Mode | Dropdown | Select **Linear01** to have a value between 0 and 1, **Eye** to have a World-Space value comparable to unit used on the scene and **Raw** if it's used with SceneDepthBuffer. | +| **Name** | **Description** | +|----------|------------------------------------| +| **Linear 01** | Returns the distance in linear normalized space. The minimum distance is 0, and the maximum distance is 1. 
| +| **Raw** | Returns the distance in the non-linear space the depth buffer uses. The minimum distance is 0, and the maximum distance is 1. | +| **Eye** | Returns the distance in meters. | diff --git a/Packages/com.unity.shadergraph/Documentation~/Voronoi-Node.md b/Packages/com.unity.shadergraph/Documentation~/Voronoi-Node.md index 64df71b4855..1250c9bbe7d 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Voronoi-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Voronoi-Node.md @@ -39,7 +39,6 @@ void Unity_Voronoi_float(float2 UV, float AngleOffset, float CellDensity, out fl { float2 g = floor(UV * CellDensity); float2 f = frac(UV * CellDensity); - float t = 8.0; float3 res = float3(8.0, 0.0, 0.0); for(int y=-1; y<=1; y++)