diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev-Environment-Library.md b/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev-Environment-Library.md
index 975f6a8e3ae..8b836adba35 100644
--- a/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev-Environment-Library.md
+++ b/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev-Environment-Library.md
@@ -4,7 +4,7 @@ An Environment Library is an Asset that contains a list of environments that you
-
+
## Creating an Environment Library
@@ -26,13 +26,13 @@ To add, remove, or duplicate environments, use the toolbar at the bottom of the
| **Button** | **Function** | **Description** |
| ------------------------------------------------------------ | ------------- | ------------------------------------------------------------ |
-|  | **Add** | Click this button to add a new environment to the bottom of the list. |
-|  | **Remove** | Click this button to remove the environment currently selected. Note that the environment that you have selected is the one with the blue frame. |
-|  | **Duplicate** | Click this button to duplicate the currently selected environment and add it as a new environment to the bottom of the list. |
+|  | **Add** | Click this button to add a new environment to the bottom of the list. |
+|  | **Remove** | Click this button to remove the environment currently selected. Note that the environment that you have selected is the one with the blue frame. |
+|  | **Duplicate** | Click this button to duplicate the currently selected environment and add it as a new environment to the bottom of the list. |
## Properties
-
+
| **Property** | **Description** |
| ------------------- | ------------------------------------------------------------ |
diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev.md b/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev.md
index 5bf6be3a328..4b9f4763648 100644
--- a/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev.md
+++ b/Packages/com.unity.render-pipelines.core/Documentation~/Look-Dev.md
@@ -46,7 +46,7 @@ Use the toolbar in the top-left of the window to change which viewing mode Look
### Single viewport
-
+
By default, Look Dev displays a single viewport that contains the Prefab or GameObject you are working with. If you are in another viewing mode, you can click either the number **1** or number **2** button to go back to single view. Each button corresponds to a viewport in Look Dev. Select button **1** to use viewport 1, and button **2** to use viewport 2.
@@ -54,7 +54,7 @@ By default, Look Dev displays a single viewport which contains the Prefab or Gam
### Multi-viewport
-
+
Use multiple viewports to compare different environments and settings for the same Asset. You can arrange viewports:
@@ -73,23 +73,21 @@ When using multiple viewports, it only makes sense to compare different Prefabs
Vertical and horizontal side-by-side viewports show an identical view of your Asset.
-
+
##### Split-screen
In a split-screen view, there is a red/blue manipulation Gizmo that separates the two viewports. For information on how to use this Gizmo, see [Using the manipulation Gizmo](#ManipulationGizmo).
-
+
#### Multi-viewport Camera
By default, Look Dev synchronizes the camera movement for both views. To decouple the Cameras from one another and manipulate them independently, click the **Synchronized Cameras** button between the two numbered Camera buttons.
-
-
To align the cameras with each other, or reset them, click on the drop-down arrow next to the viewport **2** icon:
-
+
@@ -101,13 +99,11 @@ The manipulation Gizmo represents the separation plane between the two viewports
To move the separator, click and drag the straight line of the Gizmo to the location you want.
-
-
#### Changing the orientation and length
To change the orientation and length of the manipulator Gizmo, click and drag the circle at either end of the manipulator. Changing the length of the Gizmo lets you set the orientation and [blending](#Blending) values more precisely.
-)
+
#### Changing the split in increments
@@ -121,7 +117,7 @@ The central white circle on the separator allows you to blend between the two vi
The white circle automatically snaps back into the center when you drag it back. This helps you get back to the default blending value quickly.
-
+
### HDRI environments in Look Dev
diff --git a/Packages/com.unity.render-pipelines.core/Editor/HeaderFoldout.cs b/Packages/com.unity.render-pipelines.core/Editor/HeaderFoldout.cs
index ecf57e7d89b..73dc932d6f2 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/HeaderFoldout.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/HeaderFoldout.cs
@@ -9,8 +9,12 @@ namespace UnityEditor.Rendering
public partial class HeaderFoldout : Foldout
{
const string k_StylesheetPathFormat = "Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldout{0}.uss";
- const string k_Class = "header-foldout";
- const string k_IconName = "header-foldout__icon";
+ const string k_MainClass = "header-foldout";
+ const string k_EnableClass = k_MainClass + "__enable";
+ const string k_IconClass = k_MainClass + "__icon";
+ const string k_LabelClass = k_MainClass + "__label";
+ const string k_HelpButtonClass = k_MainClass + "__help-button";
+ const string k_ContextButtonClass = k_MainClass + "__context-button";
private string m_DocumentationURL;
private Texture2D m_Icon;
@@ -18,6 +22,8 @@ public partial class HeaderFoldout : Foldout
private VisualElement m_HelpButton;
private VisualElement m_ContextMenuButton;
private VisualElement m_IconElement;
+ private Toggle m_Toggle;
+ private Label m_Text;
/// URL to use on the documentation icon. If null, the button doesn't show.
public string documentationURL
@@ -61,51 +67,91 @@ public Texture2D icon
m_IconElement.style.display = m_Icon != null ? DisplayStyle.Flex : DisplayStyle.None;
}
}
+
+ /// Property to get or set the enabled state
+ public bool enabled
+ {
+ get => m_Toggle.value;
+ set => m_Toggle.value = value;
+ }
+
+ /// Property to get or set the visibility of the enable checkbox
+ public bool showEnableCheckbox
+ {
+ get => m_Toggle.style.display == DisplayStyle.Flex;
+ set => m_Toggle.style.display = value ? DisplayStyle.Flex : DisplayStyle.None;
+ }
+
+ /// Quick access to the enable toggle if one needs to register events
+ public Toggle enableToggle => m_Toggle;
+
+ /// Property to get the title
+ public new string text
+ {
+ get => m_Text.text;
+ set => m_Text.text = value;
+ }
/// Constructor
public HeaderFoldout() : base()
{
styleSheets.Add(AssetDatabase.LoadAssetAtPath<StyleSheet>(string.Format(k_StylesheetPathFormat, "")));
styleSheets.Add(AssetDatabase.LoadAssetAtPath<StyleSheet>(string.Format(k_StylesheetPathFormat, EditorGUIUtility.isProSkin ? "Dark" : "Light")));
- AddToClassList(k_Class);
+ AddToClassList(k_MainClass);
RegisterCallback(DelayedInit);
var line = hierarchy[0][0]; // pass by hierarchy to ignore content redirection
+ m_IconElement = new Image()
+ {
+ style =
+ {
+ display = DisplayStyle.None // hidden by default, will be enabled if icon is set
+ }
+ };
+ m_IconElement.AddToClassList(k_IconClass);
+ line.Add(m_IconElement);
+
+ m_Toggle = new Toggle()
+ {
+ value = true
+ };
+ m_Toggle.AddToClassList(k_EnableClass);
+ m_Toggle.RegisterValueChangedCallback(HandleDisabling);
+ m_Toggle.style.display = DisplayStyle.None; // hidden by default
+ line.Add(m_Toggle);
+
+ m_Text = new Label();
+ m_Text.AddToClassList(k_LabelClass);
+ line.Add(m_Text);
+
m_HelpButton = new Button(Background.FromTexture2D(CoreEditorStyles.iconHelp), () => Help.BrowseURL(m_DocumentationURL));
+ m_HelpButton.AddToClassList(k_HelpButtonClass);
m_HelpButton.SetEnabled(!string.IsNullOrEmpty(m_DocumentationURL));
line.Add(m_HelpButton);
- m_ContextMenuButton =
- new Button(Background.FromTexture2D(CoreEditorStyles.paneOptionsIcon), () => ShowMenu())
- {
- style =
- {
- paddingRight = 2
- }
- };
-
+ m_ContextMenuButton = new Button(Background.FromTexture2D(CoreEditorStyles.paneOptionsIcon), () => ShowMenu());
+ m_ContextMenuButton.AddToClassList(k_ContextButtonClass);
m_ContextMenuButton.SetEnabled(m_ContextMenuGenerator != null);
line.Add(m_ContextMenuButton);
-
- m_IconElement = new Image();
- m_IconElement.name = k_IconName;
- m_IconElement.style.display = DisplayStyle.None; // Disable by default, will be enabled if icon is set
- // Delay insertion of icon to happen after foldout is constructed so we can put it in the right place
- RegisterCallbackOnce(evt => line.Insert(1, m_IconElement));
}
void DelayedInit(AttachToPanelEvent evt)
{
//Only show top line if previous item is not a HeaderFoldout to avoid bolder border
- bool shouldShowTopLine = true;
var parent = hierarchy.parent;
int posInParent = parent.hierarchy.IndexOf(this);
- if (posInParent > 0 && parent[posInParent - 1].ClassListContains(k_Class))
- shouldShowTopLine = false;
+ if (posInParent == 0 || !parent[posInParent - 1].ClassListContains(k_MainClass))
+ AddToClassList("first-in-collection");
- style.borderTopWidth = shouldShowTopLine ? 1 : 0;
+ //fix to transfer label assigned in UXML from base label to new label
+ if (!string.IsNullOrEmpty(base.text))
+ {
+ if (string.IsNullOrEmpty(m_Text.text))
+ m_Text.text = base.text;
+ base.text = null;
+ }
}
void ShowMenu()
@@ -113,40 +159,17 @@ void ShowMenu()
var menu = m_ContextMenuGenerator.Invoke();
menu.DropDown(new Rect(m_ContextMenuButton.worldBound.position + m_ContextMenuButton.worldBound.size.y * Vector2.up, Vector2.zero));
}
+
+ void HandleDisabling(ChangeEvent<bool> evt)
+ => contentContainer.SetEnabled(evt.newValue);
}
/// UITK component to display a header-styled foldout. This variant has an enable checkbox.
+ [Obsolete("Please directly use HeaderFoldout now #from(6000.2) (UnityUpgradable) -> HeaderFoldout", false)]
public class HeaderToggleFoldout : HeaderFoldout
{
- private Toggle m_Toggle;
-
- /// Property to get the enablement state
- public bool enabled
- {
- get => m_Toggle.value;
- set => m_Toggle.value = value;
- }
-
- /// Quick access to the enable toggle if one need to register events
- public Toggle enableToggle => m_Toggle;
-
/// Constructor
public HeaderToggleFoldout() : base()
- {
- var line = hierarchy[0][0]; //pass by herarchy to ignore content redirection
- m_Toggle = new Toggle()
- {
- value = true,
- name = "enable-checkbox",
- };
-
- //Need to delay insertion as foldout will be constructed after and we need to squeeze rigth after
- RegisterCallbackOnce(evt => line.Insert(1, m_Toggle));
-
- m_Toggle.RegisterValueChangedCallback(HandleDisabling);
- }
-
- void HandleDisabling(ChangeEvent<bool> evt)
- => contentContainer.SetEnabled(evt.newValue);
+ => showEnableCheckbox = true;
}
}
\ No newline at end of file
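// Illustration only (not part of this change): a minimal sketch of how editor UI code might use the
// consolidated HeaderFoldout API above. The hosting window and its rootVisualElement are assumptions;
// only members introduced or kept in this change (text, documentationURL, showEnableCheckbox, enabled,
// enableToggle) are used.
var foldout = new HeaderFoldout
{
    text = "My Feature",                             // title now goes through the dedicated label
    documentationURL = "https://docs.unity3d.com/",  // a non-empty URL enables the help button
    showEnableCheckbox = true,                       // replaces the now-obsolete HeaderToggleFoldout
    enabled = true
};
// The content container is enabled/disabled automatically; extra reactions can hook the toggle directly.
foldout.enableToggle.RegisterValueChangedCallback(evt => Debug.Log($"Feature enabled: {evt.newValue}"));
foldout.Add(new Label("Feature settings go here"));
rootVisualElement.Add(foldout);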
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs
index 12733d3cc13..e44748d4704 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs
@@ -1070,7 +1070,11 @@ unsafe static void WriteBakingCells(BakingCell[] bakingCells)
AssetDatabase.ImportAsset(cellDataFilename);
AssetDatabase.ImportAsset(cellOptionalDataFilename);
- AssetDatabase.ImportAsset(cellProbeOcclusionDataFilename);
+ // If we did not write a probe occlusion file (because it was zero bytes), don't try to load it (UUM-101480)
+ if (probeOcclusion.Length > 0)
+ {
+ AssetDatabase.ImportAsset(cellProbeOcclusionDataFilename);
+ }
AssetDatabase.ImportAsset(cellBricksDataFilename);
AssetDatabase.ImportAsset(cellSharedDataFilename);
AssetDatabase.ImportAsset(cellSupportDataFilename);
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs
index adeab0c8faa..a6782816899 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs
@@ -372,6 +372,8 @@ void BakingGUI()
{
if (newSet != null) { EditorUtility.SetDirty(newSet); newSet.singleSceneMode = false; }
activeSet = newSet;
+
+ ProbeReferenceVolume.instance.Clear();
ProbeReferenceVolume.instance.SetActiveBakingSet(activeSet);
}
@@ -525,7 +527,7 @@ void UseTemporaryBakingSet(string sceneGUID, ProbeVolumeBakingSet set = null)
set = ScriptableObject.CreateInstance<ProbeVolumeBakingSet>();
set.SetDefaults();
- ProbeReferenceVolume.instance.AddPendingSceneRemoval(sceneGUID);
+ ProbeReferenceVolume.instance.Clear();
}
EditorUtility.SetDirty(set);
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs
index c35644cf7ac..21d8361a651 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs
@@ -120,7 +120,10 @@ public static bool BeginGroup(AnimFloat animation = null)
animation ??= s_AnimFloat;
- GUI.color = Color.Lerp(CoreEditorStyles.backgroundColor * oldColor, CoreEditorStyles.backgroundHighlightColor, animation.value);
+ Color transparent = CoreEditorStyles.backgroundHighlightColor;
+ transparent.a = 0f;
+
+ GUI.color = Color.Lerp(transparent, CoreEditorStyles.backgroundHighlightColor, animation.value);
EditorGUILayout.BeginVertical(CoreEditorStyles.additionalPropertiesHighlightStyle);
GUI.color = oldColor;
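// Illustration only (not part of this change): with the lerp rewritten above, the advanced-properties
// highlight fades in purely through alpha, so an animation value of 0 leaves the underlying UI untinted.
// CoreEditorStyles comes from this file's context; the intermediate variables are just for illustration.
Color highlight = CoreEditorStyles.backgroundHighlightColor;
Color transparent = highlight;
transparent.a = 0f;
Color atStart = Color.Lerp(transparent, highlight, 0f); // fully transparent: no visible highlight
Color atEnd   = Color.Lerp(transparent, highlight, 1f); // fully opaque highlight color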
diff --git a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs
index ef70fe56e88..c670fbd4610 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs
@@ -66,7 +66,11 @@ void InitializeSidePanel()
rootVisualElement.RegisterCallback<GeometryChangedEvent>(_ =>
{
SaveSplitViewFixedPaneHeight(); // Window resized - save the current pane height
- UpdatePanelHeights();
+
+ // TwoPaneSplitView also updates the dragline anchor offset using the same event, conflicting with what we
+ // do here. Deferring our panel height update to the next frame solves a bug with the dragline "jumping" when
+ // the window is resized down vertically and the lower panel is already at minimum height.
+ rootVisualElement.schedule.Execute(UpdatePanelHeights);
});
var contentSplitView = rootVisualElement.Q(Names.kContentContainer);
diff --git a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs
index 04fbe881e80..bdd225a41ca 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs
@@ -35,6 +35,7 @@ static partial class Names
public const string kGridlineContainer = "grid-line-container";
public const string kHoverOverlay = "hover-overlay";
public const string kEmptyStateMessage = "empty-state-message";
+ public const string kPassListCornerOccluder = "pass-list-corner-occluder";
}
static partial class Classes
@@ -191,6 +192,7 @@ static void Init()
{
var window = GetWindow<RenderGraphViewer>();
window.titleContent = new GUIContent("Render Graph Viewer");
+ window.minSize = new Vector2(880f, 300f);
}
[Flags]
@@ -1817,6 +1819,27 @@ void RebuildUI()
RebuildGraphViewerUI();
}
+ void RerouteWheelEvent(VisualElement source, VisualElement target)
+ {
+ source.RegisterCallback<WheelEvent>(evt =>
+ {
+ evt.StopImmediatePropagation();
+
+ // Need to create an intermediate Event to be able to call WheelEvent.GetPooled()
+ var imguiEvt = new Event {
+ type = EventType.ScrollWheel,
+ delta = new Vector2(evt.delta.x, evt.delta.y),
+ mousePosition = evt.mousePosition,
+ modifiers = evt.modifiers
+ };
+ using (var newEvt = WheelEvent.GetPooled(imguiEvt))
+ {
+ newEvt.target = target;
+ target.SendEvent(newEvt);
+ }
+ }, TrickleDown.TrickleDown);
+ }
+
// Initialize, register callbacks & manipulators etc. once
void InitializePersistentElements()
{
@@ -1881,9 +1904,12 @@ void InitializePersistentElements()
resourceGridScrollView.horizontalScroller.valueChanged += value =>
passListScrollView.scrollOffset = new Vector2(value, passListScrollView.scrollOffset.y);
- // Disable mouse wheel on the scroll views that are synced to the resource grid
- resourceListScrollView.RegisterCallback<WheelEvent>(evt => evt.StopImmediatePropagation(), TrickleDown.TrickleDown);
- passListScrollView.RegisterCallback<WheelEvent>(evt => evt.StopImmediatePropagation(), TrickleDown.TrickleDown);
+ // Scroll views are synced to the resource grid, so we don't want them to scroll independently. To have
+ // consistent behavior, redirect the wheel events to the resource grid and let it handle them.
+ RerouteWheelEvent(resourceListScrollView, resourceGridScrollView);
+ RerouteWheelEvent(passListScrollView, resourceGridScrollView);
+ var passListCornerOccluder = rootVisualElement.Q(Names.kPassListCornerOccluder);
+ RerouteWheelEvent(passListCornerOccluder, resourceGridScrollView);
InitializeSidePanel();
}
diff --git a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldout.uss b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldout.uss
index 2c4a86002e5..98d3b2f877d 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldout.uss
+++ b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldout.uss
@@ -1,70 +1,107 @@
.header-foldout
{
- border-width: 1px 0px 1px 0px;
+ border-width: 0px 0px 1px 0px;
border-color: var(--unity-colors-inspector_titlebar-border);
+}
+
+.header-foldout.first-in-collection
+{
+ border-top-width: 1px;
+}
- /* ensure border take all width */
- margin: 0px -6px 0px -31px;
- padding: 0px 0px 0px 0;
+.unity-inspector-element__custom-inspector-container > .header-foldout
+{
+ /* At root of inspector we need to remove the Padding of the InspectorElement */
+ margin: 0px -6px 0px -15px;
}
-.header-foldout > Toggle
+.unity-foldout__toggle
{
- /* ensure background take all width */
margin: 0px 0px 0px 0px;
- padding: 0px 6px 0px 31px;
}
-.header-foldout > Toggle Label
+
+.unity-foldout__toggle Image
{
- -unity-font-style: bold;
- font-size: 13px;
- flex-grow: 1;
+ min-width: 16px;
}
-.header-foldout > Toggle Button
+.unity-foldout__input
{
- background-color: transparent;
- border-width: 0px;
- margin: 1px 2px 1px 0px;
- padding: 0px 0px 0px 0px;
- border-radius: 0px;
+ flex-shrink: 1;
}
-.header-foldout > Toggle Button:hover
+.unity-foldout__toggle #unity-checkmark
{
- background-color: var(--unity-colors-button-background-hover);
+ margin: 0px 2px 0px 4px;
}
-.header-foldout > Toggle Button:disabled
+.header-foldout__icon
{
- display: none;
+ margin: 1px 2px 0px 1px;
+ height: 16px;
+ width: 16px;
}
-.header-foldout > Toggle Image
+.header-foldout__enable
{
- min-width: 16px;
+ margin: 2px 1px 2px 0px;
+ height: 16px;
+ width: 16px;
}
-.header-foldout > #unity-content
+.header-foldout__enable #unity-checkmark
{
- margin: 0px 5px 0px 47px;
+ margin: 0px 2px 0px 2px;
+ background-size: 80% 80%;
}
-.header-foldout #enable-checkbox
+.header-foldout__label
{
- margin: 2px 6px 3px 1px;
+ -unity-font-style: bold;
+ font-size: 13px;
+ flex-grow: 1;
+ flex-shrink: 1;
+ margin: 0px 0px 0px 3px;
+ overflow: hidden;
}
-.header-foldout #enable-checkbox #unity-checkmark
+.header-foldout__help-button
{
- background-size: 80% 80%;
+ background-color: transparent;
+ margin: 2px 4px 1px 3px;
+ padding: 0px 2px 0px 0px;
+ border-width: 0px;
+ border-radius: 2px 2px 2px 2px;
+ width: 16px;
+ height: 16px;
}
-.header-foldout #header-foldout__icon
+.header-foldout__context-button
{
- margin-top: 2px;
- margin-right: 6px;
- height: 16px;
+ background-color: transparent;
+ margin: 2px 5px 1px 0px;
+ padding: 0px 2px 0px 0px;
+ border-width: 0px;
+ border-radius: 2px 2px 2px 2px;
width: 16px;
+ height: 16px;
+}
+
+.header-foldout__help-button:hover,
+.header-foldout__context-button:hover
+{
+ background-color: var(--unity-colors-button-background-hover);
+}
+
+.header-foldout__help-button:disabled,
+.header-foldout__context-button:disabled
+{
+ display: none;
+}
+
+.project-settings-section__content .header-foldout__context-button
+{
+ /* Specific fix for alignment in project settings to keep alignment with category three dots */
+ margin-right: 0px;
}
\ No newline at end of file
diff --git a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutDark.uss b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutDark.uss
index 9aa3af9766d..d77be284af0 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutDark.uss
+++ b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutDark.uss
@@ -3,7 +3,7 @@
border-color: #1f1f1f;
}
-.header-foldout > Toggle
+.unity-foldout__toggle
{
background-color: #323232;
}
\ No newline at end of file
diff --git a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutLight.uss b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutLight.uss
index 0c0cdf686c6..20840862e23 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutLight.uss
+++ b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/HeaderFoldoutLight.uss
@@ -3,7 +3,7 @@
border-color: #999999;
}
-.header-foldout > Toggle
+.unity-foldout__toggle
{
background-color: #d3d3d3;
}
\ No newline at end of file
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/AssemblyInfo.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/AssemblyInfo.cs
index 306bf1bd40a..3441704c446 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/AssemblyInfo.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/AssemblyInfo.cs
@@ -2,3 +2,4 @@
[assembly: InternalsVisibleTo("Unity.RenderPipelines.Core.Editor")]
[assembly: InternalsVisibleTo("Unity.RenderPipelines.Core.Editor.Tests")]
+[assembly: InternalsVisibleTo("UnityEngine.TestTools.Graphics.Contexts")]
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.Validator.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.Validator.cs
index f76c930b549..bf5d886f7ef 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.Validator.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.Validator.cs
@@ -88,7 +88,7 @@ internal static bool IsGPUResidentDrawerSupportedBySRP(GPUResidentDrawerSettings
return false;
#endif
// If we are forcing the system, no need to perform further checks
- if (IsForcedOnViaCommandLine())
+ if (IsForcedOnViaCommandLine() || MaintainContext)
return true;
if (GraphicsSettings.currentRenderPipeline is not IGPUResidentRenderPipeline asset)
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs
index ccf1c993352..1c459a8e22a 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs
@@ -113,7 +113,7 @@ public static void UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderP
public static void ReinitializeIfNeeded()
{
#if UNITY_EDITOR
- if (!IsForcedOnViaCommandLine() && (IsProjectSupported() != IsEnabled()))
+ if (!IsForcedOnViaCommandLine() && !MaintainContext && (IsProjectSupported() != IsEnabled()))
{
Reinitialize();
}
@@ -269,6 +269,8 @@ private static bool IsOcclusionForcedOnViaCommandLine()
#endif
}
+ internal static bool MaintainContext { get; set; } = false;
+
internal static void Reinitialize()
{
var settings = GetGlobalSettingsFromRPAsset();
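// Illustration only (not part of this change): MaintainContext is internal, so this sketch assumes a
// caller in an assembly granted internals access (see the AssemblyInfo change above, e.g. the graphics
// test context assembly). It keeps the GPU Resident Drawer alive across checks that would otherwise
// reinitialize or disable it.
GPUResidentDrawer.MaintainContext = true;   // IsGPUResidentDrawerSupportedBySRP / ReinitializeIfNeeded now early-out
// ... run test code that relies on a stable GPUResidentDrawer instance ...
GPUResidentDrawer.MaintainContext = false;  // restore the normal lifecycle checks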
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs
index 352f617c2bc..0340eca5044 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs
@@ -137,8 +137,8 @@ public bool UpdateShaderVariablesProbeVolumes(CommandBuffer cmd, ProbeVolumesOpt
parameters.skyOcclusionIntensity = skyOcclusion ? probeVolumeOptions.skyOcclusionIntensityMultiplier.value : 0.0f;
parameters.skyOcclusionShadingDirection = skyOcclusion && skyOcclusionShadingDirection;
- parameters.regionCount = m_CurrentBakingSet.bakedMaskCount;
- parameters.regionLayerMasks = supportRenderingLayers ? m_CurrentBakingSet.bakedLayerMasks : 0xFFFFFFFF;
+ parameters.regionCount = m_CurrentBakingSet != null ? m_CurrentBakingSet.bakedMaskCount : 0;
+ parameters.regionLayerMasks = (supportRenderingLayers && m_CurrentBakingSet != null) ? m_CurrentBakingSet.bakedLayerMasks : 0xFFFFFFFF;
parameters.worldOffset = probeVolumeOptions.worldOffset.value;
UpdateConstantBuffer(cmd, parameters);
}
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs
index 2e0da43ec7f..7ce2bb2bc03 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs
@@ -1108,19 +1108,35 @@ static void DecompressSH(ref SphericalHarmonicsL2 shv)
}
}
+ internal static Vector3 DecodeSkyShadingDirection(uint directionIndex)
+ {
+ var precomputedDirections = ProbeVolumeConstantRuntimeResources.GetSkySamplingDirections();
+ Debug.Assert(directionIndex < precomputedDirections.Length + 1);
+ return directionIndex == precomputedDirections.Length ? new Vector3(0.0f, 0.0f, 0.0f) : precomputedDirections[directionIndex];
+ }
+
internal bool GetFlattenedProbeData(
string scenario,
out Vector3[] positions,
out SphericalHarmonicsL2[] irradiance,
- out float[] validity)
+ out float[] validity,
+ out Vector4[] occlusion,
+ out Vector4[] skyOcclusion,
+ out Vector3[] skyOcclusionDirections)
{
positions = null;
irradiance = null;
validity = null;
+ occlusion = null;
+ skyOcclusion = null;
+ skyOcclusionDirections = null;
var positionsList = new List<Vector3>();
var irradianceList = new List<SphericalHarmonicsL2>();
var validityList = new List<float>();
+ var occlusionList = new List<Vector4>();
+ var skyOcclusionList = new List<Vector4>();
+ var skyOcclusionDirectionList = new List<Vector3>();
foreach (var cell in cells.Values)
{
@@ -1162,6 +1178,30 @@ internal bool GetFlattenedProbeData(
positionsList.Add(position);
validityList.Add(cell.data.validity[probeFlatIndex]);
+ var occlusionOffset = probeFlatIndex * 4;
+ float occlusionValue0 = scenarioData.probeOcclusion[occlusionOffset] / 255.0f;
+ float occlusionValue1 = scenarioData.probeOcclusion[occlusionOffset+1] / 255.0f;
+ float occlusionValue2 = scenarioData.probeOcclusion[occlusionOffset+2] / 255.0f;
+ float occlusionValue3 = scenarioData.probeOcclusion[occlusionOffset+3] / 255.0f;
+ occlusionList.Add(new Vector4(occlusionValue0, occlusionValue1, occlusionValue2, occlusionValue3));
+
+ if (cell.data.skyOcclusionDataL0L1.Length > 0)
+ {
+ // sky occlusion L0/L1 SH
+ var skyOccSH_dc = Mathf.HalfToFloat(cell.data.skyOcclusionDataL0L1[probeFlatIndex * 4]);
+ var skyOccSH_x = Mathf.HalfToFloat(cell.data.skyOcclusionDataL0L1[probeFlatIndex * 4 + 1]);
+ var skyOccSH_y = Mathf.HalfToFloat(cell.data.skyOcclusionDataL0L1[probeFlatIndex * 4 + 2]);
+ var skyOccSH_z = Mathf.HalfToFloat(cell.data.skyOcclusionDataL0L1[probeFlatIndex * 4 + 3]);
+ skyOcclusionList.Add(new Vector4(skyOccSH_dc, skyOccSH_x, skyOccSH_y, skyOccSH_z));
+ }
+
+ if (cell.data.skyShadingDirectionIndices.Length > 0)
+ {
+ // sky occlusion direction
+ var skyOccSDI = cell.data.skyShadingDirectionIndices[probeFlatIndex];
+ var skyOcclusionDirection = DecodeSkyShadingDirection(skyOccSDI);
+ skyOcclusionDirectionList.Add(skyOcclusionDirection);
+ }
Vector4 L0_L1Rx = Vector4.zero;
Vector4 L1G_L1Ry = Vector4.zero;
@@ -1261,6 +1301,9 @@ internal bool GetFlattenedProbeData(
positions = positionsList.ToArray();
irradiance = irradianceList.ToArray();
validity = validityList.ToArray();
+ occlusion = occlusionList.ToArray();
+ skyOcclusion = skyOcclusionList.ToArray();
+ skyOcclusionDirections = skyOcclusionDirectionList.ToArray();
return true;
}
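// Illustration only (not part of this change): consuming the extended GetFlattenedProbeData API above.
// The method is internal, so this assumes debug/tooling code with access to it; the scenario name
// "Default" is a placeholder assumption.
var prv = ProbeReferenceVolume.instance;
if (prv.GetFlattenedProbeData("Default",
        out var positions, out var irradiance, out var validity,
        out var occlusion, out var skyOcclusion, out var skyOcclusionDirections))
{
    // occlusion:              per-probe 4-channel probe occlusion, decoded from bytes to [0, 1]
    // skyOcclusion:           per-probe sky occlusion L0/L1 SH (only filled when that data was baked)
    // skyOcclusionDirections: decoded shading directions; the "no direction" index decodes to Vector3.zero
    Debug.Log($"{positions.Length} probes, {skyOcclusion.Length} sky occlusion entries");
}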
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGIContributor.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGIContributor.cs
index 54b467918eb..83e44b9319c 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGIContributor.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGIContributor.cs
@@ -390,10 +390,14 @@ public GIContributors FilterLayerMaskOnly(LayerMask layerMask)
var filteredPrototypes = new List();
foreach (var treeProto in terrain.treePrototypes)
{
- int treeProtoLayerMask = 1 << treeProto.component.gameObject.layer;
+ // check if the mesh renderer exists
+ if (treeProto.component != null)
+ {
+ int treeProtoLayerMask = 1 << treeProto.component.gameObject.layer;
- if ((treeProtoLayerMask & layerMask) != 0)
- filteredPrototypes.Add(treeProto);
+ if ((treeProtoLayerMask & layerMask) != 0)
+ filteredPrototypes.Add(treeProto);
+ }
}
var terrainContrib = new TerrainContributor()
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs
index 1d3b0ba1293..578cf80a25a 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs
@@ -43,41 +43,59 @@ internal static int LastIndex(this ref NativeList list) where T : unmanage
// Datastructure that contains passes and dependencies and allow you to iterate and reason on them more like a graph
internal class CompilerContextData : IDisposable, RenderGraph.ICompiledGraph
{
- public CompilerContextData(int estimatedNumPasses)
+ public CompilerContextData()
{
- passData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);
fences = new Dictionary();
- passNames = new DynamicArray(estimatedNumPasses, false); // T in NativeList cannot contain managed types, so the names are stored separately
- inputData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent);
- outputData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent);
- fragmentData = new NativeList(estimatedNumPasses * 4, AllocatorManager.Persistent);
- randomAccessResourceData = new NativeList(4, AllocatorManager.Persistent); // We assume not a lot of passes use random write
resources = new ResourcesData();
- nativePassData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);// assume nothing gets merged
- nativeSubPassData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);// there should "never" be more subpasses than graph passes
- createData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent); // assume every pass creates two resources
- destroyData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent); // assume every pass destroys two resources
+ passNames = new DynamicArray(0, false); // T in NativeList cannot contain managed types, so the names are stored separately
}
- public void Initialize(RenderGraphResourceRegistry resourceRegistry)
+ void AllocateNativeDataStructuresIfNeeded(int estimatedNumPasses)
+ {
+ // Only first init or if Dispose() has been called through RenderGraph.Cleanup()
+ if (!m_AreNativeListsAllocated)
+ {
+ // These are risky heuristics that only work because we purposely estimate a very high number of passes
+ // We need to fix this with a proper size computation
+ passData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);
+ inputData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent);
+ outputData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent);
+ fragmentData = new NativeList(estimatedNumPasses * 4, AllocatorManager.Persistent);
+ randomAccessResourceData = new NativeList(4, AllocatorManager.Persistent); // We assume not a lot of passes use random write
+ nativePassData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);// assume nothing gets merged
+ nativeSubPassData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);// there should "never" be more subpasses than graph passes
+ createData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent); // assume every pass creates two resources
+ destroyData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent); // assume every pass destroys two resources
+
+ m_AreNativeListsAllocated = true;
+ }
+ }
+
+ public void Initialize(RenderGraphResourceRegistry resourceRegistry, int estimatedNumPasses)
{
resources.Initialize(resourceRegistry);
+ passNames.Reserve(estimatedNumPasses, false);
+ AllocateNativeDataStructuresIfNeeded(estimatedNumPasses);
}
public void Clear()
{
- passData.Clear();
- fences.Clear();
passNames.Clear();
- inputData.Clear();
- outputData.Clear();
- fragmentData.Clear();
- randomAccessResourceData.Clear();
resources.Clear();
- nativePassData.Clear();
- nativeSubPassData.Clear();
- createData.Clear();
- destroyData.Clear();
+
+ if (m_AreNativeListsAllocated)
+ {
+ passData.Clear();
+ fences.Clear();
+ inputData.Clear();
+ outputData.Clear();
+ fragmentData.Clear();
+ randomAccessResourceData.Clear();
+ nativePassData.Clear();
+ nativeSubPassData.Clear();
+ createData.Clear();
+ destroyData.Clear();
+ }
}
public ResourcesData resources;
@@ -272,7 +290,7 @@ internal List GetNativePasses()
// IDisposable implementation
- bool m_Disposed;
+ bool m_AreNativeListsAllocated = false;
~CompilerContextData() => Cleanup();
@@ -284,10 +302,10 @@ public void Dispose()
void Cleanup()
{
- if (!m_Disposed)
- {
- resources.Dispose();
+ resources.Dispose();
+ if (m_AreNativeListsAllocated)
+ {
passData.Dispose();
inputData.Dispose();
outputData.Dispose();
@@ -297,7 +315,8 @@ void Cleanup()
randomAccessResourceData.Dispose();
nativePassData.Dispose();
nativeSubPassData.Dispose();
- m_Disposed = true;
+
+ m_AreNativeListsAllocated = false;
}
}
}
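// Illustration only (not part of this change): lifecycle sketch of the lazy allocation introduced above.
// CompilerContextData is internal; 'resourceRegistry' and the pass-count estimate are placeholders, and
// the real call sites are in NativePassCompiler further down in this diff.
var ctx = new CompilerContextData();        // no native containers allocated yet
ctx.Initialize(resourceRegistry, 1024);     // first Initialize allocates the NativeLists
ctx.Clear();                                // safe: only clears containers that were actually allocated
ctx.Dispose();                              // releases native memory and resets the allocation flag
ctx.Initialize(resourceRegistry, 1024);     // after Dispose, Initialize re-allocates transparently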
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs
index b9ebe8aa866..4a7c581a9e0 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs
@@ -34,15 +34,12 @@ internal struct RenderGraphInputInfo
public NativePassCompiler(RenderGraphCompilationCache cache)
{
m_CompilationCache = cache;
- defaultContextData = new CompilerContextData(k_EstimatedPassCount);
+ defaultContextData = new CompilerContextData();
toVisitPassIds = new Stack<int>(k_EstimatedPassCount);
- m_BeginRenderPassAttachments = new NativeList<AttachmentDescriptor>(FixedAttachmentArray.MaxAttachments, Allocator.Persistent);
}
// IDisposable implementation
- bool m_Disposed;
-
~NativePassCompiler() => Cleanup();
public void Dispose()
@@ -51,12 +48,15 @@ public void Dispose()
GC.SuppressFinalize(this);
}
- void Cleanup()
+ public void Cleanup()
{
- if (!m_Disposed)
+ // If caching enabled, the two can be different
+ contextData?.Dispose();
+ defaultContextData?.Dispose();
+
+ if (m_BeginRenderPassAttachments.IsCreated)
{
m_BeginRenderPassAttachments.Dispose();
- m_Disposed = true;
}
}
@@ -127,7 +127,7 @@ void SetupContextData(RenderGraphResourceRegistry resources)
{
using (new ProfilingScope(ProfilingSampler.Get(NativeCompilerProfileId.NRPRGComp_SetupContextData)))
{
- contextData.Initialize(resources);
+ contextData.Initialize(resources, k_EstimatedPassCount);
}
}
@@ -1190,6 +1190,9 @@ internal unsafe void ExecuteBeginRenderPass(InternalRenderGraphContext rgContext
}
// Filling the attachments array to be sent to the rendering command buffer
+ if(!m_BeginRenderPassAttachments.IsCreated)
+ m_BeginRenderPassAttachments = new NativeList<AttachmentDescriptor>(FixedAttachmentArray.MaxAttachments, Allocator.Persistent);
+
m_BeginRenderPassAttachments.Resize(attachmentCount, NativeArrayOptions.UninitializedMemory);
for (var i = 0; i < attachmentCount; ++i)
{
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs
index bdebac96bd0..7bdd2f70aa4 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs
@@ -657,6 +657,32 @@ public NativePassData(ref PassData pass, CompilerContextData ctx)
TryMergeNativeSubPass(ctx, ref this, ref pass);
}
+ // Gets the best SubPassFlag for a pass that originally had no depth attachment and that we want to merge with this pass.
+ public SubPassFlags GetSubPassFlagForMerging()
+ {
+ // We should not be calling this method if the native pass doesn't have depth.
+ if (hasDepth == false)
+ {
+ throw new Exception("SubPassFlag for merging can not be determined if native pass doesn't have a depth attachment");
+ }
+
+ // Only do this for mobile using Vulkan.
+#if (PLATFORM_ANDROID)
+ if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Vulkan)
+ {
+ // Depth attachment is always at index 0.
+ return (fragments[0].accessFlags.HasFlag(AccessFlags.Write)) ? SubPassFlags.None : SubPassFlags.ReadOnlyDepth;
+ }
+ else
+ {
+ return SubPassFlags.ReadOnlyDepth;
+ }
+#else
+ // By default flag this subpass as ReadOnlyDepth.
+ return SubPassFlags.ReadOnlyDepth;
+#endif
+ }
+
public void Clear()
{
firstGraphPass = 0;
@@ -928,11 +954,11 @@ static bool CanMergeNativeSubPass(CompilerContextData contextData, ref NativePas
SubPassFlags flags = SubPassFlags.None;
- // If depth ends up being bound only because of merging, we explicitly say that we will not write to it
- // which could have been implied by leaving the flag to None
+ // If depth ends up being bound only because of merging
if (!currRenderGraphPassHasDepth && nativePass.hasDepth)
{
- flags = SubPassFlags.ReadOnlyDepth;
+ // Set SubPassFlags to best match the pass we are trying to merge with
+ flags = nativePass.GetSubPassFlagForMerging();
}
ref readonly var fragmentList = ref nativePass.fragments;
@@ -1027,11 +1053,11 @@ public static void TryMergeNativeSubPass(CompilerContextData contextData, ref Na
return;
}
- // If depth ends up being bound only because of merging we explicitly say that we will not write to it
- // which could have been implied by leaving the flag to None
+ // If depth ends up being bound only because of merging
if (!passToMerge.fragmentInfoHasDepth && nativePass.hasDepth)
{
- desc.flags = SubPassFlags.ReadOnlyDepth;
+ // Set SubPassFlags to best match the pass we are trying to merge with
+ desc.flags = nativePass.GetSubPassFlagForMerging();
}
// MRT attachments
@@ -1160,11 +1186,11 @@ static void UpdateNativeSubPassesAttachments(CompilerContextData contextData, re
ref var nativeSubPassDescriptor =
ref contextData.nativeSubPassData.ElementAt(lastVisitedNativeSubpassIdx);
- // If depth ends up being bound only because of merging we explicitly say that we will not write to it
- // which could have been implied by leaving the flag to None
+ // If depth ends up being bound only because of merging
if (!currGraphPass.fragmentInfoHasDepth && nativePass.hasDepth)
{
- nativeSubPassDescriptor.flags = SubPassFlags.ReadOnlyDepth;
+ // Set SubPassFlags to best match the pass we are trying to merge with
+ nativeSubPassDescriptor.flags = nativePass.GetSubPassFlagForMerging();
}
// MRT attachments
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs
index 23381fe193e..c1330b14715 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs
@@ -207,27 +207,38 @@ public ResourcesData()
resourceNames = new DynamicArray[(int)RenderGraphResourceType.Count];
for (int t = 0; t < (int)RenderGraphResourceType.Count; t++)
- {
- // Note: All these lists are allocated with zero capacity, they will be resized in Initialize when
- // the amount of resources is known.
- versionedData[t] = new NativeList(0, AllocatorManager.Persistent);
- unversionedData[t] = new NativeList(0, AllocatorManager.Persistent);
- readerData[t] = new NativeList(0, AllocatorManager.Persistent);
resourceNames[t] = new DynamicArray(0); // T in NativeList cannot contain managed types, so the names are stored separately
- }
}
public void Clear()
{
for (int t = 0; t < (int)RenderGraphResourceType.Count; t++)
{
- unversionedData[t].Clear();
- versionedData[t].Clear();
- readerData[t].Clear();
+ if (unversionedData[t].IsCreated)
+ unversionedData[t].Clear();
+
+ if (versionedData[t].IsCreated)
+ versionedData[t].Clear();
+
+ if (readerData[t].IsCreated)
+ readerData[t].Clear();
+
resourceNames[t].Clear();
}
}
+ void AllocateAndResizeNativeListIfNeeded<T>(ref NativeList<T> nativeList, int size, NativeArrayOptions options) where T : unmanaged
+ {
+ // Allocate the first time or if Dispose() has been called through RenderGraph.Cleanup()
+ // Length remains 0, list is still empty
+ if (!nativeList.IsCreated)
+ nativeList = new NativeList<T>(size, AllocatorManager.Persistent);
+
+ // Resize the list (it will allocate if necessary)
+ // List is not empty anymore
+ nativeList.Resize(size, options);
+ }
+
public void Initialize(RenderGraphResourceRegistry resources)
{
uint maxReaders = 0;
@@ -238,9 +249,9 @@ public void Initialize(RenderGraphResourceRegistry resources)
RenderGraphResourceType resourceType = (RenderGraphResourceType) t;
var numResources = resources.GetResourceCount(resourceType);
- // For each resource type, resize the buffer (only allocate if bigger)
- // We don't clear the buffer as we reinitialize it right after
- unversionedData[t].Resize(numResources, NativeArrayOptions.UninitializedMemory);
+ // We don't clear the list as we reinitialize it right after
+ AllocateAndResizeNativeListIfNeeded(ref unversionedData[t], numResources, NativeArrayOptions.UninitializedMemory);
+
resourceNames[t].Resize(numResources, true);
if (numResources > 0) // Null Resource
@@ -301,8 +312,8 @@ public void Initialize(RenderGraphResourceRegistry resources)
MaxVersions = (int)maxWriters + 1;
// Clear the other caching structures, they will be filled later
- versionedData[t].Resize(MaxVersions * numResources, NativeArrayOptions.ClearMemory);
- readerData[t].Resize(MaxVersions * MaxReaders * numResources, NativeArrayOptions.ClearMemory);
+ AllocateAndResizeNativeListIfNeeded(ref versionedData[t], MaxVersions * numResources, NativeArrayOptions.ClearMemory);
+ AllocateAndResizeNativeListIfNeeded(ref readerData[t], MaxVersions * MaxReaders * numResources, NativeArrayOptions.ClearMemory);
}
}
@@ -340,9 +351,14 @@ public void Dispose()
{
for (int t = 0; t < (int)RenderGraphResourceType.Count; t++)
{
- versionedData[t].Dispose();
- unversionedData[t].Dispose();
- readerData[t].Dispose();
+ if (versionedData[t].IsCreated)
+ versionedData[t].Dispose();
+
+ if (unversionedData[t].IsCreated)
+ unversionedData[t].Dispose();
+
+ if (readerData[t].IsCreated)
+ readerData[t].Dispose();
}
}
}
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs
index c01e7e7a31a..5283d470837 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs
@@ -13,10 +13,17 @@ class RenderGraphDebugParams : IDebugDisplaySettingsQuery
public bool disablePassCulling;
public bool disablePassMerging;
public bool immediateMode;
- public bool enableLogging;
public bool logFrameInformation;
public bool logResources;
+ public bool enableLogging => logFrameInformation || logResources;
+
+ public void ResetLogging()
+ {
+ logFrameInformation = false;
+ logResources = false;
+ }
+
internal void Reset()
{
clearRenderTargetsAtCreation = false;
@@ -24,9 +31,8 @@ internal void Reset()
disablePassCulling = false;
disablePassMerging = false;
immediateMode = false;
- enableLogging = false;
- logFrameInformation = false;
- logResources = false;
+
+ ResetLogging();
}
private static class Strings
@@ -39,7 +45,6 @@ private static class Strings
public static readonly NameAndTooltip EnableLogging = new() { name = "Enable Logging", tooltip = "Enable to allow HDRP to capture information in the log." };
public static readonly NameAndTooltip LogFrameInformation = new() { name = "Log Frame Information", tooltip = "Enable to log information output from each frame." };
public static readonly NameAndTooltip LogResources = new() { name = "Log Resources", tooltip = "Enable to log the current render graph's global resource usage." };
- public static readonly NameAndTooltip EnableNativeCompiler = new() { name = "Enable Native Pass Compiler", tooltip = "Enable the new native pass compiler." };
}
internal List GetWidgetList(string name)
@@ -75,7 +80,8 @@ private static class Strings
{
nameAndTooltip = Strings.DisablePassMerging,
getter = () => disablePassMerging,
- setter = value => disablePassMerging = value
+ setter = value => disablePassMerging = value,
+ isHiddenCallback = () => !RenderGraph.hasAnyRenderGraphWithNativeRenderPassesEnabled
},
new DebugUI.BoolField
{
@@ -85,19 +91,11 @@ private static class Strings
// [UUM-64948] Temporarily disable for URP while we implement support for Immediate Mode in the RenderGraph
isHiddenCallback = () => !IsImmediateModeSupported()
},
- new DebugUI.BoolField
- {
- nameAndTooltip = Strings.EnableLogging,
- getter = () => enableLogging,
- setter = value => enableLogging = value
- },
new DebugUI.Button
{
nameAndTooltip = Strings.LogFrameInformation,
action = () =>
{
- if (!enableLogging)
- Debug.Log("You must first enable logging before logging frame information.");
logFrameInformation = true;
#if UNITY_EDITOR
UnityEditor.SceneView.RepaintAll();
@@ -109,8 +107,6 @@ private static class Strings
nameAndTooltip = Strings.LogResources,
action = () =>
{
- if (!enableLogging)
- Debug.Log("You must first enable logging before logging resources.");
logResources = true;
#if UNITY_EDITOR
UnityEditor.SceneView.RepaintAll();
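// Illustration only (not part of this change): 'debugParams' stands in for the RenderGraphDebugParams
// instance owned by a RenderGraph. With enableLogging now derived from the two request flags, a log
// request is one-shot: the debug window button arms it, and the end of the frame flushes and resets it.
debugParams.logFrameInformation = true;          // what the "Log Frame Information" button now does
bool capturing = debugParams.enableLogging;      // true while any log request is pending
// ... render one frame; RenderGraph.EndFrame() flushes the collected text via FlushLogs() ...
debugParams.ResetLogging();                      // clears both flags (EndFrame does this automatically)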
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs
index cb316a635ea..8ac51762651 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs
@@ -401,6 +401,17 @@ public bool nativeRenderPassesEnabled
get; set;
}
+ internal static bool hasAnyRenderGraphWithNativeRenderPassesEnabled
+ {
+ get
+ {
+ foreach (var graph in s_RegisteredGraphs)
+ if (graph.nativeRenderPassesEnabled)
+ return true;
+ return false;
+ }
+ }
+
internal/*for tests*/ RenderGraphResourceRegistry m_Resources;
RenderGraphObjectPool m_RenderGraphPool = new RenderGraphObjectPool();
RenderGraphBuilders m_builderInstance = new RenderGraphBuilders();
@@ -559,6 +570,18 @@ public void Cleanup()
{
CheckNotUsedWhenActive();
+ ForceCleanup();
+ }
+
+ // Internal, only for testing
+ // Useful when we need to clean up when calling
+ // internal functions in tests, even if the Render Graph is active
+ internal void ForceCleanup()
+ {
+ // Usually done at the end of Execute step
+ // Also doing it here in case RG stopped before it
+ ClearCurrentCompiledGraph();
+
m_Resources.Cleanup();
m_DefaultResources.Cleanup();
m_RenderGraphPool.Cleanup();
@@ -566,7 +589,7 @@ public void Cleanup()
s_RegisteredGraphs.Remove(this);
onGraphUnregistered?.Invoke(this);
- nativeCompiler?.contextData?.Dispose();
+ nativeCompiler?.Cleanup();
m_CompilationCache?.Clear();
@@ -644,15 +667,11 @@ public void EndFrame()
m_Resources.PurgeUnusedGraphicsResources();
if (m_DebugParameters.logFrameInformation)
- {
- Debug.Log(m_FrameInformationLogger.GetAllLogs());
- m_DebugParameters.logFrameInformation = false;
- }
+ m_FrameInformationLogger.FlushLogs();
if (m_DebugParameters.logResources)
- {
m_Resources.FlushLogs();
- m_DebugParameters.logResources = false;
- }
+
+ m_DebugParameters.ResetLogging();
}
///
@@ -2609,7 +2628,7 @@ void LogFrameInformation()
{
if (m_DebugParameters.enableLogging)
{
- m_FrameInformationLogger.LogLine($"==== Staring render graph frame for: {m_CurrentExecutionName} ====");
+ m_FrameInformationLogger.LogLine($"==== Render Graph Frame Information Log ({m_CurrentExecutionName}) ====");
if (!m_DebugParameters.immediateMode)
m_FrameInformationLogger.LogLine("Number of passes declared: {0}\n", m_RenderPasses.Count);
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs
index 04bed65e761..fb21fca2270 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs
@@ -38,7 +38,7 @@ public RenderGraphCompilationCache()
for (int i = 0; i < k_CachedGraphCount; ++i)
{
m_CompiledGraphPool.Push(new RenderGraph.CompiledGraph());
- m_NativeCompiledGraphPool.Push(new CompilerContextData(NativePassCompiler.k_EstimatedPassCount));
+ m_NativeCompiledGraphPool.Push(new CompilerContextData());
}
}
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs
index 5f5b7fef861..8fbe5617fba 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs
@@ -36,20 +36,37 @@ public class RenderGraphDefaultResources
internal RenderGraphDefaultResources()
{
- m_BlackTexture2D = RTHandles.Alloc(Texture2D.blackTexture);
- m_WhiteTexture2D = RTHandles.Alloc(Texture2D.whiteTexture);
- m_ShadowTexture2D = RTHandles.Alloc(1, 1, CoreUtils.GetDefaultDepthOnlyFormat(), isShadowMap: true, name: "DefaultShadowTexture");
+ InitDefaultResourcesIfNeeded();
+ }
+
+ private void InitDefaultResourcesIfNeeded()
+ {
+ if (m_BlackTexture2D == null)
+ m_BlackTexture2D = RTHandles.Alloc(Texture2D.blackTexture);
+
+ if (m_WhiteTexture2D == null)
+ m_WhiteTexture2D = RTHandles.Alloc(Texture2D.whiteTexture);
+
+ if (m_ShadowTexture2D == null)
+ m_ShadowTexture2D = RTHandles.Alloc(1, 1, CoreUtils.GetDefaultDepthOnlyFormat(), isShadowMap: true, name: "DefaultShadowTexture");
}
internal void Cleanup()
{
- m_BlackTexture2D.Release();
- m_WhiteTexture2D.Release();
- m_ShadowTexture2D.Release();
+ m_BlackTexture2D?.Release();
+ m_BlackTexture2D = null;
+
+ m_WhiteTexture2D?.Release();
+ m_WhiteTexture2D = null;
+
+ m_ShadowTexture2D?.Release();
+ m_ShadowTexture2D = null;
}
internal void InitializeForRendering(RenderGraph renderGraph)
{
+ InitDefaultResourcesIfNeeded();
+
blackTexture = renderGraph.ImportTexture(m_BlackTexture2D, true);
whiteTexture = renderGraph.ImportTexture(m_WhiteTexture2D, true);
defaultShadowTexture = renderGraph.ImportTexture(m_ShadowTexture2D, true);
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphLogger.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphLogger.cs
index 43b1c40642f..10bcbd8af2d 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphLogger.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphLogger.cs
@@ -77,17 +77,7 @@ public void LogLine(string format, params object[] args)
m_CurrentBuilder.AppendLine();
}
- public string GetLog(string logName)
- {
- if (m_LogMap.TryGetValue(logName, out var builder))
- {
- return builder.ToString();
- }
-
- return "";
- }
-
- public string GetAllLogs()
+ public void FlushLogs()
{
string result = "";
foreach (var kvp in m_LogMap)
@@ -100,7 +90,7 @@ public string GetAllLogs()
m_LogMap.Clear();
- return result;
+ Debug.Log(result);
}
}
}
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs
index e485bd5dd7b..4b6a3d9be56 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs
@@ -1250,16 +1250,11 @@ internal void Cleanup()
RTHandles.Release(m_CurrentBackbuffer);
}
- internal void FlushLogs()
- {
- Debug.Log(m_ResourceLogger.GetAllLogs());
- }
-
void LogResources()
{
if (m_RenderGraphDebug.enableLogging)
{
- m_ResourceLogger.LogLine("==== Allocated Resources ====\n");
+ m_ResourceLogger.LogLine("==== Render Graph Resource Log ====\n");
for (int type = 0; type < (int)RenderGraphResourceType.Count; ++type)
{
@@ -1272,6 +1267,11 @@ void LogResources()
}
}
+ internal void FlushLogs()
+ {
+ m_ResourceLogger.FlushLogs();
+ }
+
#endregion
}
}
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRGraphicsAutomatedTests.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRGraphicsAutomatedTests.cs
index b1c64f65bad..aa518d9725b 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRGraphicsAutomatedTests.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRGraphicsAutomatedTests.cs
@@ -74,7 +74,7 @@ internal static void OverrideLayout(XRLayout layout, Camera camera)
viewMatrix *= Matrix4x4.Translate(new Vector3(.34f, 0.25f, -0.08f));
}
- XRView xrView = new XRView(projMatrix, viewMatrix, Matrix4x4.identity, false, xrPass.GetViewport(viewId), null, xrPass.GetTextureArraySlice(viewId));
+ XRView xrView = new XRView(projMatrix, viewMatrix, Matrix4x4.identity, false, xrPass.GetViewport(viewId), null, null, xrPass.GetTextureArraySlice(viewId));
xrPass.AssignView(viewId, xrView);
}
}
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs
index 65c9de7b120..960ea65d855 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs
@@ -40,6 +40,7 @@ public class XRPass
{
readonly List<XRView> m_Views;
readonly XROcclusionMesh m_OcclusionMesh;
+ readonly XRVisibleMesh m_VisibleMesh;
///
/// Parameterless constructor.
@@ -49,6 +50,7 @@ public XRPass()
{
m_Views = new List<XRView>(2);
m_OcclusionMesh = new XROcclusionMesh(this);
+ m_VisibleMesh = new XRVisibleMesh(this);
}
///
@@ -68,6 +70,7 @@ public static XRPass CreateDefault(XRPassCreateInfo createInfo)
///
virtual public void Release()
{
+ m_VisibleMesh.Dispose();
GenericPool.Release(this);
}
@@ -284,6 +287,16 @@ public Mesh GetOcclusionMesh(int viewIndex = 0)
return m_Views[viewIndex].occlusionMesh;
}
+ ///
+ /// Returns the visible mesh for a given view.
+ ///
+ /// Index of XRView to retrieve the data from.
+ /// XR visible mesh for the specified XRView.
+ public Mesh GetVisibleMesh(int viewIndex = 0)
+ {
+ return m_Views[viewIndex].visibleMesh;
+ }
+
///
/// Returns the destination slice index (for texture array) for a given view.
///
@@ -372,6 +385,11 @@ public void StopSinglePass(BaseCommandBuffer cmd)
///
public bool hasValidOcclusionMesh { get => m_OcclusionMesh.hasValidOcclusionMesh; }
+ ///
+ /// Returns true if the pass was set up with the expected mesh and enabled by settings.
+ ///
+ public bool hasValidVisibleMesh { get => m_VisibleMesh.hasValidVisibleMesh && XRSystem.GetUseVisibilityMesh(); }
+
///
/// Generate commands to render the occlusion mesh for this pass.
/// In single-pass mode : the meshes for all views are combined into one mesh,
@@ -400,6 +418,44 @@ public void RenderOcclusionMesh(RasterCommandBuffer cmd, bool renderIntoTexture
m_OcclusionMesh.RenderOcclusionMesh(cmd.m_WrappedCommandBuffer, occlusionMeshScale, renderIntoTexture);
}
+ /// <summary>
+ /// Generate commands to render the visible mesh for this pass using a custom material and material property block.
+ /// In single-pass mode, the meshes for all views are combined into one mesh,
+ /// where the corresponding view index is encoded into each vertex.
+ /// </summary>
+ /// <param name="cmd">RasterCommandBuffer to modify.</param>
+ /// <param name="occlusionMeshScale">Occlusion mesh scale.</param>
+ /// <param name="material">Material used to render the visibility mesh.</param>
+ /// <param name="materialBlock">Material property block with all the shader parameters that need to be set.</param>
+ /// <param name="shaderPass">Material shader pass index to render.</param>
+ /// <param name="renderIntoTexture">Set to true when rendering into a render texture. Used for handling the Unity y-flip.</param>
+ public void RenderVisibleMeshCustomMaterial(RasterCommandBuffer cmd, float occlusionMeshScale,
+ Material material, MaterialPropertyBlock materialBlock, int shaderPass, bool renderIntoTexture = false)
+ {
+ if (occlusionMeshScale > 0)
+ m_VisibleMesh.RenderVisibleMeshCustomMaterial(cmd.m_WrappedCommandBuffer, occlusionMeshScale, material, materialBlock, shaderPass, renderIntoTexture);
+
+ }
+
+ /// <summary>
+ /// Generate commands to render the visible mesh for this pass using a custom material and material property block.
+ /// In single-pass mode, the meshes for all views are combined into one mesh,
+ /// where the corresponding view index is encoded into each vertex.
+ /// </summary>
+ /// <param name="cmd">CommandBuffer to modify.</param>
+ /// <param name="occlusionMeshScale">Occlusion mesh scale.</param>
+ /// <param name="material">Material used to render the visibility mesh.</param>
+ /// <param name="materialBlock">Material property block with all the shader parameters that need to be set.</param>
+ /// <param name="shaderPass">Material shader pass index to render. Defaults to 0.</param>
+ /// <param name="renderIntoTexture">Set to true when rendering into a render texture. Used for handling the Unity y-flip.</param>
+ public void RenderVisibleMeshCustomMaterial(CommandBuffer cmd, float occlusionMeshScale,
+ Material material, MaterialPropertyBlock materialBlock, int shaderPass = 0, bool renderIntoTexture = false)
+ {
+ if (occlusionMeshScale > 0)
+ m_VisibleMesh.RenderVisibleMeshCustomMaterial(cmd, occlusionMeshScale, material, materialBlock, shaderPass, renderIntoTexture);
+
+ }
+
/// <summary>
/// Draw debug line for all XR views.
/// </summary>
@@ -461,6 +517,7 @@ internal void AssignCullingParams(int cullingPassId, ScriptableCullingParameters
internal void UpdateCombinedOcclusionMesh()
{
m_OcclusionMesh.UpdateCombinedMesh();
+ m_VisibleMesh.UpdateCombinedMesh();
}
///
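
> Reviewer note: for orientation, below is a minimal, hypothetical sketch of how a render pipeline could drive the visible-mesh API added to `XRPass` above. The wrapper class, material, and property block are placeholders supplied by the caller; only `hasValidVisibleMesh`, `RenderVisibleMeshCustomMaterial`, and the pre-existing `XRSRPSettings.occlusionMeshScale` come from this change or the existing API, and the namespace import is an assumption.

```csharp
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering; // assumed namespace for XRPass and XRSRPSettings

// Illustrative sketch only, not part of this change.
static class XRVisibleMeshUsageSketch
{
    public static void DrawVisibleMesh(CommandBuffer cmd, XRPass xrPass, Material visibilityMaterial)
    {
        // hasValidVisibleMesh is false when the device supplied no visible mesh for a view,
        // or when the passes are globally disabled through XRSRPSettings.useVisibilityMesh.
        if (!xrPass.hasValidVisibleMesh)
            return;

        var properties = new MaterialPropertyBlock();

        // The occlusion mesh scale also gates and scales the visible mesh draw.
        xrPass.RenderVisibleMeshCustomMaterial(cmd, XRSRPSettings.occlusionMeshScale,
            visibilityMaterial, properties, shaderPass: 0, renderIntoTexture: false);
    }
}
```
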
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSRPSettings.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSRPSettings.cs
index bb519251b85..9b58d551900 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSRPSettings.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSRPSettings.cs
@@ -155,6 +155,28 @@ public static float occlusionMeshScale
}
}
+ /// <summary>
+ /// Controls the use of the Visibility Mesh passes in SRP.
+ /// </summary>
+ public static bool useVisibilityMesh
+ {
+ get
+ {
+#if ENABLE_VR && ENABLE_VR_MODULE
+ if (enabled)
+ return XRSystem.GetUseVisibilityMesh();
+#endif
+ return false;
+ }
+ set
+ {
+#if ENABLE_VR && ENABLE_VR_MODULE
+ if (enabled)
+ XRSystem.SetUseVisibilityMesh(value);
+#endif
+ }
+ }
+
/// <summary>
/// Controls XR mirror view blit operation
/// </summary>
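
> Reviewer note: a short, hypothetical example of toggling the new setting from pipeline or project code; the helper method and the reason for toggling are illustrative, and the namespace import is an assumption.

```csharp
using UnityEngine.Experimental.Rendering; // assumed namespace for XRSRPSettings

static class VisibilityMeshToggleSketch
{
    // Temporarily disable the visibility mesh passes, for example while a debug view
    // needs the full render target, then restore the previous state.
    public static void RunWithoutVisibilityMesh(System.Action render)
    {
        bool previous = XRSRPSettings.useVisibilityMesh; // false when XR is disabled or the VR modules are missing
        XRSRPSettings.useVisibilityMesh = false;
        try
        {
            render();
        }
        finally
        {
            XRSRPSettings.useVisibilityMesh = previous;
        }
    }
}
```
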
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs
index aa80bd23d70..1ef3882fea8 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs
@@ -49,6 +49,9 @@ static public XRDisplaySubsystem GetActiveDisplay()
#if ENABLE_VR && ENABLE_XR_MODULE
// Occlusion Mesh scaling factor
static float s_OcclusionMeshScaling = 1.0f;
+
+ // True when the render pipeline should render the visibility mesh passes
+ static bool s_UseVisibilityMesh = true;
#endif
// Internal resources used by XR rendering
@@ -183,6 +186,29 @@ internal static float GetOcclusionMeshScale()
#endif
}
+ /// <summary>
+ /// Used by the render pipeline to enable or disable all visibility mesh passes.
+ /// </summary>
+ /// <param name="useVisibilityMesh">True to enable visibility mesh passes, false to disable them.</param>
+ internal static void SetUseVisibilityMesh(bool useVisibilityMesh)
+ {
+#if ENABLE_VR && ENABLE_XR_MODULE
+ s_UseVisibilityMesh = useVisibilityMesh;
+#endif
+ }
+
+ /// <summary>
+ /// Returns whether the render pipeline should use the visibility mesh passes.
+ /// </summary>
+ internal static bool GetUseVisibilityMesh()
+ {
+#if ENABLE_VR && ENABLE_XR_MODULE
+ return s_UseVisibilityMesh;
+#else
+ return false;
+#endif
+ }
+
/// <summary>
/// Used to communicate to the XR device how to render the XR MirrorView. Note: not all blit modes are supported by all providers. Blitmode set here serves as preference purpose.
/// </summary>
@@ -497,8 +523,9 @@ static XRView BuildView(XRDisplaySubsystem.XRRenderPass renderPass, XRDisplaySub
// XRTODO : remove this line and use XRSettings.useOcclusionMesh instead when it's fixed
Mesh occlusionMesh = XRGraphicsAutomatedTests.running ? null : renderParameter.occlusionMesh;
+ Mesh visibleMesh = XRGraphicsAutomatedTests.running ? null : renderParameter.visibleMesh;
- return new XRView(renderParameter.projection, renderParameter.view, renderParameter.previousView, renderParameter.isPreviousViewValid, viewport, occlusionMesh, renderParameter.textureArraySlice);
+ return new XRView(renderParameter.projection, renderParameter.view, renderParameter.previousView, renderParameter.isPreviousViewValid, viewport, occlusionMesh, visibleMesh, renderParameter.textureArraySlice);
}
private static RenderTextureDescriptor XrRenderTextureDescToUnityRenderTextureDesc(RenderTextureDescriptor xrDesc)
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRView.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRView.cs
index 71803ba1e82..bb94d29d9e2 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRView.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRView.cs
@@ -9,17 +9,19 @@ internal readonly struct XRView
internal readonly Matrix4x4 prevViewMatrix;
internal readonly Rect viewport;
internal readonly Mesh occlusionMesh;
+ internal readonly Mesh visibleMesh;
internal readonly int textureArraySlice;
internal readonly Vector2 eyeCenterUV;
internal readonly bool isPrevViewMatrixValid;
- internal XRView(Matrix4x4 projMatrix, Matrix4x4 viewMatrix, Matrix4x4 prevViewMatrix, bool isPrevViewMatrixValid, Rect viewport, Mesh occlusionMesh, int textureArraySlice)
+ internal XRView(Matrix4x4 projMatrix, Matrix4x4 viewMatrix, Matrix4x4 prevViewMatrix, bool isPrevViewMatrixValid, Rect viewport, Mesh occlusionMesh, Mesh visibleMesh, int textureArraySlice)
{
this.projMatrix = projMatrix;
this.viewMatrix = viewMatrix;
this.prevViewMatrix = prevViewMatrix;
this.viewport = viewport;
this.occlusionMesh = occlusionMesh;
+ this.visibleMesh = visibleMesh;
this.textureArraySlice = textureArraySlice;
this.isPrevViewMatrixValid = isPrevViewMatrixValid;
eyeCenterUV = ComputeEyeCenterUV(projMatrix);
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRVisibleMesh.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRVisibleMesh.cs
new file mode 100644
index 00000000000..b15b40fc41c
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRVisibleMesh.cs
@@ -0,0 +1,162 @@
+using UnityEngine.Rendering;
+
+namespace UnityEngine.Experimental.Rendering
+{
+ // Helper class to render the visible mesh using custom materials.
+ // If possible, the mesh for each view will be combined into one mesh to reduce draw calls.
+ internal class XRVisibleMesh
+ {
+ XRPass m_Pass;
+ Mesh m_CombinedMesh;
+ int m_CombinedMeshHashCode;
+
+ static readonly ProfilingSampler k_VisibleMeshProfilingSampler = new ProfilingSampler("XR Visible Mesh");
+
+ internal XRVisibleMesh(XRPass xrPass)
+ {
+ m_Pass = xrPass;
+ }
+
+ internal void Dispose()
+ {
+ if (m_CombinedMesh)
+ {
+ CoreUtils.Destroy(m_CombinedMesh);
+ m_CombinedMesh = null;
+ }
+ }
+
+ internal bool hasValidVisibleMesh
+ {
+ get
+ {
+ if (IsVisibleMeshSupported())
+ {
+ if (m_Pass.singlePassEnabled)
+ return m_CombinedMesh != null;
+ else
+ return m_Pass.GetVisibleMesh() != null;
+ }
+
+ return false;
+ }
+ }
+
+ internal void RenderVisibleMeshCustomMaterial(CommandBuffer cmd, float occlusionMeshScale, Material material, MaterialPropertyBlock materialBlock, int shaderPass, bool yFlip = false)
+ {
+ if (IsVisibleMeshSupported())
+ {
+ using (new ProfilingScope(cmd, k_VisibleMeshProfilingSampler))
+ {
+ Vector3 scale = new Vector3(occlusionMeshScale, yFlip ? occlusionMeshScale : -occlusionMeshScale, 1.0f);
+ Mesh visMesh = m_Pass.singlePassEnabled ? m_CombinedMesh : m_Pass.GetVisibleMesh(0);
+ cmd.DrawMesh(visMesh, Matrix4x4.Scale(scale), material, 0, shaderPass, materialBlock);
+ }
+ }
+ }
+
+ internal void UpdateCombinedMesh()
+ {
+ if (IsVisibleMeshSupported() && m_Pass.singlePassEnabled && TryGetVisibleMeshCombinedHashCode(out var hashCode))
+ {
+ if (m_CombinedMesh == null || hashCode != m_CombinedMeshHashCode)
+ {
+ CreateVisibleMeshCombined();
+ m_CombinedMeshHashCode = hashCode;
+ }
+ }
+ else
+ {
+ m_CombinedMesh = null;
+ m_CombinedMeshHashCode = 0;
+ }
+ }
+
+ bool IsVisibleMeshSupported()
+ {
+ return m_Pass.enabled && m_Pass.occlusionMeshScale > 0.0f;
+ }
+
+ bool TryGetVisibleMeshCombinedHashCode(out int hashCode)
+ {
+ hashCode = 17;
+
+ for (int viewId = 0; viewId < m_Pass.viewCount; ++viewId)
+ {
+ Mesh mesh = m_Pass.GetVisibleMesh(viewId);
+
+ if (mesh != null)
+ {
+ hashCode = hashCode * 23 + mesh.GetHashCode();
+ }
+ else
+ {
+ hashCode = 0;
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ // Create a new mesh that contains the visible data from all views
+ // This essentially fetches the mesh vertices from XRPass.GetVisibleMesh(viewId=0,1)
+ // and combines them into one mesh.
+ void CreateVisibleMeshCombined()
+ {
+ CoreUtils.Destroy(m_CombinedMesh);
+
+ m_CombinedMesh = new Mesh();
+ m_CombinedMesh.indexFormat = IndexFormat.UInt16;
+
+ int combinedVertexCount = 0;
+ uint combinedIndexCount = 0;
+
+ for (int viewId = 0; viewId < m_Pass.viewCount; ++viewId)
+ {
+ Mesh mesh = m_Pass.GetVisibleMesh(viewId);
+
+ Debug.Assert(mesh != null);
+ Debug.Assert(mesh.subMeshCount == 1);
+ Debug.Assert(mesh.indexFormat == IndexFormat.UInt16);
+
+ combinedVertexCount += mesh.vertexCount;
+ combinedIndexCount += mesh.GetIndexCount(0);
+ }
+
+ Vector3[] vertices = new Vector3[combinedVertexCount];
+ ushort[] indices = new ushort[combinedIndexCount];
+ int vertexStart = 0;
+ int indexStart = 0;
+
+ for (int viewId = 0; viewId < m_Pass.viewCount; ++viewId)
+ {
+ Mesh mesh = m_Pass.GetVisibleMesh(viewId);
+ var meshIndices = mesh.GetIndices(0);
+
+ // Encode the viewId into the z channel
+ {
+ var meshVertices = mesh.vertices; // cache the array: the vertices getter allocates a copy on every access
+ meshVertices.CopyTo(vertices, vertexStart);
+
+ for (int i = 0; i < meshVertices.Length; i++)
+ vertices[vertexStart + i].z = viewId;
+ }
+
+ // Combine indices into one buffer
+ for (int i = 0; i < meshIndices.Length; i++)
+ {
+ int newIndex = vertexStart + meshIndices[i];
+ Debug.Assert(meshIndices[i] < ushort.MaxValue);
+
+ indices[indexStart + i] = (ushort)newIndex;
+ }
+
+ vertexStart += mesh.vertexCount;
+ indexStart += meshIndices.Length;
+ }
+
+ m_CombinedMesh.vertices = vertices;
+ m_CombinedMesh.SetIndices(indices, MeshTopology.Triangles, 0);
+ }
+ }
+}
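
> Reviewer note: as a plain-C# illustration (decoupled from `UnityEngine.Mesh`), the combination scheme used by `CreateVisibleMeshCombined` above appends each view's vertices, writes the view index into the z component, and re-bases the indices so a single draw covers all views in single-pass rendering. All names below are hypothetical.

```csharp
using System.Collections.Generic;

static class CombinedMeshSketch
{
    // viewVertices[viewId] holds one view's 2D vertices; viewIndices[viewId] its 16-bit index buffer.
    public static (float[,] vertices, ushort[] indices) Combine(
        IReadOnlyList<(float x, float y)[]> viewVertices,
        IReadOnlyList<ushort[]> viewIndices)
    {
        int vertexCount = 0, indexCount = 0;
        for (int v = 0; v < viewVertices.Count; v++)
        {
            vertexCount += viewVertices[v].Length;
            indexCount += viewIndices[v].Length;
        }

        var vertices = new float[vertexCount, 3];
        var indices = new ushort[indexCount];
        int vertexStart = 0, indexStart = 0;

        for (int viewId = 0; viewId < viewVertices.Count; viewId++)
        {
            var verts = viewVertices[viewId];
            for (int i = 0; i < verts.Length; i++)
            {
                vertices[vertexStart + i, 0] = verts[i].x;
                vertices[vertexStart + i, 1] = verts[i].y;
                vertices[vertexStart + i, 2] = viewId; // the view index is encoded in z
            }

            var idx = viewIndices[viewId];
            for (int i = 0; i < idx.Length; i++)
                indices[indexStart + i] = (ushort)(vertexStart + idx[i]); // re-base into the combined vertex buffer

            vertexStart += verts.Length;
            indexStart += idx.Length;
        }

        return (vertices, indices);
    }
}
```
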
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRVisibleMesh.cs.meta b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRVisibleMesh.cs.meta
new file mode 100644
index 00000000000..7c6e6b557d7
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRVisibleMesh.cs.meta
@@ -0,0 +1,2 @@
+fileFormatVersion: 2
+guid: 7fd1ba8431960304588429bd8a60e2df
\ No newline at end of file
diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraph.ComputeGraphHash.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraph.ComputeGraphHash.cs
index f849e39b882..2173abae891 100644
--- a/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraph.ComputeGraphHash.cs
+++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraph.ComputeGraphHash.cs
@@ -1,4 +1,4 @@
-using NUnit.Framework;
+using NUnit.Framework;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
@@ -38,6 +38,12 @@ public static void RenderFunc(RenderGraphTestPassData data, RenderGraphContext c
}
}
+ void ClearCompiledGraphAndHash()
+ {
+ m_RenderGraph.ClearCurrentCompiledGraph();
+ DelegateHashCodeUtils.ClearCache();
+ }
+
[Test]
public void ComputeGraphHash_WhenCalledMultipleTimes_CacheForDelegatesIsNotGrowingBetweenComputes()
{
@@ -438,4 +444,4 @@ void LocalRenderFunc(RenderGraphTestPassData data, RenderGraphContext renderGrap
}
}
}
-}
\ No newline at end of file
+}
diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs
index 3f5c384c0ff..cd783e957c8 100644
--- a/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs
+++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs
@@ -190,16 +190,12 @@ public void Cleanup()
m_Camera = null;
}
- void ClearCompiledGraphAndHash()
+ [TearDown]
+ public void CleanupRenderGraph()
{
- m_RenderGraph.ClearCurrentCompiledGraph();
- DelegateHashCodeUtils.ClearCache();
- }
-
- [SetUp]
- public void SetupRenderGraph()
- {
- ClearCompiledGraphAndHash();
+ // Clean up all Render Graph resources and data structures
+ // so that nothing remains and the Render Graph in the next test starts from scratch
+ m_RenderGraph.ForceCleanup();
}
class RenderGraphTestPassData
@@ -1456,5 +1452,98 @@ void RecordGraphAPIError(RenderGraphState graphState, string exceptionExpected,
m_Camera.Render();
}
+
+ class RenderGraphCleanupTestData
+ {
+ public TextureHandle textureToRelease;
+ }
+
+ [Test]
+ public void Cleanup_ReleaseGraphicsResources_WhenCallingCleanup()
+ {
+ // We need to capture this variable in the CleanupPass render function lambda
+ RenderTexture renderTextureToRemove = null;
+
+ m_RenderGraphTestPipeline.recordRenderGraphBody = (context, camera, cmd) =>
+ {
+ using (var builder = m_RenderGraph.AddUnsafePass("CleanupPass", out var passData))
+ {
+ builder.AllowPassCulling(false);
+
+ var texDesc = new TextureDesc(Vector2.one, false, false)
+ {
+ width = 1920,
+ height = 1080,
+ format = GraphicsFormat.B10G11R11_UFloatPack32,
+ clearBuffer = true,
+ clearColor = Color.red,
+ name = "Texture To Release"
+ };
+ passData.textureToRelease = m_RenderGraph.CreateTexture(texDesc);
+ builder.UseTexture(passData.textureToRelease);
+ builder.SetRenderFunc((RenderGraphCleanupTestData data, UnsafeGraphContext context) =>
+ {
+ // textureToRelease has been allocated before executing this node
+
+ renderTextureToRemove = (RenderTexture)data.textureToRelease;
+ Assert.IsNotNull(renderTextureToRemove);
+
+ // textureToRelease will be returned to the texture pool after executing this node
+ });
+ }
+ };
+
+ // Render Graph hasn't started yet, no texture allocated
+ Assert.IsNull(renderTextureToRemove);
+
+ m_Camera.Render();
+
+ // The cleanup pass has been executed:
+ // the RG resource has been created and then released to the pool,
+ // but the graphics resource has not been released; it stays attached to the pooled resource
+ // in case a later pass reuses it
+ Assert.IsNotNull(renderTextureToRemove);
+
+ m_RenderGraph.Cleanup();
+
+ // All RG resources and data structures have been released
+ Assert.IsTrue(renderTextureToRemove == null);
+ }
+
+ [Test]
+ public void Cleanup_RenderAgain_AfterCallingCleanup()
+ {
+ m_RenderGraphTestPipeline.recordRenderGraphBody = (context, camera, cmd) =>
+ {
+ using (var builder = m_RenderGraph.AddUnsafePass("MidCleanupPass", out var passData))
+ {
+ builder.AllowPassCulling(false);
+
+ var texDesc = new TextureDesc(Vector2.one, false, false)
+ {
+ width = 1920,
+ height = 1080,
+ format = GraphicsFormat.B10G11R11_UFloatPack32,
+ clearBuffer = true,
+ clearColor = Color.red,
+ name = "Texture To Release Twice"
+ };
+
+ passData.textureToRelease = m_RenderGraph.CreateTexture(texDesc);
+ builder.SetRenderFunc((RenderGraphCleanupTestData data, UnsafeGraphContext context) =>
+ {
+ // Empty render function
+ });
+ }
+ };
+
+ m_Camera.Render();
+
+ // Cleanup everything in Render Graph, even the native data structures
+ m_RenderGraph.Cleanup();
+
+ // Ensure that the Render Graph data structures can be reinitialized at runtime, even native ones
+ Assert.DoesNotThrow(() => m_Camera.Render());
+ }
}
}
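
> Reviewer note: the new tests exercise `RenderGraph.Cleanup()` and rendering again after cleanup. Below is a minimal sketch of the usage pattern they rely on, assuming a custom pipeline that owns its own `RenderGraph` instance; the pipeline class and its name are hypothetical.

```csharp
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.RenderGraphModule;

// Minimal sketch (hypothetical pipeline): the tests above rely on Cleanup() releasing
// every pooled graphics resource and native data structure, and on the graph being
// usable again afterwards if rendering continues.
class MinimalRenderGraphPipeline : RenderPipeline
{
    readonly RenderGraph m_RenderGraph = new RenderGraph("MinimalRenderGraph");

    protected override void Render(ScriptableRenderContext context, Camera[] cameras)
    {
        // Record and execute Render Graph passes here (omitted).
    }

    protected override void Dispose(bool disposing)
    {
        // Release all Render Graph resources; a later Render() call rebuilds what it needs.
        m_RenderGraph.Cleanup();
        base.Dispose(disposing);
    }
}
```
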
diff --git a/Packages/com.unity.render-pipelines.core/Tests/Runtime/RuntimeProfilerTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Runtime/RuntimeProfilerTests.cs
index 4a0468a9564..ee29f1edc51 100644
--- a/Packages/com.unity.render-pipelines.core/Tests/Runtime/RuntimeProfilerTests.cs
+++ b/Packages/com.unity.render-pipelines.core/Tests/Runtime/RuntimeProfilerTests.cs
@@ -55,10 +55,10 @@ protected IEnumerator Warmup()
}
}
- // Fails on WebGL and Oculus Quest.
+ // Fails on WebGL, Oculus Quest and Switch.
// Unfortunately, there is no good way to exclude Oculus Quest from the test without excluding all Android devices.
// https://jira.unity3d.com/browse/GFXFOUND-559
- [UnityPlatform(exclude = new RuntimePlatform[] { RuntimePlatform.WebGLPlayer, RuntimePlatform.Android })]
+ [UnityPlatform(exclude = new RuntimePlatform[] { RuntimePlatform.WebGLPlayer, RuntimePlatform.Android, RuntimePlatform.Switch })]
class RuntimeProfilerTests : RuntimeProfilerTestBase
{
[UnityTest]
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/AOVs.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/AOVs.md
index 054a85046c9..32f3c233859 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/AOVs.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/AOVs.md
@@ -4,7 +4,7 @@ Arbitrary Output Variables (AOVs) are additional images that an [HDRP Camera](hd
Here is an example of three AOVs, containing from left to right the Albedo, Normal, and Object ID of each pixel:
-
+
In HDRP, you can access and configure AOVs in the following ways:
- Using the [HDRP Compositor tool](graphics-compositor.md).
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Anti-Aliasing.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Anti-Aliasing.md
index 284e42b1c6c..9a62582081f 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Anti-Aliasing.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Anti-Aliasing.md
@@ -4,7 +4,7 @@
This is most obvious if you compare the original and digital signals for an audio source at its highest frequencies, or a visual source in its smallest details. Regular signal processing uses the [Nyquist rate](Glossary.md#NyquistRate) to avoid aliasing; however, this isn't practical for image rendering because it's resource intensive.
-
+
An example of the rasterization process creating some aliasing.
@@ -96,18 +96,18 @@ When you enable MSAA in your Unity Project, you must also enable it for your Cam
Increasing the MSAA Sample Count produces smoother antialiasing, at the cost of performance. Here are some visual examples showing the effect of the different MSAA Sample Counts:
-
+
MSAA Sample Count set to None.
-
+
MSAA Sample Count set to MSAA 2X.
-
+
MSAA Sample Count set to MSAA 4X.
-
+
MSAA Sample Count set to MSAA 8X.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Injection-Points.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Injection-Points.md
index 5f56f44eae3..3e20c3bc9c3 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Injection-Points.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Injection-Points.md
@@ -23,3 +23,4 @@ Unity triggers the following injection points in a frame, in order from top to b
| BeforeTransparent | Color (Pyramid \| Read \| Write), Depth (Read \| Write), Normal and roughness (Read), Motion Vectors (Read) | The available buffers for this injection point contain: - All opaque objects. - Transparent PreRefraction objects. - Transparent objects with depth-prepass and screen space reflections (SSR) enabled.
In this injection point you can sample the color pyramid that Unity uses for transparent refraction. You can use this to create a blur effect. Objects that Unity renders in this injection point are not included in the color pyramid.
You can also use this injection point to draw some transparent objects that refract the whole scene, like water. |
| BeforePostProcess | Color (Pyramid \| Read \| Write), Depth (Read \| Write), Normal and roughness (Read), Motion Vectors (Read) | The available buffers for this injection point contain all geometry in the frame that uses High Dynamic Range (HDR). |
| AfterPostProcess | Color (Read \| Write), Depth (Read) | The available buffers for this injection point contain the final render of the scene, including post-process effects.
This injection point executes the available buffers after Unity applies any post-processing effects.
If you select this injection point, objects that use the depth buffer display jittering artifacts.
When you select this injection point for a [FullscreenCustom Pass](custom-pass-create-gameobject.md#full-screen-custom-pass), Unity assigns the camera color buffer as the target by default.**Note:** When sampling scene color using HDSceneColor node in a FullScreenShaderGraph at this injection point, consider using a temporary buffer to handle concurrent read/write operations. See [Scene Color Sampling in AfterPostProcess](Custom-Pass-Scene-Color-Read.md) for implementation details. |
+
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Troubleshooting.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Troubleshooting.md
index e5d80f9c4ea..b7d0e880a02 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Troubleshooting.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Custom-Pass-Troubleshooting.md
@@ -4,7 +4,7 @@ This section provides examples of common issues you might encounter when using a
## Display scaling issues
-
+
A scaling issue can appear in your built scene when you have two cameras that don't use the same resolution. This is most common between Game and Scene views. This can happen when:
@@ -36,7 +36,7 @@ To fix this:
Sometimes when you enable [Temporal antialiasing (TAA)](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@10.1/manual/Anti-Aliasing.html?q=anti#TAA), some GameObjects appear to jitter.
-
+
Jittering can happen when both of the following conditions are met:
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Displacement-Mode.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Displacement-Mode.md
index 6db1b51299a..cc23b985c11 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Displacement-Mode.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Displacement-Mode.md
@@ -54,5 +54,5 @@ The options in the **Displacement Mode** drop-down change depending on the Shade
| **- Min** | Set the minimum value in the **Height Map**. |
| **- Max** | Set the maximum value in the **Height Map**. |
| **- Offset** | Set the offset that HDRP applies to the **Height Map**. |
-| **- Amplitude** | Set the amplitude of the **Height Map**. |
+| **- Amplitude** | Set the amplitude of the **Height Map** in centimeters. |
| **- Base** | Use the slider to set the base for the **Height Map**. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Dynamic-Resolution.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Dynamic-Resolution.md
index 4c764a0f016..0fcb062b722 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Dynamic-Resolution.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Dynamic-Resolution.md
@@ -13,7 +13,7 @@ When you enable dynamic resolution, HDRP allocates render targets to accommodate
HDRP always uses a software method to upscale the result. The method HDRP uses is defined in the **Upscale Filter**. You can select which upscaling method HDRP uses to do this. For more information, see [Choosing an upscale filter](#Choosing_Upscale_Filter).
-
+
## Using dynamic resolution
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Frame-Settings-API.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Frame-Settings-API.md
index 520c8ef6fc3..7314e89ca90 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Frame-Settings-API.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Frame-Settings-API.md
@@ -82,8 +82,6 @@ In the override mask, to allow you to easily access the bit for a given Frame Se
The following example shows how to compare the `humanizedData` from a rendering component's override mask with the rendering component's custom Frame Settings. There are some custom Frame Settings set, but the mask is all zeros which means that this rendering component uses the default Frame Settings.
-
-
## FrameSettings Scripting API
This is a structure that contains information on how a rendering component should render the Scene. For more information about this structure and the API it contains, see [FrameSettings](xref:UnityEngine.Rendering.HighDefinition.FrameSettings).
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDR-Output.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDR-Output.md
index 66ff54a9c7a..b02f7127c5c 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDR-Output.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDR-Output.md
@@ -13,7 +13,7 @@ To activate HDR output, navigate to **Project Settings > Player** > **Other Sett
> **Note**: Only enable **Use HDR Display Output** if you need the main display to use HDR Output.
-HDR Output will be active only in Game View and in Player. Currently the feature is not working on DirectX 11 on PC, please use DirectX 12 to make use of it.
+HDR Output is only active in the Player when using DirectX 11, and both in the Player and Game View when using DirectX 12.
## HDR tonemapping in HDRP
@@ -91,5 +91,3 @@ HDRP only supports HDR Output on the following platforms:
* HDRP Supported Devices that use Metal
* Consoles
* XR devices with HDR support
-
-> **Note**: DirectX 11 only supports HDR Output in the Player, it does not support HDR Output in the Editor.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Custom_Pass_DrawRenderers_Error.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Custom_Pass_DrawRenderers_Error.png
deleted file mode 100644
index d9fae9d23d0..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Custom_Pass_DrawRenderers_Error.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Custom_Pass_HDRPAsset_CBF.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Custom_Pass_HDRPAsset_CBF.png
deleted file mode 100644
index d47a4c5d277..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Custom_Pass_HDRPAsset_CBF.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/FrameSettingsAPI-watch.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/FrameSettingsAPI-watch.png
deleted file mode 100644
index 6a75cae8018..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/FrameSettingsAPI-watch.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/HDRP-frame-graph-diagram.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/HDRP-frame-graph-diagram.png
index c57a54eb9bf..43aa99d3dfd 100644
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/HDRP-frame-graph-diagram.png and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/HDRP-frame-graph-diagram.png differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Override-GradientSky1.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Override-GradientSky1.png
deleted file mode 100644
index 286f201a483..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Override-GradientSky1.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Override-Shadows1.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Override-Shadows1.png
deleted file mode 100644
index cfb28cf0d47..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Override-Shadows1.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Post-processingColorCurves1.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Post-processingColorCurves1.png
deleted file mode 100644
index 5460948016f..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Post-processingColorCurves1.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Post-processingLiftGammaGain1.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Post-processingLiftGammaGain1.png
deleted file mode 100644
index e75ffcd1d9f..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/Post-processingLiftGammaGain1.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RayTracingGettingStarted1.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RayTracingGettingStarted1.png
deleted file mode 100644
index 1a78b8a8d84..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RayTracingGettingStarted1.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RaytracingQualityNode.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RaytracingQualityNode.png
deleted file mode 100644
index 7e23fc5f6f4..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RaytracingQualityNode.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizard1.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizard1.png
deleted file mode 100644
index d7a58b6e408..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizard1.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardDXRTab.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardDXRTab.png
deleted file mode 100644
index a1bd33739b5..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardDXRTab.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardHDRPTab.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardHDRPTab.png
deleted file mode 100644
index 879ada6e923..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardHDRPTab.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardVRTab.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardVRTab.png
deleted file mode 100644
index da7121a7eb3..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RenderPipelineWizardVRTab.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RendererAndMaterialPriority1.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RendererAndMaterialPriority1.png
deleted file mode 100644
index d1c4cd4007a..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RendererAndMaterialPriority1.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RendererAndMaterialPriority5.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RendererAndMaterialPriority5.png
deleted file mode 100644
index 07d2f1a2abd..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/RendererAndMaterialPriority5.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SampleWaterVFX.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SampleWaterVFX.png
deleted file mode 100644
index 90c06d035f2..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SampleWaterVFX.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/UpgradingToHDRP2.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/UpgradingToHDRP2.png
deleted file mode 100644
index 77d676e640c..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/UpgradingToHDRP2.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/custom-material-inspector-ui-blocks.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/custom-material-inspector-ui-blocks.png
deleted file mode 100644
index 16426fc5258..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/custom-material-inspector-ui-blocks.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/shader-variants.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/shader-variants.png
deleted file mode 100644
index d1179cfdec5..00000000000
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/shader-variants.png and /dev/null differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/watersystem-deformer.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/watersystem-deformer.png
index 7b3a640fb94..ae9a1c85d24 100644
Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/watersystem-deformer.png and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/watersystem-deformer.png differ
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Photoshop.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Photoshop.md
index e4150c5cf68..557f2772c50 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Photoshop.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Photoshop.md
@@ -19,7 +19,7 @@ Export the current [Camera](hdrp-camera-component-reference.md)'s view to a log-
2. Select **Edit** > **Rendering** > **Render Selected HDRP Camera to Log EXR** **(**or press **Ctrl+Shift+Alt+E)**.
3. Save the EXR file.
-
+
After you export the EXR file, transform the data from the format that Unity uses (Alexa LogC El.1000) to Linear RGB space, so that external software can use it. To do this, HDRP provides a set of ready-to-use transform LUTs. To get them:
@@ -32,7 +32,7 @@ After you export the EXR file, transform the data from the format that Unity use
Import the EXR file into Photoshop. Because it's a raw log-encoded file, it appears washed-out.
-
+
Add a non-destructive **Color Lookup** adjustment layer to your image (menu: **Layer** > **New Adjustment Layer** > **Color Lookup…**). Then, in the new layer's **Properties**:
@@ -40,13 +40,13 @@ Add a non-destructive **Color Lookup** adjustment layer to your image (menu: **L
- Set **Table Order** to **BGR**.
- In the **3DLUT File** drop-down, select **Load 3D LUT…** and select the Unity Log To Linear file to transform the LUT to a usable color space. This is in your Unity Project folder at Assets/HDRP Sample Content/Post-processing/Cube LUTs/.
-
+
## Step 4: Apply color grading
You can now start grading your image. Make sure you only do global color operations through the use of non-destructive adjustment layers. LUTs can't store local operators or any filters that affect neighboring pixels (such as blur), so these create visual artifacts. Place all the adjustment layers on top of the **Color Lookup** layer, unless you intend to grade directly on the Log input.
-
+
## Step 5: Export your work as a CUBE file
@@ -66,6 +66,6 @@ Unity automatically interprets the CUBE file as a usable Texture3D Asset. You ca
4. Assign your CUBE Texture to the **Lookup Texture** property.
5. Change the **Contribution** to set how much the **Lookup Texture** contributes to the Scene.
-
+
You can continue to color grade in Photoshop and overwrite the previously saved CUBE file. Unity automatically updates the grading in the Scene with your changes.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Resolve.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Resolve.md
index c8615a98eec..e7c80724617 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Resolve.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/LUT-Authoring-Resolve.md
@@ -19,7 +19,7 @@ Export the current [Camera](hdrp-camera-component-reference.md)'s view to a log-
2. Select **Edit** > **Rendering** > **Render Selected HDRP Camera to Log EXR** **(**or press **Ctrl+Shift+Alt+E)**.
3. Save the EXR file.
-
+
After you export the EXR file, transform the data from the format that Unity uses (Alexa LogC El.1000) to Linear RGB space, so that external software can use it. To do this, HDRP provides a set of ready-to-use transform LUTs. To get them:
@@ -47,13 +47,13 @@ Now import the EXR into Resolve and apply the **Unity Log To Linear r1** LUT to
2. In the context menu, select **3D Lut**.
3. Select the **Unity Log To Linear r1** LUT.
-
+
## Step 4: Apply color grading
You can now start grading your image. Make sure you only do global color operations because LUTs can't store local operators or any filters that affect neighboring pixels (such as blur or sharpen).
-
+
## Step 5: Export your work as a CUBE file
@@ -73,6 +73,6 @@ Unity automatically interprets the CUBE file as a usable Texture3D Asset. You ca
4. Assign your CUBE Texture to the **Lookup Texture** property.
5. Change the **Contribution** to set how much the **Lookup Texture** contributes to the Scene.
-
+
You can continue to color grade in Resolve and overwrite the previously saved CUBE file. Unity automatically updates the grading in the Scene with your changes.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md
index dcfbedd1298..1a07e544424 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md
@@ -6,7 +6,7 @@ HDRP implements [ray-traced global illumination](Ray-Traced-Global-Illumination.
SSGI and RTGI replace all [lightmap](https://docs.unity3d.com/Manual/Lightmapping.html) and [Light Probe](https://docs.unity3d.com/Manual/LightProbes.html) data. If you enable this override on a Volume that affects the Camera, Light Probes and the ambient probe stop contributing to lighting for GameObjects.
-
+
## Enable Screen Space Global Illumination
[!include[](Snippets/Volume-Override-Enable-Override.md)]
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-Reflection.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-Reflection.md
index 9095d477047..22fd761d550 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-Reflection.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-Reflection.md
@@ -44,7 +44,8 @@ In the following example image, the car GameObject is in the center of the Camer
This example image uses **Speed From Reflected Surface** to accumulate the samples from the car and partially accumulate the samples from the sky. This makes the car and its reflection appear green, and the surface that reflects the sky appear orange.
-
+
+
## Limitations
### Screen-space reflection
@@ -53,7 +54,7 @@ To calculate SSR, HDRP reads a color buffer with a blurred mipmap generated duri
The color buffer only includes transparent GameObjects that use the **BeforeRefraction** [Rendering Pass](Surface-Type.md). However, HDRP incorrectly reflects a transparent GameObject using the depth of the surface behind it, even if you enable **Depth Write** in the GameObject's Material properties. This is because HDRP calculates SSR before it adds the depth of transparent GameObjects to the depth buffer.
-
+
If a transparent material has **Receive SSR Transparent** enabled, HDRP always uses the **Approximation** algorithm to calculate SSR, even if you select **PBR Accumulation**.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Channel-Mixer.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Channel-Mixer.md
index e7f7f38eef9..c624ab64caf 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Channel-Mixer.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Channel-Mixer.md
@@ -13,7 +13,7 @@ The Channel Mixer effect modifies the influence of each input color channel on t
## Properties
-
+The Channel Mixer component has the following properties.
### Output channels
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Color-Curves.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Color-Curves.md
index c61bee9ba84..519186e8e13 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Color-Curves.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Color-Curves.md
@@ -13,8 +13,6 @@ Grading curves are an advanced way to adjust specific ranges in hue, saturation,
## Properties
-
-
| **Curve** | **Description** |
| -------------- | ------------------------------------------------------------ |
| **Master** | This curve affects the luminance across the whole image. The x-axis of the graph represents input luminance and the y-axis represents output luminance. You can use this to further adjust the appearance of basic attributes such as contrast and brightness across all color channels at the same time. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lens-Distortion.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lens-Distortion.md
index d6f20c488bb..b9775b7c866 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lens-Distortion.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lens-Distortion.md
@@ -13,8 +13,6 @@ The **Lens Distortion** effect distorts the final rendered picture to simulate t
## Properties
-
-
| **Property** | **Description** |
| ---------------- | ------------------------------------------------------------ |
| **Intensity** | Use the slider to set the overall strength of the distortion effect. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lift-Gamma-Gain.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lift-Gamma-Gain.md
index b955b368bd9..cc903028e4d 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lift-Gamma-Gain.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Lift-Gamma-Gain.md
@@ -13,8 +13,6 @@ This effect allows you to perform three-way color grading. The **Lift Gamma Gain
## Properties
-
-
| **Property** | **Description** |
| ------------ | ------------------------------------------------------------ |
| **Lift** | Use this to control the dark tones. This has a more exaggerated effect on shadows. • Use the trackball to select which color HDRP should shift the hue of the dark tones to. • Use the slider to offset the color lightness of the trackball color. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Tonemapping.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Tonemapping.md
index ae993f53aca..aa740b15553 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Tonemapping.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Tonemapping.md
@@ -15,7 +15,7 @@ To use Tonemapping, refer to [High Dynamic Range (HDR) and tonemapping](HDR-Outp
## Properties
-
+
| **Property** | **Description** |
| --------------------- | ------------------------------------------------------------ |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Ambient-Occlusion.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Ambient-Occlusion.md
index 527cdf92abd..26229ec3049 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Ambient-Occlusion.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Ambient-Occlusion.md
@@ -2,11 +2,11 @@
Ray-Traced Ambient Occlusion is a ray tracing feature in the High Definition Render Pipeline (HDRP). It is a more accurate, ray-traced alternative to HDRP's [screen space ambient occlusion](Override-Ambient-Occlusion.md) that can use off-screen data.
-
+
**Screen space ambient occlusion**
-
+
**Ray-traced ambient occlusion**
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Global-Illumination.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Global-Illumination.md
index a245b1cfe3a..32e00661871 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Global-Illumination.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Global-Illumination.md
@@ -2,11 +2,11 @@
Ray-Traced Global Illumination is a ray tracing feature in the High Definition Render Pipeline (HDRP). It's a more accurate alternative to [Screen Space Global Illumination](Override-Screen-Space-GI.md), Light Probes and lightmaps.
-
+
**Ray-Traced Global Illumination off**
-
+
**Ray-Traced Global Illumination on**
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Reflections.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Reflections.md
index 22a69bb3d5a..64ec340cf6b 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Reflections.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Traced-Reflections.md
@@ -2,11 +2,11 @@
Ray-Traced Reflections is a ray tracing feature in the High Definition Render Pipeline (HDRP). It's a more accurate, ray-traced alternative to [Screen Space Reflection](Override-Screen-Space-Reflection.md) that can make use of off-screen data.
-
+
Screen-space reflections
-
+
Ray-traced reflections
@@ -34,13 +34,16 @@ The same principle applies to the [StackLit Shader Graph](stacklit-master-stack-
For an example of a 75% smooth Lit material with different **Coat Mask** values, see the following images:
-
+
+
A Lit material with a Coat Mask value of 0.
-
+
+
A Lit material with a Coat Mask value of 0.1.
-
+
+
A Lit material with a Coat Mask value of 1.0.
## Properties
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md
index 95df44b565b..dafd7e9b49b 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md
@@ -56,9 +56,7 @@ HDRP enables DirextX12 by default. To enable DirectX 12 manually:
4. Unity uses Direct3d11 by default. To make Unity use Direct3d12, move **Direct3d12 (Experimental)** to the top of the list.
5. To apply the changes, you may need to restart the Unity Editor. If a window prompt appears telling you to restart the Editor, click **Restart Editor** in the window.
-The Unity Editor window should now include the <DX12> tag in the title bar:
-
-
+The Unity Editor window should now include the <DX12> tag in the title bar.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Light-Cluster.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Light-Cluster.md
index 7930ed2241f..8cb9d9685a5 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Light-Cluster.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Light-Cluster.md
@@ -6,7 +6,7 @@ To create a light cluster HDRP builds an axis-aligned grid which, in each cell,
In the rasterization rendering step, HDRP uses the tile structure for opaque objects and the cluster structure for transparent objects. The main difference between those two structures and the one used for ray tracing is that the light cluster structure is not based on the Camera frustum.
-
+
**Light Cluster [Debug Mode](Ray-Tracing-Debug.md#debug-modes)**
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Recursive-Rendering.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Recursive-Rendering.md
index c840766eebf..83f32b878a3 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Recursive-Rendering.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Recursive-Rendering.md
@@ -6,7 +6,7 @@ Rays ignore the smoothness of a Material when being reflected or refracted, whic
HDRP might display the sky color instead of a GameObject that has ray tracing applied. This happens when the GameObject is further away from the Camera than the Max Ray Length value set in the volume component. To make the GameObject appear correctly, increase the value of the Max Ray Length property.
-
+
**Car gear shift rendered with recursive ray tracing**
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Proxy-Volume.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Proxy-Volume.md
index 329adb65bf9..e71095e74ed 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Proxy-Volume.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Proxy-Volume.md
@@ -35,7 +35,7 @@ You can reuse the same Proxy Volume with other Reflection Probes, as long as the
Use the Scene view Gizmo to visually modify the size of the **Box** and **Sphere** shapes. Click and drag the handles to move the boundaries of the Proxy Volume.
-
+
## Properties
| **Property** | **Description**|
@@ -45,4 +45,4 @@ Use the Scene view Gizmo to visually modify the size of the **Box** and **Sphere
## Additional resources
- [Reflection in HDRP](Reflection-in-HDRP.md)
-- [Use the appropriate Proxy Volume in refraction](create-a-refractive-material.md#use-proxy-volume).
\ No newline at end of file
+- [Use the appropriate Proxy Volume in refraction](create-a-refractive-material.md#use-proxy-volume).
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Render-Pipeline-Wizard.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Render-Pipeline-Wizard.md
index f39f5927414..1ac76e1b877 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Render-Pipeline-Wizard.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Render-Pipeline-Wizard.md
@@ -4,8 +4,6 @@ The High Definition Render Pipeline (HDRP) includes the **HDRP Wizard** to help
To open the **Render Pipeline Wizard**, go to **Window > Rendering** and select **HDRP Wizard**.
-
-
## Packages
At the top of the window, there is an information text that shows you the currently installed version of HDRP. The **Package Manager** button provides a shortcut to the HDRP package in the Package Manager window.
@@ -36,8 +34,6 @@ Each configuration is separated into two scopes:
This section provides you with configuration options to help you make your Unity Project use HDRP.
-
-
#### Global
| **Configuration Option** | **Description** |
| -------------------------------- | ------------------------------------------------------------ |
@@ -64,51 +60,18 @@ This section provides you with configuration options to help you make your Unity
This section provides extra configuration options to help you set up your HDRP Project to support virtual reality. If you can't find an option in this section of the documentation, refer to the [HDRP section](#HDRPTab) options. This is only supported on Windows OS. You can adjust the extra configuration options in the **Global** scope.
-
-
-
-
-
-
Configuration Option
-
-
Description
-
-
-
-
-
Legacy VR System
-
-
Checks that Virtual Reality Supported is disabled. This is the deprecated system. Select the Fix button to disable Virtual Reality Supported.
-
-
-
XR Management Package
-
-
Checks that the XR Management Package is installed. Select the Fix button to install it.
-
-
-
-
Oculus Plugin
-
The wizard can't check this directly. This option gives information on the procedure to follow to check it. To install the plugin manually, go to Edit > Project Settings > XR Plugin Manager
-
-
-
-
Single-Pass Instancing
-
The wizard can't check this directly. This option gives information on the procedure to follow to check it. Go to Edit > Project Settings > XR Plugin Manager > Oculus and make sure Stereo Rendering Mode uses Single-Pass Instancing
-
-
-
XR Legacy Helpers Package
-
-
Checks that the XR Legacy Helpers Package is installed. It's required to handle inputs with the TrackedPoseDriver component. Select the Fix button to install it.
-
-
-
+| **Configuration Option** | **Suboption** | **Description** |
+|-------------------------------|----------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Legacy VR System** | | Checks that Virtual Reality Supported is disabled. This is the deprecated system. Select the Fix button to disable Virtual Reality Supported. |
+| **XR Management Package** | | Checks that the XR Management Package is installed. Select the Fix button to install it. |
+| **XR Management Package** | **Oculus Plugin** | The wizard can't check this directly. This option gives information on the procedure to follow to check it. To install the plugin manually, go to Edit > Project Settings > XR Plugin Manager |
+| **XR Management Package** | **Single-Pass Instancing** | The wizard can't check this directly. This option gives information on the procedure to follow to check it. Go to Edit > Project Settings > XR Plugin Manager > Oculus and make sure Stereo Rendering Mode uses Single-Pass Instancing |
+| **XR Legacy Helpers Package** | | Checks that the XR Legacy Helpers Package is installed. It's required to handle inputs with the TrackedPoseDriver component. Select the Fix button to install it. |
### HDRP + DXR
This section provides extra configuration options to help you set up your HDRP Project to support ray tracing. If you can't find an option in this section of the documentation, refer to the [HDRP tab](#HDRPTab) options. This is only supported on Windows OS.
-
-
**Note**: Every **Fix** will be disabled if your hardware or OS doesn't support DXR.
#### Global
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Renderer-And-Material-Priority.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Renderer-And-Material-Priority.md
index 5dbbd6c45c6..459ec3c59fb 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Renderer-And-Material-Priority.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Renderer-And-Material-Priority.md
@@ -18,24 +18,21 @@ The resulting queue is a list of GameObjects that are first sorted by their Mate
Materials with a **Transparent Surface Type** have a **Sorting Priority** property that you can use to sort groups of Meshes that use different Materials. This property is an integer value clamped between -50 and 50.
-
-
HDRP supports negative values so that you can easily assign new Materials to the lowest priority. This is helpful if you want to assign a new Material to the lowest priority when the lowest priority is already being used for another Material. In this case, you can just assign the new Material’s priority to a negative value, instead of increasing every other Material’s sorting priority by one to accommodate the new Material.
HDRP uses the **Sorting Priority** to sort GameObjects that use different Materials in your Scene. HDRP renders Materials with lower **Sorting Priority** values first. This means that Meshes using Materials with a higher **Sorting Priority** value appear in front of those using Materials with lower ones, even if Meshes using the first Material are further away from the Camera.
-For example, the following Scene includes two spheres (**Sphere 1** and **Sphere 2**) that use two different Materials. As you can see, **Sphere 1** is closer to the **Camera** than **Sphere 2**.
-
+For example, the following Scene includes two spheres (**Sphere 1** and **Sphere 2**) that use two different Materials. **Sphere 1** is closer to the **Camera** than **Sphere 2**.
-
+
When the **Sort Priority** of each Material is the same, HDRP treats them with equal importance, and bases the rendering order on the Material's distance from the Camera. In this example, the **Sort Priority** of both Materials is set to **0**, so HDRP renders them in the order defined by their distance from the Camera, which means **Sphere 1** appears in front of **Sphere 2**.
-
+
When the **Sort Priority** properties of different Materials aren't the same, HDRP displays Meshes using Materials with a higher priority in front of those using Materials with a lower priority. To achieve this, HDRP draws Meshes using lower priority Materials first and draws Meshes using the higher priority Materials later, on top of the Meshes it’s already drawn. In the example, setting the **Sort Priority** of **Sphere 2** to **1** means that HDRP renders **Sphere 1** first, then renders **Sphere 2** (drawing it over **Sphere 1**). This makes **Sphere 2** appear in front of **Sphere 1**, despite **Sphere 1** being closer to the **Camera**.
-
+
**Note**: When you enable **Depth Write** on the material, the **Sort Priority** is ignored. This is because the **Depth Test** performed in the Shader overwrites the **Sort Priority** of the material.
@@ -45,8 +42,6 @@ When the **Sort Priority** properties of different Materials aren't the same, HD
Mesh Renderers have a **Priority** property to sort Renderers using the same Material in your Scene.
-
-
To modify the render order for GameObjects using the same Material, use the **Priority** property in the Mesh Renderer’s Inspector. The **Priority** is a per-Renderer property that allows you to influence the rendering order for Renderers in your Scene.
HDRP displays Renderers with higher **Priority** values in front of those with lower **Priority** values.
@@ -55,15 +50,15 @@ You can also edit the Renderer **Priority** for Mesh Renderers in scripts by set
## Example usage
-The following Scene includes two spheres (**Sphere 1** and **Sphere 2**) that use the same Material. As you can see, **Sphere 1** is closer to the **Camera** than **Sphere 2**.
+The following Scene includes two spheres (**Sphere 1** and **Sphere 2**) that use the same Material. **Sphere 1** is closer to the **Camera** than **Sphere 2**.
-
+
When the Renderer **Priority** of each Mesh Renderer is the same, HDRP treats them with equal importance, and bases the rendering order on each Mesh Renderer’s distance from the Camera. In this example, the Renderer **Priority** of both Mesh Renderers is set to **0**, so HDRP renders them in the order defined by their distance from the Camera, which means **Sphere 1** appears in front of **Sphere 2**.
-
+
When the **Renderer Priority** properties of different Mesh Renderers aren't the same, HDRP displays Mesh Renderers with a higher priority in front of those with a lower priority. To achieve this, HDRP draws lower priority Meshes first and then draws higher priority Meshes on top of the Meshes it’s already drawn. In the example, setting the **Renderer Priority** of **Sphere 2** to **1** means that HDRP renders **Sphere 1** first, then renders **Sphere 2** (drawing it over **Sphere 1**). This makes **Sphere 2** appear in front of **Sphere 1** despite **Sphere 1** being closer to the **Camera**.
-
+
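As a minimal sketch of setting this value from a script (the `SetRendererPriority` component name and its `priority` field are illustrative, not part of HDRP), the same per-Renderer value shown in the Mesh Renderer's Inspector can be written through `Renderer.rendererPriority`:

```csharp
using UnityEngine;

// Illustrative helper: writes the Mesh Renderer's Priority value at runtime.
// Higher values render in front of lower values when the Materials are otherwise equal.
[RequireComponent(typeof(MeshRenderer))]
public class SetRendererPriority : MonoBehaviour
{
    [SerializeField] int priority = 1;

    void OnEnable()
    {
        GetComponent<MeshRenderer>().rendererPriority = priority;
    }
}
```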
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SGNode-Raytracing-Quality.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SGNode-Raytracing-Quality.md
index d3124b3495f..7464c643089 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SGNode-Raytracing-Quality.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SGNode-Raytracing-Quality.md
@@ -15,8 +15,6 @@ To add the Raytracing Quality Keyword node to the graph:
To use this keyword in the graph, you need to create a [Keyword Node](https://docs.unity3d.com/Packages/com.unity.shadergraph@latest/index.html?subfolder=/manual/Keyword-Node.html). To do this, drag the **Raytracing Quality** Keyword node from the Blackboard to the graph or open the Create Node Menu and search for **Raytracing Quality** .
-
-
### Available Ports
| Name | Direction | Type | Description |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md
index ea38b46f2a5..2b721cd1d73 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md
@@ -207,7 +207,7 @@
* [Quality and performance decisions](water-quality-and-performance-decisions.md)
* [Water Override for Volumes](water-the-water-system-volume-override.md)
* [Water surface fluctuations](water-decals-masking-landing.md)
- * [Enable mask and water decals](enable-mask-and-water-decals.md)
+ * [Enable mask and current water decals](enable-mask-and-current-water-decals.md)
* [Configure swell, agitation, or ripples](add-swell-agitation-or-ripples.md)
* [Simulating currents with water decals](simulating-currents-with-water-decals.md)
* [Simulating ripples with masks](simulating-foam-or-ripples-with-masks.md)
@@ -222,7 +222,7 @@
* [Add foam with a script](add-foam-with-script.md)
* [Customize caustics in the water system](water-caustics-in-the-water-system.md)
* [Create a current in the Water System](water-create-a-current-in-the-water-system.md)
- * [Deform a water surface](water-deform-a-water-surface.md)
+ * [Deform a water surface vertically](water-deform-a-water-surface.md)
* [Exclude part of a water surface](water-exclude-part-of-the-water-surface.md)
* [Underwater view](water-underwater-view.md)
* [Materials in the Water System](water-materials-in-the-water-system.md)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/arnold-standard-surface-material-inspector-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/arnold-standard-surface-material-inspector-reference.md
index b0bb90f5929..b063030fef3 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/arnold-standard-surface-material-inspector-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/arnold-standard-surface-material-inspector-reference.md
@@ -4,11 +4,12 @@ The Arnold Standard Surface shader replicates the Arnold Standard Surface shader
**Note**: There are slight differences between what you see in Autodesk® Maya or Autodesk® 3DsMax and what you see in HDRP, and HDRP doesn't support some material features.
-
+
Arnold Standard materials seen in the **Autodesk® Maya** viewport.
-
+
+
The same materials imported from FBX seen in Unity.
Note that the HDRP implementation of this shader uses a Shader Graph.
@@ -30,8 +31,8 @@ When Unity imports an FBX with a compatible Arnold shader, it automatically crea
Property
-
-
+
Option
+
Suboption
Description
@@ -98,6 +99,7 @@ When Unity imports an FBX with a compatible Arnold shader, it automatically crea
Property
+
Option
Description
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/convert-from-built-in-convert-lighting-and-shadows.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/convert-from-built-in-convert-lighting-and-shadows.md
index c9aa3bcb095..3ffb6891bdb 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/convert-from-built-in-convert-lighting-and-shadows.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/convert-from-built-in-convert-lighting-and-shadows.md
@@ -43,8 +43,6 @@ To set up lighting in your HDRP Project:
3. Enable **Mode** and set it to **Automatic**.
4. To refresh the exposure, go to the Scene view and enable **Always Refresh**.
- 
-
7. HDRP supports colored light cookies, whereas the Built-in light cookies use a texture format that only contains alpha. To correct the Light cookie:
1. In the Project window, select your Light cookie from your **Assets** folder.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-a-refractive-material.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-a-refractive-material.md
index fe4079d38d4..0b4bd73bddc 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-a-refractive-material.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-a-refractive-material.md
@@ -85,7 +85,7 @@ To set color tint and light absorption:
- In a Material's settings, in the [Transparency Inputs](Surface-Type.md#transparency-inputs) section, set **Transmission Color** and **Absorption Distance**.
- In Shader Graph, in the Master Stack, input a value or map into **Absorption Distance** and **Transmittance Color**.
-
+
Materials with the same transmission color but decreasing absorption distance from left to right.
@@ -98,8 +98,7 @@ To set smoothness:
- In a Material's settings, in the [Surface Inputs](Surface-Type.md) section, set **Smoothness**.
- In Shader Graph, in the Master Stack, set **Smoothness**.
-
-Materials with decreasing smoothness from left to right.
+
### Add a Reflection Probe
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-an-hdri-sky.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-an-hdri-sky.md
index 7114b93b2da..3626d7c5f40 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-an-hdri-sky.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-an-hdri-sky.md
@@ -1,6 +1,6 @@
# Create an HDRI sky
-A High-dynamic-range imaging (HDRI) Sky is a simple sky representation that uses a cubemap texture. You can define how HDRP updates the indirect lighting the sky generates in the Scene.
+A High-dynamic-range imaging (HDRI) Sky is a simple sky representation that uses a cubemap texture. You can define how HDRP updates the indirect lighting the sky generates in the Scene. For information on supported cubemap layouts, refer to [Create a cubemap](xref:um-class-cubemap-create).
Tip: [Unity HDRI Pack](https://assetstore.unity.com/packages/essentials/beta-projects/unity-hdri-pack-72511) is available on the Unity Asset Store and provides 7 pre-converted HDR Cubemaps ready for use within your Project.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md
index 7ad835a9d3f..fa6524bdeee 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md
@@ -23,7 +23,7 @@ The [**Volumetric Clouds** Volume component override](volumetric-clouds-volume-o
**Note**: When editing Volumetric Cloud properties in the Editor, set **Temporal Accumulation Factor** to a lower value. This allows you to see changes instantly, rather than blended over time.
-
+
Refer to the [Volumetric Clouds Volume Override reference](volumetric-clouds-volume-override-reference.md) for more information.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-simple-clouds-cloud-layer.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-simple-clouds-cloud-layer.md
index a8c2020b13a..4ce8a8f5da3 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-simple-clouds-cloud-layer.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-simple-clouds-cloud-layer.md
@@ -4,7 +4,7 @@ A Cloud Layer is a simple representation of clouds in the High Definition Render
Refer to [Understand clouds](understand-clouds.md) for more information about clouds in the High Definition Render Pipeline (HDRP).
-
+
## Using the Cloud Layer
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-material-inspectors.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-material-inspectors.md
index ac90208d53b..b2b2e65329f 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-material-inspectors.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-material-inspectors.md
@@ -8,8 +8,6 @@ This page contains information about how to create custom Material Inspectors in
Material Inspectors for most HDRP shaders are made of UI blocks. A UI block is a foldable section that contains a named group of properties. For example, **Surface Options** and **Surface Inputs** in the below image are both UI blocks.
-
-
The order of the UI block list defines the display order of the UI blocks in the Inspector. The first item in the list renders at the top and the last item renders at the bottom.
For information about how to create UI Blocks for a custom Material Inspector, see [UI blocks](#creating-ui-blocks).
@@ -69,11 +67,11 @@ This code sample fetches the `_MyColor` property in the shader and displays it.
2. Select the property to display and view it in the Node Settings tab of the Graph Inspector.
3. Set **Reference** to the name `FindProperty` uses. In this example, it's **_MyColor**.
-
+
The following image shows how the Inspector looks for the UI block in the code sample.
-
+
#### Implementing a foldout section
@@ -117,7 +115,7 @@ You can also hardcode the bit in a UI block but this isn't best practice especia
The following image shows how the Inspector looks for the UI block in the above code sample.
-
+
#### Block cross-reference
@@ -165,7 +163,7 @@ public class LightingInspectorExample : LightingShaderGraphGUI
This code sample produces the following Inspector:
-
+
### Custom Unlit Material Inspector
@@ -192,7 +190,7 @@ public class UnlitExampleGUI : UnlitShaderGraphGUI
This code sample produces the following Inspector:
-
+
### Custom Decal Material Inspector
@@ -220,7 +218,7 @@ public class DecalGUIExample : DecalShaderGraphGUI
This code sample produces the following Inspector:
-
+
### Bespoke Material Inspector
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-pass-create-gameobject.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-pass-create-gameobject.md
index f817fbe5af7..ae94a5da675 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-pass-create-gameobject.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-pass-create-gameobject.md
@@ -135,7 +135,7 @@ To create and assign a full-screen shader graph that reads from the custom colo
This copies the contents of the custom color buffer to the Camera color buffer.
-
+
For more information on how to modify a full-screen Shader Graph, see the [full-screen master stack](fullscreen-master-stack-reference.md).
@@ -153,9 +153,7 @@ For information about the Draw renderers Custom Pass properties, refer to [Custo
Unity uses the **Pass Name** to select which pass of the shader it renders on an HDRP material. To render the object color, select **Forward** or **ForwardOnly**. Use the **DepthForwardOnly** Pass Name if you only want to render the depth of the object.
-If you see the following warning when you create a new draw renderers CustomPass, this might be due to your [HDRP Asset](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@10.3/manual/HDRP-Asset.md) settings:
-
-
+If you see the warning "Your HDRP settings do not support ForwardOnly, some objects might not render" when you create a new draw renderers CustomPass, this might be due to your [HDRP Asset](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@10.3/manual/HDRP-Asset.md) settings.
To fix this, navigate to your HDRP Asset in your **Assets** folder (if you are using the HDRP template, this is in **Assets > Settings**) and change the **Lit Shader Mode** to **Both**. For more information, see [Changing the depth of a renderer in your scene](#Changing-Renderer-Depth).
@@ -168,13 +166,14 @@ To fix this, navigate to your HDRP Asset in your **Assets** folder (if you are u
However, HDRP does not support all materials in every injection point in a draw renderers Custom Pass. The following table lists the materials that each injection point supports for a draw renderers Custom Pass:
| **Injection Point** | **Material Type(s) supported** |
-| ----------------------------- | ----------------------------------------------------- |
+|-------------------------------|-------------------------------------------------------|
| Before Rendering | Unlit forward but without writing to the Camera color |
-| After Opaque Depth And Normal | Unlit |
-| Before PreRefraction | Unlit and Lit, `ForwardOnly` |
-| Before Transparent | Unlit and Lit, `ForwardOnly` |
-| Before Post Process | Unlit and Lit, `ForwardOnly` |
-| After Post Process | Unlit and Lit, `ForwardOnly` |
+| After Opaque Depth And Normal | Unlit |
+| After Opaque Color | Unlit and Lit, `ForwardOnly` |
+| Before PreRefraction | Unlit and Lit, `ForwardOnly` |
+| Before Transparent | Unlit and Lit, `ForwardOnly` |
+| Before Post Process | Unlit and Lit, `ForwardOnly` |
+| After Post Process | Unlit and Lit, `ForwardOnly` |
**Note**: You can only render Materials that use refraction at the Before Transparent, Before Post Process, and After Post Process injection points because this is when the refraction color pyramid is available.
@@ -485,8 +484,6 @@ To change the buffer format of the Custom Pass component in your HDRP asset, go
| R11G11B10 | 32 | This format has a higher precision than R8G8B8A8 but does not support alpha channels. |
| R16G16B16A16 | 64 | This format has the highest precision but uses twice as much memory as R8G8B8A8 and R11G11B10. |
-
-
You can sample the custom color and depth buffers in Shader Graph using the following nodes:
- [Custom Color Node](https://docs.unity3d.com/Packages/com.unity.shadergraph@latest?subfolder=/manual/HD-Custom-Color-Node.html)
- [Custom Depth Node](https://docs.unity3d.com/Packages/com.unity.shadergraph@latest?subfolder=/manual/HD-Custom-Depth-Node.html)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-scripts.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-scripts.md
index 4e0d0351a64..c8debeed51c 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-scripts.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-scripts.md
@@ -61,7 +61,7 @@ This example code uses a `ClampedFloatParameter` that you can clamp to a range.
- The second parameter represents the minimum value to clamp the property to.
- The third parameter represents the maximum value to clamp the property to.
-HDRP calls the `IsActive()` function before the `Render` function to process the effect. If this function returns `false`, HDRP doesn't process the effect. It's good practice to check every property configuration where the effect either breaks or doesn'thing. In this example, `IsActive()` makes sure that HDRP can find the `GrayScale.shader` and that the intensity is greater than 0.
+HDRP calls the `IsActive()` function before the `Render` function to process the effect. If this function returns `false`, HDRP doesn't process the effect. It's good practice to check every property configuration where the effect either breaks or doesn't do anything. In this example, `IsActive()` makes sure that HDRP can find the `GrayScale.shader` and that the intensity is greater than 0.
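As a minimal sketch of the pattern this section describes (the class name, the "Hidden/Shader/GrayScale" shader path, and the `_Intensity` property name are placeholders for your own effect, not HDRP requirements), a custom post-process component with a `ClampedFloatParameter` and an `IsActive()` check might look like this:

```csharp
using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;

[Serializable, VolumeComponentMenu("Post-processing/Custom/GrayScaleExample")]
public sealed class GrayScaleExample : CustomPostProcessVolumeComponent, IPostProcessComponent
{
    // Clamped to the [0, 1] range, as described above.
    public ClampedFloatParameter intensity = new ClampedFloatParameter(0f, 0f, 1f);

    Material m_Material;

    // HDRP skips the effect when this returns false: here, when the shader is
    // missing or the intensity is zero.
    public bool IsActive() => m_Material != null && intensity.value > 0f;

    public override CustomPostProcessInjectionPoint injectionPoint =>
        CustomPostProcessInjectionPoint.AfterPostProcess;

    public override void Setup()
    {
        var shader = Shader.Find("Hidden/Shader/GrayScale"); // placeholder shader name
        if (shader != null)
            m_Material = new Material(shader);
    }

    public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination)
    {
        if (m_Material == null)
            return;
        m_Material.SetFloat("_Intensity", intensity.value);
        // One simple way to apply the material over the full screen;
        // Blit binds `source` as the material's _MainTex.
        cmd.Blit(source, destination, m_Material, 0);
    }

    public override void Cleanup() => CoreUtils.Destroy(m_Material);
}
```

Remember that HDRP only runs custom post-process effects that you register in the project's custom post-process order settings.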
The **injectionPoint** override allows you to specify where in the pipeline HDRP executes the effect. Choose from the following injection points:
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-material-inspector-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-material-inspector-reference.md
index a135f9a3f47..65b1846ef88 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-material-inspector-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-material-inspector-reference.md
@@ -1,6 +1,6 @@
# Decal Material Inspector reference
-
+
You can modify the properties of a Decal material in the Decal Material Inspector.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-projector-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-projector-reference.md
index cc3a32db9e3..b2ae3dc79f0 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-projector-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/decal-projector-reference.md
@@ -10,15 +10,15 @@ The Decal Projector includes a Scene view representation of its bounds and proje
* An arrow that indicates the direction the projector faces. The base of this arrow is on the pivot point.
-
+
The decal Projector also includes three gizmos. The first two add handles on every face for you to click and drag to alter the size of the projector's bounds.
|**Button**|**Gizmo**|**Description**|
|-----|-----|-----|
-||**Scale**|Scales the decal with the projector box. This changes the UVs of the Material to match the size of the projector box. This stretches the decal. The Pivot remains still.|
-||**Crop**|Crops the decal with the projector box. This changes the size of the projector box but not the UVs of the Material. This crops the decal. The Pivot remains still.|
-||**Pivot / UV**|Moves the decal's pivot point without moving the projection box. This changes the transform position. Note this also sets the UV used on the projected texture.|
+||**Scale**|Scales the decal with the projector box. This changes the UVs of the Material to match the size of the projector box. This stretches the decal. The Pivot remains still.|
+||**Crop**|Crops the decal with the projector box. This changes the size of the projector box but not the UVs of the Material. This crops the decal. The Pivot remains still.|
+||**Pivot / UV**|Moves the decal's pivot point without moving the projection box. This changes the transform position. Note this also sets the UV used on the projected texture.|
You can set the color of the gizmos in the Colors panel of the Preferences window.
@@ -28,8 +28,6 @@ Using the Inspector allows you to change all of the Decal Projector properties,
## Properties
-
-
| **Property** | **Description** |
| ----------------------- | ------------------------------------------------------------ |
| **Scale Mode** | The scaling mode to apply to decals that use this Decal Projector. The options are: • **Scale Invariant**: Ignores the transformation hierarchy and uses the scale values in this component directly. • **Inherit from Hierarchy**: Multiplies the [lossy scale](https://docs.unity3d.com/ScriptReference/Transform-lossyScale.html) of the Transform with the Decal Projector's own scale then applies this to the decal. Note that since the Decal Projector uses orthogonal projection, if the transformation hierarchy is [skewed](https://docs.unity3d.com/Manual/class-Transform.html), the decal does not scale correctly. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/deep-learning-super-sampling-in-hdrp.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/deep-learning-super-sampling-in-hdrp.md
index 2060e0af1db..ef76e23a274 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/deep-learning-super-sampling-in-hdrp.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/deep-learning-super-sampling-in-hdrp.md
@@ -78,10 +78,9 @@ If you need a specific custom mip bias for a Texture, create a custom sampler th
Out = SAMPLE_TEXTURE2D_BIAS(TextureInput, SamplerInput, UV, MipBias);
```
-
+
-
-
+
## Additional resources
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/enable-mask-and-water-decals.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/enable-mask-and-current-water-decals.md
similarity index 66%
rename from Packages/com.unity.render-pipelines.high-definition/Documentation~/enable-mask-and-water-decals.md
rename to Packages/com.unity.render-pipelines.high-definition/Documentation~/enable-mask-and-current-water-decals.md
index 3e61be290e2..0a0ddea964b 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/enable-mask-and-water-decals.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/enable-mask-and-current-water-decals.md
@@ -1,6 +1,6 @@
-# Enable mask and water decals
+# Enable mask and current water decals
-To enable mask and water decals, follow these steps:
+To enable mask and current water decals, follow these steps:
1. Go to **Edit** > **Project Settings**.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/fullscreen-master-stack-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/fullscreen-master-stack-reference.md
index 283234a4404..2bb33f7fab9 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/fullscreen-master-stack-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/fullscreen-master-stack-reference.md
@@ -2,7 +2,7 @@
You can modify the properties of a Fullscreen Shader Graph in the Fullscreen Master Stack.
-
+
A full-screen shader that applies a raindrop effect to the screen.
@@ -76,40 +76,23 @@ The properties in the Graph Settings window control the overall appearance of th
### Surface Options
-| **Property** | **Description** | |
-| --------------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ |
-| **Allow Override Material** | Exposes the Graph Settings properties in the Material’s **Surface Options**. **Note:** You can only expose properties that you enable in **Graph Settings.** If you enable one of these properties, you can’t disable it in the Inspector under the Material’s **Surface Options.** | |
-| **Blend Mode** | Specifies the blend mode to use when Unity renders the full-screen shader. Each option has an equivalent [`BlendMode`](https://docs.unity3d.com/ScriptReference/Rendering.BlendMode.html) operation. **Note**: When you write to a Blit shader, disable this property to avoid undesired effects. | |
-| | **Alpha** | Uses the shader’s alpha value to control its opacity. `BlendMode` operation: `Blend SrcAlpha OneMinusSrcAlpha` |
-| | **Premultiply** | Multiplies the RGB values of the transparent shader by its alpha value, then applies a similar effect to the shader as **Alpha**. `BlendMode` operation: `Blend One OneMinusSrcAlpha` |
-| | **Additive** | Adds the color values of the full-screen shader and the Camera output together. `BlendMode` operation: `Blend One One` |
-| | **Multiply** | Multiplies the color of the full-screen shader with the color of the Camera’s output. `BlendMode` operation: `Blend DstColor Zero` |
-| | **Custom** | Set every parameter of the blending equation manually. For more information, see [Custom blend modes](#custom-blend-modes). |
-| **Depth Test ** | Specifies the function this shader uses to perform the depth test. | |
-| | **Disabled** | Does not perform a depth test. |
-| | **Never** | The depth test never passes. |
-| | **Less** | The depth test passes if the pixel's depth value is less than the value stored in the depth buffer. |
-| | **Equal** | The depth test passes if the pixel's depth value is equal to the value stored in the depth buffer. |
-| | **Less Equal** | The depth test passes if the pixel's depth value is less than or equal to the value stored in the depth buffer. This renders the tested pixel in front of other pixels. |
-| | **Greater** | The depth test passes if the pixel's depth value is greater than the value stored in the depth buffer. |
-| | **Not Equal** | The depth test passes if the pixel's depth value is not equal to the value stored in the depth buffer. |
-| | **Greater Equal** | The depth test passes if the pixel's depth value is greater than or equal to the value stored in the depth buffer. |
-| | **Always** | The depth test always passes, and Unity does not compare the pixel's depth value to the value it has stored in the depth buffer. |
-| **Depth Write** | Indicates whether HDRP writes depth values for GameObjects that use this shader. Enable this property to write the depth value to the depth buffer and use a [depth Fragment](#graph-settings) block. | |
-| **Depth Write Mode** | Determines the depth value’s input format before Unity passes it to the depth buffer. This property determines which Depth block you can use in the [Fragment context](#fragment-context).This property appears when you enable **Depth Write**. | |
-| | **LinearEye** | Converts the depth value into a value scaled to world space. This new value represents the depth (in meters) from the near to the far plane of the Camera. |
-| | **Linear01** | Uses a linear depth value range between 0 and 1. |
-| | **Raw** | Does not convert the depth buffer value. Use this setting with a nonlinear depth value or when you directly sample the depth value from the depth buffer. |
-| Enable Stencil | This property gives you control over all stencil fields. See [Stencil properties](#stencil-properties) for information about the options that become available when you enable this property. | |
-| Custom Editor GUI | Accepts the full name of a C# class that inherits [`FullscreenShaderGUI`](https://docs.unity3d.com/Packages/com.unity.shadergraph@15.0/api/UnityEditor.Rendering.Fullscreen.ShaderGraph.FullscreenShaderGUI.html). For information on how to use a custom editor, see [ShaderLab: assigning a custom editor](https://docs.unity3d.com/2021.2/Documentation/Manual/SL-CustomEditor.html). | |
+| **Property** | **Description** |
+|-----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Allow Override Material** | Exposes the Graph Settings properties in the Material's **Surface Options**. **Note:** You can only expose properties that you enable in **Graph Settings.** If you enable one of these properties, you can't disable it in the Inspector under the Material's **Surface Options.** |
+| **Blend Mode** | Specifies the blend mode to use when Unity renders the full-screen shader. Each option has an equivalent [`BlendMode`](https://docs.unity3d.com/ScriptReference/Rendering.BlendMode.html) operation. **Note**: When you write to a Blit shader, disable this property to avoid undesired effects. • **Alpha**: Uses the shader's alpha value to control its opacity. `BlendMode` operation: `Blend SrcAlpha OneMinusSrcAlpha`. • **Premultiply**: Multiplies the RGB values of the transparent shader by its alpha value, then applies a similar effect to the shader as **Alpha**. `BlendMode` operation: `Blend One OneMinusSrcAlpha`. • **Additive**: Adds the color values of the full-screen shader and the Camera output together. `BlendMode` operation: `Blend One One`. • **Multiply**: Multiplies the color of the full-screen shader with the color of the Camera's output. `BlendMode` operation: `Blend DstColor Zero`. • **Custom**: Set every parameter of the blending equation manually. For more information, see Custom blend modes. |
+| **Depth Test** | Specifies the function this shader uses to perform the depth test. • **Disabled**: Does not perform a depth test. • **Never**: The depth test never passes. • **Less**: The depth test passes if the pixel's depth value is less than the value stored in the depth buffer. • **Equal**: The depth test passes if the pixel's depth value is equal to the value stored in the depth buffer. • **Less Equal**: The depth test passes if the pixel's depth value is less than or equal to the value stored in the depth buffer. This renders the tested pixel in front of other pixels. • **Greater**: The depth test passes if the pixel's depth value is greater than the value stored in the depth buffer. • **Not Equal**: The depth test passes if the pixel's depth value is not equal to the value stored in the depth buffer. • **Greater Equal**: The depth test passes if the pixel's depth value is greater than or equal to the value stored in the depth buffer. • **Always**: The depth test always passes, and Unity does not compare the pixel's depth value to the value it has stored in the depth buffer. |
+| **Depth Write** | Indicates whether HDRP writes depth values for GameObjects that use this shader. Enable this property to write the depth value to the depth buffer and use a [depth Fragment](#graph-settings) block. |
+| **Depth Write Mode** | Determines the depth value's input format before Unity passes it to the depth buffer. This property determines which Depth block you can use in the [Fragment context](#fragment-context). This property appears when you enable **Depth Write**. • **LinearEye**: Converts the depth value into a value scaled to world space. This new value represents the depth (in meters) from the near to the far plane of the Camera. • **Linear01**: Uses a linear depth value range between 0 and 1. • **Raw**: Does not convert the depth buffer value. Use this setting with a nonlinear depth value or when you directly sample the depth value from the depth buffer. |
+| **Enable Stencil** | This property gives you control over all stencil fields. See [Stencil properties](#stencil-properties) for information about the options that become available when you enable this property. |
+| **Custom Editor GUI** | Accepts the full name of a C# class that inherits [`FullscreenShaderGUI`](https://docs.unity3d.com/Packages/com.unity.shadergraph@15.0/api/UnityEditor.Rendering.Fullscreen.ShaderGraph.FullscreenShaderGUI.html). For information on how to use a custom editor, see [ShaderLab: assigning a custom editor](https://docs.unity3d.com/2021.2/Documentation/Manual/SL-CustomEditor.html). |
## Custom Blend Mode
-Use the **Custom** blend mode to create a blend mode different from those available in [Surface Options](#surface-options). To show these options, set **Blend Mode** to **Custom**. The Custom blend mode properties specify the blending operation to use for this full-screen shader’s alpha and color channels.
+Use the **Custom** blend mode to create a blend mode different from those available in [Surface Options](#surface-options). To show these options, set **Blend Mode** to **Custom**. The Custom blend mode properties specify the blending operation to use for this full-screen shader's alpha and color channels.
-In the blend mode properties, **Src** (source) refers to the full-screen shader itself. **Dst** (destination) refers to the Scene camera’s raw output, which this shader doesn't affect. The blending operation applies the source contents to the destination contents to produce a rendering result.
+In the blend mode properties, **Src** (source) refers to the full-screen shader itself. **Dst** (destination) refers to the Scene camera's raw output, which this shader doesn't affect. The blending operation applies the source contents to the destination contents to produce a rendering result.
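As a rough sketch of the equation these properties set up (the exact operator comes from the **Blend Operation** you choose): `result = (Src value × Src factor) op (Dst value × Dst factor)`.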
For more information on the blending equation, see [ShaderLab command: Blend](https://docs.unity3d.com/Manual/SL-Blend.html).
@@ -139,37 +122,16 @@ Determines the blending equation HDRP uses for the alpha channel. Each setting d
These properties affect how this full-screen Shader Graph uses the stencil buffer. For more information on the stencil buffer, see [SL-Stencil](https://docs.unity3d.com/Manual/SL-Stencil.html).
-| **Property** | **Description** | |
-| ------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ |
-| **Show Only HDRP Bits** | Determines whether you can set a custom value for the **Reference**, **Read Mask** and **Write Mask** properties. | |
-| **Reference** | Determines the stencil reference value this shader uses for all stencil operations. When you disable **Show Only HDRP Bits,** you can set a custom value for this property. **Important:** If you override a bit HDRP has reserved (bits 0, 1, 2, 3, 4, and 5), it can cause rendering artefacts. | |
-| | **None** | Assigns no reference value. |
-| | **UserBit0** | Stencil bit number 6. |
-| | **UserBit1** | Stencil bit number 7. |
-| | **AllUserBits** | Stencil bit numbers 6 and 7. |
-| **Read Mask** | Determines which bits this shader can read during the stencil test. When you enable **Show Only HDRP Bits,** you can set a custom value for this property. **Important**: If you override a bit HDRP has reserved (bits 0,1,2,3,4, and 5), it can cause rendering artefacts. | |
-| | **None** | Assigns no reference value. |
-| | **UserBit0** | Stencil bit number 6. |
-| | **UserBit1** | Stencil bit number 7. |
-| | **AllUserBits** | Stencil bit numbers 6 and 7. |
-| **Write Mask ** | Determines which bits this shader can write to during the stencil test. When you enable **Show Only HDRP Bits,** you can set a custom value for this property. **Important**: If you override a bit HDRP has reserved (bits 0,1,2,3,4, and 5), it can cause rendering artefacts. | |
-| | **None** | Assigns no reference value. |
-| | **UserBit0** | Stencil bit number 6. |
-| | **UserBit1** | Stencil bit number 7. |
-| | **AllUserBits** | Stencil bit numbers 6 and 7. |
-| **Comparison** | Determines the comparison function this shader uses during the stencil test. | |
-| | **Disabled** | Does not perform a stencil test. |
-| | **Never** | The stencil test never passes. |
-| | **Less** | The stencil test passes if the pixel's depth value is less than the value stored in the depth buffer. |
-| | **Equal** | The stencil test passes if the pixel's depth value is equal to the value stored in the depth buffer. |
-| | **Less Equal** | The stencil test passes if the pixel's depth value is less than or equal to than the depth buffer value. This renders the tested pixel in front of other pixels. |
-| | **Greater** | The stencil test passes if the pixel's depth value is greater than the value stored in the depth buffer. |
-| | **Not Equal** | The stencil test passes if the pixel's depth value is not equal to the value stored in the depth buffer. |
-| | **Greater Equal** | The stencil test passes if the pixel's depth value is greater than or equal to the value stored in the depth buffer. |
-| | **Always** | The stencil test always passes,and Unity does not compare the pixel's depth value to the value it has stored in the depth buffer. |
-| **Pass** | Determines the operation this shader executes if the stencil test succeeds. For more information on this property’s options, see [pass and fail options](#stencil-pass-fail). | |
-| **Fail** | Determines the operation this shader executes if the stencil test fails. For more information on this property’s options, see [pass and fail options](#stencil-pass-fail). | |
-| **Depth Fail** | Determines the operation this shader executes if the depth test fails. This option has no effect if the depth test **Comparison** value is **Never** or **Disabled.** For more information on this property’s options, see [pass and fail options](#stencil-pass-fail). | |
+| **Property** | **Description** |
+|-------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Show Only HDRP Bits** | Determines whether you can set a custom value for the **Reference**, **Read Mask** and **Write Mask** properties. |
+| **Reference** | Determines the stencil reference value this shader uses for all stencil operations. When you disable **Show Only HDRP Bits,** you can set a custom value for this property. **Important:** If you override a bit HDRP has reserved (bits 0, 1, 2, 3, 4, and 5), it can cause rendering artefacts. • **None**: Assigns no reference value. • **UserBit0**: Stencil bit number 6. • **UserBit1**: Stencil bit number 7. • **AllUserBits**: Stencil bit numbers 6 and 7. |
+| **Read Mask** | Determines which bits this shader can read during the stencil test. When you enable **Show Only HDRP Bits,** you can set a custom value for this property. **Important**: If you override a bit HDRP has reserved (bits 0, 1, 2, 3, 4, and 5), it can cause rendering artefacts. • **None**: Assigns no reference value. • **UserBit0**: Stencil bit number 6. • **UserBit1**: Stencil bit number 7. • **AllUserBits**: Stencil bit numbers 6 and 7. |
+| **Write Mask** | Determines which bits this shader can write to during the stencil test. When you enable **Show Only HDRP Bits,** you can set a custom value for this property. **Important**: If you override a bit HDRP has reserved (bits 0, 1, 2, 3, 4, and 5), it can cause rendering artefacts. • **None**: Assigns no reference value. • **UserBit0**: Stencil bit number 6. • **UserBit1**: Stencil bit number 7. • **AllUserBits**: Stencil bit numbers 6 and 7. |
+| **Comparison** | Determines the comparison function this shader uses during the stencil test. • **Disabled**: Does not perform a stencil test. • **Never**: The stencil test never passes. • **Less**: The stencil test passes if the pixel's depth value is less than the value stored in the depth buffer. • **Equal**: The stencil test passes if the pixel's depth value is equal to the value stored in the depth buffer. • **Less Equal**: The stencil test passes if the pixel's depth value is less than or equal to the depth buffer value. This renders the tested pixel in front of other pixels. • **Greater**: The stencil test passes if the pixel's depth value is greater than the value stored in the depth buffer. • **Not Equal**: The stencil test passes if the pixel's depth value is not equal to the value stored in the depth buffer. • **Greater Equal**: The stencil test passes if the pixel's depth value is greater than or equal to the value stored in the depth buffer. • **Always**: The stencil test always passes, and Unity does not compare the pixel's depth value to the value it has stored in the depth buffer. |
+| **Pass** | Determines the operation this shader executes if the stencil test succeeds. For more information on this property's options, see [pass and fail options](#stencil-pass-fail). |
+| **Fail** | Determines the operation this shader executes if the stencil test fails. For more information on this property's options, see [pass and fail options](#stencil-pass-fail). |
+| **Depth Fail** | Determines the operation this shader executes if the depth test fails. This option has no effect if the depth test **Comparison** value is **Never** or **Disabled.** For more information on this property's options, see [pass and fail options](#stencil-pass-fail). |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/gradient-sky-volume-override-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/gradient-sky-volume-override-reference.md
index ae498dbc8bc..78250794bcc 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/gradient-sky-volume-override-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/gradient-sky-volume-override-reference.md
@@ -1,61 +1,19 @@
# Gradient Sky Volume Override reference
-
-
The Gradient Sky Volume Override component exposes options that you can use to define how the High Definition Render Pipeline (HDRP) updates the indirect lighting the sky generates in the Scene.
[!include[](snippets/Volume-Override-Enable-Properties.md)]
Refer to [Create a gradient sky](create-a-gradient-sky.md) for more information.
-Property
-Description
-Top
-Use the color picker to select the color of the upper hemisphere of the sky.
-Middle
-Use the color picker to select the color of the horizon.
-Bottom
-Use the color picker to select the color of the lower hemisphere of the sky. This is below the horizon.
-Gradient Diffusion
-Set the size of the Middle property in the Skybox. Higher values make the gradient thinner, shrinking the size of the Middle section. Low values make the gradient thicker, increasing the size of the Middle section.
-Intensity Mode
-Use the drop-down to select the method that HDRP uses to calculate the sky intensity. • Exposure: HDRP calculates intensity from an exposure value in EV100. • Multiplier: HDRP calculates intensity from a flat multiplier.
-Exposure
-Set the amount of light per unit area that HDRP applies to the HDRI Sky cubemap. This property only appears when you select Exposure from the Intensity Mode drop-down.
-Multiplier
-Set the multiplier for HDRP to apply to the Scene as environmental light. HDRP multiplies the environment light in your Scene by this value. This property only appears when you select Multiplier from the Intensity Mode drop-down.
-Update Mode
-Use the drop-down to set the rate at which HDRP updates the sky environment (using Ambient and Reflection Probes). • On Changed: HDRP updates the sky environment when one of the sky properties changes. • On Demand: HDRP waits until you manually call for a sky environment update from a script. • Realtime: HDRP updates the sky environment at regular intervals defined by the Update Period.
+| **Property** | **Sub-property** | **Description** |
+|------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Top** | N/A | Use the color picker to select the color of the upper hemisphere of the sky. |
+| **Middle** | N/A | Use the color picker to select the color of the horizon. |
+| **Bottom** | N/A | Use the color picker to select the color of the lower hemisphere of the sky. This is below the horizon. |
+| **Gradient Diffusion** | N/A | Set the size of the Middle property in the Skybox. Higher values make the gradient thinner, shrinking the size of the Middle section. Low values make the gradient thicker, increasing the size of the Middle section. |
+| **Intensity Mode** | N/A | Use the drop-down to select the method that HDRP uses to calculate the sky intensity. • **Exposure**: HDRP calculates intensity from an exposure value in EV100. • **Multiplier**: HDRP calculates intensity from a flat multiplier. |
+| **Intensity Mode** | **Exposure** | Set the amount of light per unit area that HDRP applies to the HDRI Sky cubemap. This property only appears when you select Exposure from the Intensity Mode drop-down. |
+| **Intensity Mode** | **Multiplier** | Set the multiplier for HDRP to apply to the Scene as environmental light. HDRP multiplies the environment light in your Scene by this value. This property only appears when you select Multiplier from the Intensity Mode drop-down. |
+| **Update Mode** | N/A | Use the drop-down to set the rate at which HDRP updates the sky environment (using Ambient and Reflection Probes). • **On Changed**: HDRP updates the sky environment when one of the sky properties changes. • **On Demand**: HDRP waits until you manually call for a sky environment update from a script. • **Realtime**: HDRP updates the sky environment at regular intervals defined by the Update Period. |
+
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/hair-master-stack-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/hair-master-stack-reference.md
index 60086f16791..41aab9c46fc 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/hair-master-stack-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/hair-master-stack-reference.md
@@ -34,7 +34,7 @@ The Approximate model doesn’t automatically look realistic in every lighting s
The Approximate model is best for darker hair tones. For best results with lighter hair tones, use the Physical model.
-
+
@@ -61,7 +61,7 @@ Change the [**Base Color** block](hair-master-stack-reference.md#fragment-contex
The Physical Material Type is based on the [Marschner](http://www.graphics.stanford.edu/papers/hair/hair-sg03final.pdf) human hair fiber reflectance model.
-
+
@@ -84,7 +84,7 @@ The Scattering Mode options appear when you select the **Physical** material typ
| **Physical** | Physically simulates light transport through a volume of hair (multiple scattering). This feature is not available for public use yet. |
| **Approximate** | Estimates the appearance of light transport through a volume of hair (multiple scattering). This mode does not take into account how transmittance affects the way light travels and slows through a volume of hair. It also ignores the effect that a hair's roughness has on the spread of light. |
-
+
## Geometry Type
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdrp-scene-view-camera-settings-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdrp-scene-view-camera-settings-reference.md
index d2680be0fef..1ed2e3eae0f 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdrp-scene-view-camera-settings-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdrp-scene-view-camera-settings-reference.md
@@ -6,29 +6,9 @@ For information on the Scene view Camera settings menu and how to use it, refer
## Properties
-Property
-Description
-Camera Anti-aliasing
-Specifies the method the Scene view Camera uses for post-process anti-aliasing. The options are: • No Anti-aliasing: This Camera can process MSAA but doesn't process any post-process anti-aliasing. • Fast Approximate Anti-aliasing (FXAA): Smooths edges on a per-pixel level. This is the least resource-intensive anti-aliasing technique in HDRP. • Temporal Anti-aliasing (TAA): Uses frames from a history buffer to smooth edges more effectively than fast approximate anti-aliasing. • Subpixel Morphological Anti-aliasing (SMAA): Finds patterns in borders of the image and blends the pixels on these borders according to the pattern.
-Camera Stop NaNs
-Makes the Scene view Camera replace values that aren't a number (NaN) with a black pixel. This stops certain effects from breaking but is a resource-intensive process.
-Override Exposure
-Specifies whether to override the scene's exposure with a specific value.
-Scene Exposure
-The exposure value the Scene view Camera uses to override the scene's exposure. This property only appears when you enable Override Exposure.
+| **Property** | **Description** |
+|--------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Camera Anti-aliasing** | Specifies the method the Scene view Camera uses for post-process anti-aliasing. The options are: • No Anti-aliasing: This Camera can process MSAA but doesn't process any post-process anti-aliasing. • Fast Approximate Anti-aliasing (FXAA): Smooths edges on a per-pixel level. This is the least resource-intensive anti-aliasing technique in HDRP. • Temporal Anti-aliasing (TAA): Uses frames from a history buffer to smooth edges more effectively than fast approximate anti-aliasing. • Subpixel Morphological Anti-aliasing (SMAA): Finds patterns in borders of the image and blends the pixels on these borders according to the pattern. |
+| **Camera Stop NaNs** | Makes the Scene view Camera replace values that aren't a number (NaN) with a black pixel. This stops certain effects from breaking but is a resource-intensive process. |
+| **Override Exposure** | Specifies whether to override the scene's exposure with a specific value. • Scene Exposure: The exposure value the Scene view Camera uses to override the scene's exposure. This property only appears when you enable Override Exposure. |
+
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/index.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/index.md
index afe17158768..050880fb16f 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/index.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/index.md
@@ -1,6 +1,6 @@
# High Definition Render Pipeline overview
-
+
The High Definition Render Pipeline (HDRP) is a high-fidelity Scriptable Render Pipeline built by Unity to target modern (Compute Shader compatible) platforms.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/layered-lit-material.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/layered-lit-material.md
index da537265156..1f76decce95 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/layered-lit-material.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/layered-lit-material.md
@@ -4,7 +4,7 @@ The Layered Lit Shader allows you to stack up to four Materials on the same Game
The Layered Lit Shader is perfect for photogrammetry. For a tutorial on how to use it in a photogrammetry workflow, see the [Photogrammetry with the Layered Shader Expert Guide]().
-
+
## Creating a Layered Lit Material
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-material.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-material.md
index 9ede05fa899..250c3f645e6 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-material.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-material.md
@@ -2,7 +2,7 @@
The Lit Shader and the Lit Master Stack let you easily create realistic materials in the High Definition Render Pipeline (HDRP). They include options for effects like subsurface scattering, iridescence, vertex or pixel displacement, and decal compatibility. For more information about Materials, Shaders, and Textures, see the [Unity User Manual](https://docs.unity3d.com/Manual/Shaders.html).
-
+
@@ -10,8 +10,6 @@ The Lit Shader and the Lit Master Stack lets you easily create realistic materia
To create a new Lit Material, navigate to your Project's Asset window, right-click in the window and select **Create > Material**. This adds a new Material to your Unity Project’s Asset folder. When you create new Materials in HDRP, they use the Lit Shader by default.
-
-
Refer to [Lit Material Inspector reference](lit-material-inspector-reference.md) for more information.
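The steps above describe the Editor workflow. If you also need a Lit Material from a script, the following minimal sketch shows one way to create one at runtime, assuming the HDRP Lit shader is resolvable by the name `HDRP/Lit` and is included in the build; the class name and the `_BaseColor` property usage are illustrative, not taken from this page.

```csharp
using UnityEngine;

// Minimal sketch: create a Material that uses the HDRP Lit shader at runtime
// and assign it to this GameObject's renderer.
[RequireComponent(typeof(Renderer))]
public class CreateLitMaterialExample : MonoBehaviour
{
    void Start()
    {
        // Shader.Find only succeeds if the shader is included in the build,
        // for example because another Material in the project already uses it.
        Shader litShader = Shader.Find("HDRP/Lit");
        if (litShader == null)
        {
            Debug.LogWarning("HDRP/Lit shader not found in the build.");
            return;
        }

        var litMaterial = new Material(litShader);
        litMaterial.SetColor("_BaseColor", Color.red); // base color input of the Lit shader

        GetComponent<Renderer>().material = litMaterial;
    }
}
```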
## Creating a Lit Shader Graph
@@ -25,4 +23,4 @@ To create a Lit material in Shader Graph, use one of the following methods:
* Create a new Shader Graph. Go to **Assets** > **Create** > **Shader Graph** > **HDRP** and select **Lit Shader Graph**.
-Refer to [Lit Master Stack reference](lit-master-stack-reference.md) for more information.
\ No newline at end of file
+Refer to [Lit Master Stack reference](lit-master-stack-reference.md) for more information.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-tessellation-material.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-tessellation-material.md
index 00c7bfbe389..ad0702c1b02 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-tessellation-material.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/lit-tessellation-material.md
@@ -2,11 +2,11 @@
The Lit Tessellation Shader allows you to create Materials that use tessellation to provide adaptive vertex density for meshes. This means that you can render more detailed geometry without the need to create a model that contains a lot of vertices. This Shader also includes options for effects like subsurface scattering, iridescence, vertex or pixel displacement, and decal compatibility. For more information about Materials, Shaders, and Textures, see the [Unity User Manual](https://docs.unity3d.com/Manual/Shaders.html).
-
+
**Tessellation Mode** set to **None** (off).
-
+
**Tessellation Mode** set to **Phong** (on).
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/modify-materials-at-runtime.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/modify-materials-at-runtime.md
index 8a6aaf65250..da3bc995135 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/modify-materials-at-runtime.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/modify-materials-at-runtime.md
@@ -77,6 +77,4 @@ To make all Shader Variants you need available at runtime, you need to ensure Un
1. You can record the shader variants used during a play session and store them in a **Shader Variant Collection** asset. To do that, navigate to the Project Settings window, open the Graphics tab and select **Save to asset…**. This builds a collection containing all Shader Variants currently in use and saves them out as an asset. You must then add this asset to the list of Preloaded Shaders for the variants to be included in a build. A sketch of loading and warming up such a collection from a script appears after this list.
-
-
2. You can include at least one Material using each variant in your Assets folder. You must use this Material in a scene or place it in your Resources Folder, otherwise Unity ignores this Material when it builds the project.
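As a sketch of the first approach, the script below loads a Shader Variant Collection from a Resources folder and warms it up at startup so its variants compile before first use. The asset name `MyShaderVariants` and the class name are placeholders; `ShaderVariantCollection.WarmUp` is the Unity API that prewarms a collection, and adding the asset to the Preloaded Shaders list (as described above) achieves a similar effect without a script.

```csharp
using UnityEngine;

// Sketch: prewarm a Shader Variant Collection stored in a Resources folder.
// "MyShaderVariants" is a placeholder for the collection you saved from
// Project Settings > Graphics > Save to asset.
public class WarmUpShaderVariants : MonoBehaviour
{
    void Start()
    {
        var collection = Resources.Load<ShaderVariantCollection>("MyShaderVariants");
        if (collection != null && !collection.isWarmedUp)
        {
            // Compiles every variant in the collection now,
            // instead of on first use during gameplay.
            collection.WarmUp();
        }
    }
}
```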
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-understand.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-understand.md
index d3c355a9660..7551b698ad0 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-understand.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-understand.md
@@ -6,11 +6,11 @@ It enables HDRP to compute various effects (such as hard or soft shadows, mirror
A notable downside to path tracing is noise. Noise results from the random nature of the path tracing process; at each surface interaction, a new direction is chosen randomly. Noise vanishes as more paths accumulate and converge toward a clean image. For more information about path tracing limitations in HDRP, see [Unsupported features of path tracing](Ray-Tracing-Getting-Started.md#unsupported-features-of-path-tracing).
-
+
Noisy image with **Maximum Samples** set to 1
-
+
Clean image with **Maximum Samples** set to 256
@@ -30,11 +30,11 @@ This is because path tracing in HDRP implements more precise light transport sim
The images below display the difference between transparent, double-sided materials in a rasterized and a path-traced scene:
-
+
GameObjects without path tracing (rasterized).
-
+
GameObjects with path tracing enabled.
@@ -46,11 +46,11 @@ When you use path tracing, the **Double-Sided** property (menu: **Inspector** >
The following images display the same GameObjects with a single-sided Material and a double-sided material:
-
+
GameObjects with a single-sided Material and path tracing enabled
-
+
GameObjects with a double-sided Material and path tracing enabled
@@ -62,7 +62,7 @@ Path tracing changes the way refraction models on a Lit Material behave.
To change the refraction model a Lit Material uses, in the **Transparency Inputs** section, select a model from the **Refraction model** dropdown, displayed in the following image:
-
+
The following table describes how each refraction model behaves when you enable path tracing:
@@ -72,11 +72,11 @@ The following table describes how each refraction model behaves when you enable
| **Thin** | A thin surface type with [infinitesimal]() thickness. Select this for thin, window-like surfaces. When you enable path tracing, the **Thin** refraction model behaves the same as in rasterization. | This refraction model is compatible with a double-sided Material that has its **Normal mode** set to **Flip** or **Mirror**. |
| **None** | A thin, refractive surface hardcoded to be smooth to simulate alpha blending. When you enable path tracing, the **None** refraction model behaves the same as in rasterization. | This refraction model is compatible with a double-sided Material that has its **Normal mode** set to **Flip** or **Mirror**. |
-
+
From left to right, a GameObject with **Sphere**, **Box,** and **Thin** mode without path tracing (rasterized).
-
+
From left to right, a GameObject with **Sphere**, **Box,** and **Thin** mode with path tracing enabled.
@@ -87,15 +87,15 @@ For [subsurface scattering's](skin-and-diffusive-surfaces-subsurface-scattering.
1. Open the **Surface Options** window.
2. Enable the **Double-Sided** property (B).
-
+
The following example images display a sheet of fabric lit from below by a point light. The first image shows a single-sided surface, and the second shows a double-sided surface:
-
+
A single-sided surface with Transmission disabled.
-
+
A double-sided surface with Transmission enabled.
@@ -179,4 +179,4 @@ public class exampleBehavior : MonoBehaviour
}
```
-This script allows you to render a first batch of images with 512 samples and a seed offset of zero by setting the Seed mode to custom. A second batch of images is then generated with the Seed offset set to 512 to ensure different random numbers. By doing this, the images are added together and the end result is more converged.
\ No newline at end of file
+This script sets the Seed mode to Custom so you can render a first batch of images with 512 samples and a seed offset of zero, then a second batch with the seed offset set to 512 so that each batch uses different random numbers. Adding the two batches together produces a more converged result.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md
index 797503a0456..10d43f02325 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md
@@ -26,7 +26,7 @@ Adaptive Probe Volumes have the following advantages:

The car model is made up of separate GameObjects. The left scene uses Light Probe Groups, which use per-object lighting, so each part of the car samples a single blended probe value. The right scene uses Adaptive Probe Volumes, which use per-pixel lighting, so each part of the car samples its nearest probes. This image uses the ArchVizPRO Photostudio HDRP asset from the Unity Asset Store.
-
+
In the left scene, Reflection Probe Normalization is disabled. In the right scene, Reflection Probe Normalization is enabled, and there's less specular light leaking on the kitchen cabinet. This image uses the ArchVizPRO Interior Vol.5 HDRP asset from the Unity Asset Store.
Adaptive Probe Volumes have the following limitations:
@@ -42,7 +42,7 @@ HDRP uses bricks with different sizes to match the amount of geometry in differe
The default Light Probe spacing is 1, 3, 9, or 27 m.
-
+
In this screenshot from the Rendering Debugger, the small purple bricks contain Light Probes spaced 1 meter apart, to capture data from high-geometry areas. The large blue bricks contain Light Probes spaced 3 meters apart, to capture data from areas with less geometry.
Each pixel of a GameObject samples lighting data from the eight closest Light Probes around it.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md
index eed706afcce..41f55e0dfc1 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md
@@ -15,16 +15,20 @@ You can do the following:
If the Rendering Debugger displays invalid probes when you select **Display Probes**, refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md).
-
+
+
The Rendering Debugger with **Display Probes** enabled.
-
+
+
The Rendering Debugger with **Display Bricks** enabled.
-
+
+
The Rendering Debugger with **Display Cells** enabled.
-
+
+
The Rendering Debugger with **Debug Probe Sampling** enabled
Refer to [Rendering Debugger window reference](rendering-debugger-window-reference.md#ProbeVolume) for more information.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-path-tracing.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-path-tracing.md
index fdb2786fdde..452a561fef4 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-path-tracing.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-path-tracing.md
@@ -14,22 +14,22 @@
| **Seed Mode** | Set how the path tracer generates random numbers. The seed is the pattern the noise has. When accumulating samples, every frame needs a different seed. Otherwise, the same noisy image gets accumulated over and over. **Seed Mode** has the following options: • **Non Repeating**: This is the default option. The seed is chosen based on the camera frame count. When the accumulation resets, it is not reset to zero. • **Repeating**: The seed is reset every time the accumulation is reset. Rendering of every image is done using the same random numbers. • **Custom**: Set the seed using a custom script. For more information, see the example in [Understand path tracing](path-tracing-understand.md).|
| **Denoising** | Denoises the output of the path tracer. This setting is only available when you install the **Unity Denoising** Package. **Denoising** has the following options: • **None**: Does not denoise (this is the default option). • **Intel Open Image Denoise** : Uses the Intel Open Image Denoise library to denoise the frame. • **NVIDIA OptiX** : Uses NVIDIA OptiX to denoise the frame.
You can also enable the following additional settings: • **Use AOVs** (Arbitrary Output Variables): Increases the amount of detail kept in the frame after HDRP denoises it. • **Temporal**: Improves the temporal consistency of denoised frame sequences. • **Separate Volumetrics**: Denoises the volumetric scattering effect separately for a smoother fog. When Separate Volumetrics is enabled, the Temporal setting will not improve volumetric fog temporal stability. |
-
+
**Minimum Depth** set to 1, **Maximum Depth** set to 2: direct and indirect lighting (1 bounce)
-
+
**Minimum Depth** set to 1, **Maximum Depth** set to 1: direct lighting only
-
+
**Minimum Depth** set to 2, **Maximum Depth** set to 2: indirect lighting only (1 bounce)
-
+
**Denoising** set to NVIDIA Optix, **Separate Volumetrics** set to Off
-
+
**Denoising** set to NVIDIA Optix, **Separate Volumetrics** set to On
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-shadows-volume-override.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-shadows-volume-override.md
index e51e6ac9b5f..1ee2738b7c7 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-shadows-volume-override.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-shadows-volume-override.md
@@ -2,8 +2,6 @@
## Properties
-
-
[!include[](snippets/Volume-Override-Enable-Properties.md)]
| **Property** | **Description** |
@@ -18,4 +16,4 @@
| **Border 1** | Set the size of the border between the first and second cascade split. HDRP fades the shadow cascades between these two sections over this border. The **Working Unit** defines the unit this property uses. |
| **Border 2** | Set the size of the border between the second and third cascade split. HDRP fades the shadow cascades between these two sections over this border. The **Working Unit** defines the unit this property uses. |
| **Border 3** | Set the size of the border between the third and final cascade split. HDRP fades the shadow cascades between these two sections over this border. The **Working Unit** defines the unit this property uses. |
-| **Border 4** | Set the size of the border at the end of the last cascade split. HDRP fades the final shadow cascade out over this distance. The **Working Unit** defines the unit this property uses. |
\ No newline at end of file
+| **Border 4** | Set the size of the border at the end of the last cascade split. HDRP fades the final shadow cascade out over this distance. The **Working Unit** defines the unit this property uses. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md
index 0b3bb1b5be1..b8bfd77fc98 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md
@@ -22,10 +22,10 @@ Refer to [Use the Rendering debugger](use-the-rendering-debugger.md) for more in
The **Decals** panel has tools that you can use to debug [decals](decal-material-inspector-reference.md) affecting transparent objects in your project.
-| **Property** | **Description** |
-| ----------------- | ------------------------------------------------------------ |
-| **Display Atlas** | Enable the checkbox to display the decal atlas for a Camera in the top left of that Camera's view. |
-| **Mip Level** | Use the slider to select the mip level for the decal atlas. The higher the mip level, the blurrier the decal atlas. |
+| **Property** | **Description** |
+|-------------------|---------------------------------------------------------------------------------------------------------------------|
+| **Display Atlas** | Enable the checkbox to display the decal atlas for a Camera in the top left of that Camera's view. |
+| **Mip Level** | Use the slider to select the mip level for the decal atlas. The higher the mip level, the blurrier the decal atlas. |
@@ -41,27 +41,18 @@ Use the [runtime shortcuts](#Navigation at runtime) to open the Display stats wi
The Frame Stats section displays the average, minimum, and maximum value of each property. HDRP calculates each Frame Stat value over the 30 most recent frames.
-| **Property** | | **Description** |
-| ---------------------------- | ----------------------- | ------------------------------------------------------------ |
-| **Frame Rate** | | The frame rate (in frames per second) for the current camera view. |
-| **Frame Time** | | The total frame time for the current camera view. |
-| **CPU Main Thread Frame** | | The total time (in milliseconds) between the start of the frame and the time when the Main Thread finished the job. |
-| **CPU Render Thread Frame** | | The time (in milliseconds) between the start of the work on the Render Thread and the time Unity waits to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)). |
-| **CPU Present Wait** | | The time (in milliseconds) that the CPU spent waiting for Unity to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)) during the last frame. |
-| **GPU Frame** | | The amount of time (in milliseconds) the GPU takes to render a given frame. |
-| **RT Mode** | | When you [enable ray tracing](Ray-Tracing-Getting-Started.md), this property shows the ray tracing quality mode that HDRP uses during rendering. HDRP updates this value once every frame based on the previous frame. |
-| **Count Rays** | | Count the number of traced rays for each effect (in MRays / frame). This property only appears when you enable ray tracing. |
-| | **Ambient Occlusion** | The number of rays that HDRP traced for [Ambient Occlusion (AO)](Ambient-Occlusion.md) computations, when you enable realtime ambient occlusion (RT AO). |
-| | **Shadows Directional** | The number of rays that HDRP traced for [directional lights](Light-Component.md) when you enable ray-traced shadows. |
-| | **Shadows Area** | The number of rays that HDRP traced towards area lights when you enable [ray-traced shadows](Ray-Traced-Shadows.md). |
-| | **Shadows Point/Spot** | The number of rays that HDRP traced towards point and spot lights when you enable ray-traced shadows. |
-| | **Reflection Forward** | The number of rays that HDRP traced for reflection computations that use [forward shading](Forward-And-Deferred-Rendering.md). |
-| | **Reflection Deferred** | The number of rays that HDRP traced for reflection computations that use [deferred shading](Forward-And-Deferred-Rendering.md). |
-| | **Diffuse GI Forward** | The number of rays that HDRP traced for diffuse [Global Illumination (GI)](Ray-Traced-Global-Illumination.md) computations that use forward shading. |
-| | **Diffuse GI Deferred** | The number of rays that HDRP traced for diffuse Global Illumination (GI) computations that use deferred shading. |
-| | **Recursive** | The number of rays that HDRP traced for diffuse Global Illumination (GI) computations when you enable recursive ray tracing. |
-| | **Total** | The total number of rays that HDRP traced. |
-| **Debug XR Layout** | | Display debug information for XR passes. This mode is only available in editor and development builds. |
+| **Property** | **Description** |
+|-----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Frame Rate** | The frame rate (in frames per second) for the current camera view. |
+| **Frame Time** | The total frame time for the current camera view. |
+| **CPU Main Thread Frame** | The total time (in milliseconds) between the start of the frame and the time when the Main Thread finished the job. |
+| **CPU Render Thread Frame** | The time (in milliseconds) between the start of the work on the Render Thread and the time Unity waits to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)). |
+| **CPU Present Wait** | The time (in milliseconds) that the CPU spent waiting for Unity to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)) during the last frame. |
+| **GPU Frame** | The amount of time (in milliseconds) the GPU takes to render a given frame. |
+| **RT Mode** | When you [enable ray tracing](Ray-Tracing-Getting-Started.md), this property shows the ray tracing quality mode that HDRP uses during rendering. HDRP updates this value once every frame based on the previous frame. |
+| **Count Rays** | Count the number of traced rays for each effect (in MRays / frame). This property only appears when you enable ray tracing. • Ambient Occlusion: The number of rays that HDRP traced for Ambient Occlusion (AO) computations when you enable realtime ambient occlusion (RT AO). • Shadows Directional: The number of rays that HDRP traced for directional lights when you enable ray-traced shadows. • Shadows Area: The number of rays that HDRP traced towards area lights when you enable ray-traced shadows. • Shadows Point/Spot: The number of rays that HDRP traced towards point and spot lights when you enable ray-traced shadows. • Reflection Forward: The number of rays that HDRP traced for reflection computations that use forward shading. • Reflection Deferred: The number of rays that HDRP traced for reflection computations that use deferred shading. • Diffuse GI Forward: The number of rays that HDRP traced for diffuse Global Illumination (GI) computations that use forward shading. • Diffuse GI Deferred: The number of rays that HDRP traced for diffuse Global Illumination (GI) computations that use deferred shading. • Recursive: The number of rays that HDRP traced for diffuse Global Illumination (GI) computations when you enable recursive ray tracing. • Total: The total number of rays that HDRP traced. |
+| **Debug XR Layout** | Display debug information for XR passes. This mode is only available in editor and development builds. |
+
@@ -75,12 +66,12 @@ The **Bottlenecks** section describes the distribution of the last 60 frames acr
#### Bottleneck categories
-| **Category** | **Description** |
-| ------------------- | ------------------------------------------------------------ |
-| **CPU** | The percentage of the last 60 frames in which the CPU limited the frame time. |
-| **GPU** | The percentage of the last 60 frames in which the GPU limited the frame time. |
-| **Present limited** | The percentage of the last 60 frames in which the frame time was limited by the following presentation constraints: • Vertical Sync (Vsync): Vsync synchronizes rendering to the refresh rate of your display. •[Target framerate]([Application.targetFrameRate](https://docs.unity3d.com/ScriptReference/Application-targetFrameRate.html)): A function that you can use to manually limit the frame rate of an application. If a frame is ready before the time you specify in targetFrameRate, Unity waits before presenting the frame. |
-| **Balanced** | This percentage of the last 60 frames in which the frame time was not limited by any of the above categories. A frame that is 100% balanced indicates the processing time for both CPU and GPU is approximately equal. |
+| **Category** | **Description** |
+|---------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **CPU** | The percentage of the last 60 frames in which the CPU limited the frame time. |
+| **GPU** | The percentage of the last 60 frames in which the GPU limited the frame time. |
+| **Present limited** | The percentage of the last 60 frames in which the frame time was limited by the following presentation constraints: • Vertical Sync (Vsync): Vsync synchronizes rendering to the refresh rate of your display. • Target framerate ([Application.targetFrameRate](https://docs.unity3d.com/ScriptReference/Application-targetFrameRate.html)): A property that you can use to manually limit the frame rate of an application. If a frame is ready before the time you specify in targetFrameRate, Unity waits before presenting the frame. |
+| **Balanced** | The percentage of the last 60 frames in which the frame time was not limited by any of the above categories. A frame that is 100% balanced indicates the processing time for both CPU and GPU is approximately equal. |
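The **Present limited** row above refers to [`Application.targetFrameRate`](https://docs.unity3d.com/ScriptReference/Application-targetFrameRate.html). As a minimal illustration of that Unity API (not part of the Rendering Debugger itself), the following sketch caps the frame rate; frames that finish early can then show up as present limited. The class name and the value 60 are illustrative.

```csharp
using UnityEngine;

// Sketch: cap the application frame rate. If a frame is ready before the
// target frame time, Unity waits before presenting it, which the Display
// Stats panel can report as a "Present limited" frame.
public class FrameRateCap : MonoBehaviour
{
    [SerializeField] int targetFrameRate = 60; // illustrative value

    void Start()
    {
        // The default value of -1 means the platform's default frame rate.
        Application.targetFrameRate = targetFrameRate;
    }
}
```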
#### Bottleneck example
@@ -99,12 +90,12 @@ In this example, the bottleneck is the GPU.
The Detailed Stats section displays the amount of time in milliseconds that each rendering step takes on the CPU and GPU. HDRP updates these values once every frame based on the previous frame.
-| **Property** | **Description** |
-| -------------------------------- | ------------------------------------------------------------ |
-| Update every second with average | Calculate average values over one second and update every second. |
-| Hide empty scopes | Hide profiling scopes that use 0.00ms of processing time on the CPU and GPU. |
-| Count Rays | Count the number of traced rays for each effect (in MRays / frame). This mode only appears when you enable ray tracing. |
-| Debug XR Layout | Enable to display debug information for [XR](https://docs.unity3d.com/Manual/XR.html) passes. This mode only appears in the editor and development builds. |
+| **Property** | **Description** |
+|----------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Update every second with average | Calculate average values over one second and update every second. |
+| Hide empty scopes | Hide profiling scopes that use 0.00ms of processing time on the CPU and GPU. |
+| Count Rays | Count the number of traced rays for each effect (in MRays / frame). This mode only appears when you enable ray tracing. |
+| Debug XR Layout | Enable to display debug information for [XR](https://docs.unity3d.com/Manual/XR.html) passes. This mode only appears in the editor and development builds. |
@@ -112,95 +103,26 @@ The Detailed Stats section displays the amount of time in milliseconds that each
The **Material** panel has tools that you can use to visualize different Material properties.
-
-
-
-
Property
-
Description
-
-
-
-
-
Common Material Property
-
Use the drop-down to select a Material property to visualize on every GameObject on screen. All HDRP Materials share the properties available.
-
-
-
Material
-
Use the drop-down to select a Material property to visualize on every GameObject on screen using a specific Shader. The properties available depend on the HDRP Material type you select in the drop-down.
-
-
-
Rendering Layer Mask
-
These parameters only appear when you set the Material Debug Option to Rendering Layers.
-
-
-
Filter Light Layers by Light
-
Enable the checkbox to visualize GameObjects that the selected light affects.
-
-
-
Use Light's Shadow Layer Mask
-
Enable the checkbox to visualize GameObjects that cast shadows for the selected light.
-
-
-
Filter Layers
-
Use the drop-down to filter layers that you want to display. GameObjects that have a matching layer appear in a specific color. Use **Layers Color** to define this color.
-
-
-
Layers Color
-
Use the color pickers to select the display color of each rendering layer.
-
-
-
Engine
-
Use the drop-down to select a Material property to visualize on every GameObject on a screen that uses a specific Shader. The properties available are the same as Material but are in the form that the lighting engine uses them (for example, Smoothness is Perceptual Roughness).
-
-
-
Attributes
-
Use the drop-down to select a 3D GameObject attribute, like Texture Coordinates or Vertex Color, to visualize on screen.
-
-
-
-
Properties
-
Use the drop-down to select a property that the debugger uses to highlight GameObjects on screen. The debugger highlights GameObjects that use a Material with the property that you select.
-
-
-
-
GBuffer
-
Use the drop-down to select a property to visualize from the GBuffer for deferred Materials.
-
-
-
Material Validator
-
Use the drop-down to select properties to display validation colors for:
Diffuse Color: Select this option to check if the diffuse colors in your Scene adheres to an acceptable PBR range. If the Material color is out of this range, the debugger displays it in the Too High Color color if it's above the range, or in the Too Low Color if it's below the range.
-
Metal or SpecularColor: Select this option to check if a pixel contains a metallic or specular color that adheres to an acceptable PBR range. If it doesn't, the debugger highlights it in the Not A Pure Metal Color. For information about the acceptable PBR ranges in Unity, see the Material Charts documentation.
-
-
-
Too High Color
-
Use the color picker to select the color that the debugger displays when a Material's diffuse color is above the acceptable PBR range. This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
-
-
-
Too Low Color
-
Use the color picker to select the color that the debugger displays when a Material's diffuse color is below the acceptable PBR range. This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
-
-
-
Not A Pure Metal Color
-
Use the color picker to select the color that the debugger displays if a pixel defined as metallic has a non-zero albedo value. The debugger only highlights these pixels if you enable the True Metals checkbox. This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
-
-
Pure Metals
-
Enable the checkbox to make the debugger highlight any pixels which Unity defines as metallic, but which have a non-zero albedo value. The debugger uses the Not A Pure Metal Color to highlight these pixels. This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
-
-
-
Override Global Material Texture Mip Bias
-
Enable the checkbox to override the mipmap level bias of texture samplers in material shaders. Use the Debug Global Material Texture Mip Bias Value to control the mipmap level bias override. When using this feature, be aware of the following:
-
-
It only affects gbuffer, forward opaque, transparency and decal passes.
-
It doesn't affect virtual texturing sampling.
-
It doesn't affect custom passes.
-
-
-
-
Debug Global Material Texture Mip Bias Value
-
Use the slider to control the amount of mip bias of texture samplers in material shaders.
-
-
-
+| **Property** | **Sub-property** | **Description** |
+|--------------------------------------------------|-----------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Common Material Property** | N/A | Use the drop-down to select a Material property to visualize on every GameObject on screen. All HDRP Materials share the properties available. |
+| **Material** | N/A | Use the drop-down to select a Material property to visualize on every GameObject on screen using a specific Shader. The properties available depend on the HDRP Material type you select in the drop-down. |
+| **Rendering Layer Mask** | N/A | These parameters only appear when you set the Material Debug Option to Rendering Layers. |
+| **Rendering Layer Mask** | **Filter Light Layers by Light** | Enable the checkbox to visualize GameObjects that the selected light affects. |
+| **Rendering Layer Mask** | **Use Light's Shadow Layer Mask** | Enable the checkbox to visualize GameObjects that cast shadows for the selected light. |
+| **Rendering Layer Mask** | **Filter Layers** | Use the drop-down to filter layers that you want to display. GameObjects that have a matching layer appear in a specific color. Use **Layers Color** to define this color. |
+| **Rendering Layer Mask** | **Layers Color** | Use the color pickers to select the display color of each rendering layer. |
+| **Engine** | N/A | Use the drop-down to select a Material property to visualize on every GameObject on a screen that uses a specific Shader. The properties available are the same as Material but are in the form that the lighting engine uses them (for example, Smoothness is Perceptual Roughness). |
+| **Attributes** | N/A | Use the drop-down to select a 3D GameObject attribute, like Texture Coordinates or Vertex Color, to visualize on screen. |
+| **Properties** | N/A | Use the drop-down to select a property that the debugger uses to highlight GameObjects on screen. The debugger highlights GameObjects that use a Material with the property that you select. |
+| **GBuffer** | N/A | Use the drop-down to select a property to visualize from the GBuffer for deferred Materials. |
+| **Material Validator** | N/A | Use the drop-down to select properties to display validation colors for: • Diffuse Color: Select this option to check if the diffuse colors in your Scene adhere to an acceptable PBR range. If the Material color is out of this range, the debugger displays it in the Too High Color color if it's above the range, or in the Too Low Color if it's below the range. • Metal or SpecularColor: Select this option to check if a pixel contains a metallic or specular color that adheres to an acceptable PBR range. If it doesn't, the debugger highlights it in the Not A Pure Metal Color. For information about the acceptable PBR ranges in Unity, see the Material Charts documentation. |
+| **Material Validator** | **Too High Color** | Use the color picker to select the color that the debugger displays when a Material's diffuse color is above the acceptable PBR range. |
+| **Material Validator** | **Too Low Color** | Use the color picker to select the color that the debugger displays when a Material's diffuse color is below the acceptable PBR range. This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down. |
+| **Material Validator** | **Not A Pure Metal Color** | Use the color picker to select the color that the debugger displays if a pixel defined as metallic has a non-zero albedo value. The debugger only highlights these pixels if you enable the True Metals checkbox. This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down. |
+| **Material Validator** | **Pure Metals** | Enable the checkbox to make the debugger highlight any pixels which Unity defines as metallic, but which have a non-zero albedo value. The debugger uses the Not A Pure Metal Color to highlight these pixels. This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down. |
+| **Override Global Material Texture Mip Bias** | N/A | Enable the checkbox to override the mipmap level bias of texture samplers in material shaders. Use the Debug Global Material Texture Mip Bias Value to control the mipmap level bias override. When using this feature, be aware of the following: • It only affects gbuffer, forward opaque, transparency, and decal passes. • It doesn't affect virtual texturing sampling. • It doesn't affect custom passes. |
+| **Debug Global Material Texture Mip Bias Value** | N/A | Use the slider to control the amount of mip bias of texture samplers in material shaders. |
If the geometry or the shading normal is denormalized, the view renders the target pixel red.
@@ -210,266 +132,57 @@ If the geometry or the shading normal is denormalized, the view renders the targ
The **Lighting** panel has tools that you can use to visualize various components of the lighting system in your Scene, like, shadowing and direct/indirect lighting.
-
-
-
-
-
Shadow Debug Option
-
Description
-
-
-
-
-
Debug Mode
-
Use the drop-down to select which shadow debug information to overlay on the screen:
-
-
None: Select this mode to remove the shadow debug information from the screen.
-
VisualizePunctualLightAtlas: Select this mode to overlay the shadow atlas for Punctual Lights in your Scene.
-
VisualizeDirectionalLightAtlas: Select this mode to overlay the shadow atlas for Directional Lights in your Scene.
-
VisualizeAreaLightAtlas: Select this mode to overlay the shadow atlas for area Lights in your Scene.
-
VisualizeShadowMap: Select this mode to overlay a single shadow map for a Light in your Scene.
-
SingleShadow: Select this mode to replace the Scene's lighting with a single Light. To select which Light to isolate, see Use Selection or Shadow Map Index.
-
-
-
-
Use Selection
-
Enable the checkbox to display the shadow map for the Light you select in the Scene. This property only appears when you select VisualizeShadowMap or SingleShadow from the Shadow Debug Mode drop-down.
-
-
-
Shadow Map Index
-
Use the slider to select the index of the shadow map to view. To use this property correctly, you must have at least one Light in your Scene that uses shadow maps.
-
-
-
Global Scale Factor
-
Use the slider to set the global scale that HDRP applies to the shadow rendering resolution.
-
-
-
Clear Shadow Atlas
-
Enable the checkbox to clear the shadow atlas every frame.
-
-
-
Range Minimum Value
-
Set the minimum shadow value to display in the various shadow debug overlays.
-
-
-
Range Maximum Value
-
Set the maximum shadow value to display in the various shadow debug overlays.
-
-
-
Log Cached Shadow Atlas Status
-
Set the maximum shadow value to display in the various shadow debug overlays.
-
-
-
-
-
-
-
-
Lighting Debug Option
-
Description
-
-
-
-
Show Lights By Type
-
Allows the user to enable or disable lights in the scene based on their type.
-
-
-
Directional Lights
-
Enable the checkbox to see Directional Lights in your Scene. Disable this checkbox to remove Directional Lights from your Scene's lighting.
-
-
-
Punctual Lights
-
Enable the checkbox to see Punctual Lights in your Scene. Disable this checkbox to remove Punctual Lights from your Scene's lighting.
-
-
-
Area Lights
-
Enable the checkbox to see Area Lights in your Scene. Disable this checkbox to remove Aera Lights from your Scene's lighting.
-
-
-
Reflection Probes
-
Enable the checkbox to see Reflection Probes in your Scene. Disable this checkbox to remove Reflection Probes from your Scene's lighting.
-
-
-
Exposure
-
Allows you to select an Exposure debug mode to use.
-
-
-
Debug Mode
-
Use the drop-down to select a debug mode. See Exposure documentation for more information.
-
-
-
Show Tonemap curve
-
Enable the checkbox to overlay the tonemap curve to the histogram debug view. This property only appears when you select HistogramView from Debug Mode.
-
-
-
Center Around Exposure
-
Enable the checkbox to center the histogram around the current exposure value. This property only appears when you select HistogramView from Debug Mode.
-
-
-
Display RGB Histogram
-
Enable the checkbox to display the Final Image Histogram as an RGB histogram instead of just luminance. This property only appears when you select FinalImageHistogramView from Debug Mode.
-
-
-
Display Mask Only
-
Enable the checkbox to display only the metering mask in the picture-in-picture. When disabled, the mask displays after weighting the scene color instead. This property only appears when you select MeteringWeighted from Debug Mode.
-
-
-
Debug Exposure Compensation
-
Set an additional exposure compensation for debug purposes.
-
-
-
Debug Mode
-
Use the drop-down to select a lighting mode to debug. For example, you can visualize diffuse lighting, specular lighting, direct diffuse lighting, direct specular lighting, indirect diffuse lighting, indirect specular lighting, emissive lighting and Directional Light shadow cascades.
-
-
-
Hierarchy Debug Mode
-
Use the drop-down to select a light type to display the direct lighting for or a Reflection Probe type to display the indirect lighting for.
-
-
-
-
-
-
-
Material Overrides
-
Description
-
-
-
-
-
Override Smoothness
-
Enable the checkbox to override the smoothness for the entire Scene.
-
-
-
Smoothness
-
Use the slider to set the smoothness override value that HDRP uses for the entire Scene.
-
-
-
Override Albedo
-
Enable the checkbox to override the albedo for the entire Scene.
-
-
-
Albedo
-
Use the color picker to set the albedo color that HDRP uses for the entire Scene.
-
-
-
Override Normal
-
Enable the checkbox to override the normals for the entire Scene with object normals for lighting debug.
-
-
-
Override Specular Color
-
Enable the checkbox to override the specular color for the entire Scene.
-
-
-
Specular Color
-
Use the color picker to set the specular color that HDRP uses for the entire Scene.
-
-
-
Override Ambient Occlusion
-
Enable the checkbox to override the ambient occlusion for the entire Scene.
-
-
-
Ambient Occlusion
-
Use the slider to set the Ambient Occlusion override value that HDRP uses for the entire Scene.
-
-
-
Override Emissive Color
-
Enable the checkbox to override the emissive color for the entire Scene.
-
-
-
Emissive Color
-
Use the color picker to set the emissive color that HDRP uses for the entire Scene.
-
-
-
-
-
-
-
-
Property
-
Description
-
-
-
-
-
Fullscreen Debug Mode
-
Use the drop-down to select a fullscreen lighting effect to debug. For example, you can visualize Contact Shadows, the depth pyramid, and indirect diffuse lighting. You can also use some of those Lighting Fullscreen Debug Modes to debug Ray-Traced effects.
-
-
-
Tile/Cluster Debug
-
Use the drop-down to select an internal HDRP lighting structure to visualize on screen.
-
-
None: Select this option to disable this debug feature.
-
Tile: Select this option to display an overlay of each lighting tile, and the number of lights in them.
-
Cluster: Select this option to display an overlay of each lighting cluster that intersects opaque geometry, and the number of lights in them.
-
Material Feature Variants: Select this option to display the index of the lighting Shader variant that HDRP uses for a tile. You can find variant descriptions in the lit.hlsl file.
-
-
-
-
Tile/Cluster Debug By Category
-
Use the drop-down to select the Light type that you want to display the Tile/Cluster debug information for. The options include Light Types, Decals, and Local Volumetric Fog. This property only appears when you select Tile or Cluster from the Tile/Cluster Debug drop-down.
-
-
-
Cluster Debug Mode
-
Use the drop-down to select the visualization mode for the cluster. The options are: VisualizeOpaque: displays cluster information on opaque geometry. VisualizeSlice: Displays cluster information at a set distance from the camera. This property only appears when you select Cluster from the Tile/Cluster Debug drop-down..
-
-
-
Cluster Distance
-
Use this slider to set the distance from the camera at which to display the cluster slice. This property only appears when you select VisualizeSlice from the Cluster Debug Mode drop-down.
-
-
-
Display Sky Reflection
-
Enable the checkbox to display an overlay of the cube map that the current sky generates and HDRP uses for lighting.
-
-
-
Sky Reflection Mipmap
-
Use the slider to set the mipmap level of the sky reflection cubemap. Use this to view the sky reflection cubemap's different mipmap levels. This property only appears when you enable the Display Sky Reflection checkbox.
-
-
-
Display Light Volumes
-
Enable the checkbox to display an overlay of all light bounding volumes.
-
-
-
Light Volume Debug Type
-
Use the drop-down to select the method HDRP uses to display the light volumes.
-
-
Gradient: Select this option to display the light volumes as a gradient.
-
ColorAndEdge: Select this option to display the light volumes as a plain color (a different color for each Light Type) with a red border for readability. This property only appears when you enable the Display Light Volumes checkbox.
-
-
-
-
Max Debug Light Count
-
Use the slider to rescale the gradient. Lower this value to make the screen turn red faster. Use this property to change the maximum acceptable number of lights for your application and still see areas in red. This property only appears when you set the Display Light Volumes mode to Gradient.
-
-
-
Display Cookie Atlas
-
Enable the checkbox to display an overlay of the cookie atlas.
-
-
-
Mip Level
-
Use the slider to set the mipmap level of the cookie atlas. This property only appears when you enable the Display Cookie Atlas checkbox.
-
-
-
Clear Cookie Atlas
-
Enable the checkbox to clear the cookie atlas at each frame. This property only appears when you enable the Display Cookie Atlas checkbox.
-
-
-
Display Planar Reflection Atlas
-
Enable the checkbox to display an overlay of the planar reflection atlas.
-
-
-
Mip Level
-
Use the slider to set the mipmap level of the planar reflection atlas. This property only appears when you enable the Display Planar Reflection Atlas checkbox.
-
-
-
Clear Planar Atlas
-
Enable the checkbox to clear the planar reflection atlas at each frame. This property only appears when you enable the Display Planar Reflection Atlas checkbox.
-
-
-
Debug Overlay Screen Ratio
-
Set the size of the debug overlay textures with a ratio of the screen size. The default value is 0.33 which is 33% of the screen size.
-
-
-
+| **Shadow Debug Option** | **Sub-option** | **Description** |
+|------------------------------------|----------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Debug Mode** | N/A | Use the drop-down to select which shadow debug information to overlay on the screen: • None: Select this mode to remove the shadow debug information from the screen. • VisualizePunctualLightAtlas: Select this mode to overlay the shadow atlas for Punctual Lights in your Scene. • VisualizeDirectionalLightAtlas: Select this mode to overlay the shadow atlas for Directional Lights in your Scene. • VisualizeAreaLightAtlas: Select this mode to overlay the shadow atlas for Area Lights in your Scene. • VisualizeShadowMap: Select this mode to overlay a single shadow map for a Light in your Scene. • SingleShadow: Select this mode to replace the Scene's lighting with a single Light. To select which Light to isolate, see Use Selection or Shadow Map Index. |
+| **Debug Mode** | **Use Selection** | Enable the checkbox to display the shadow map for the Light you select in the Scene. This property only appears when you select VisualizeShadowMap or SingleShadow from the Shadow Debug Mode drop-down. |
+| **Debug Mode** | **Shadow Map Index** | Use the slider to select the index of the shadow map to view. To use this property correctly, you must have at least one Light in your Scene that uses shadow maps. |
+| **Global Scale Factor** | N/A | Use the slider to set the global scale that HDRP applies to the shadow rendering resolution. |
+| **Clear Shadow Atlas** | N/A | Enable the checkbox to clear the shadow atlas every frame. |
+| **Range Minimum Value** | N/A | Set the minimum shadow value to display in the various shadow debug overlays. |
+| **Range Maximum Value** | N/A | Set the maximum shadow value to display in the various shadow debug overlays. |
+| **Log Cached Shadow Atlas Status** | N/A | Log the current status of the cached shadow atlas to the Console. |
+
+| **Lighting Debug Option** | **Description** |
+|---------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Show Lights By Type** | Allows the user to enable or disable lights in the scene based on their type. • Directional Lights: Enable the checkbox to see Directional Lights in your Scene. Disable this checkbox to remove Directional Lights from your Scene's lighting. • Punctual Lights: Enable the checkbox to see Punctual Lights in your Scene. Disable this checkbox to remove Punctual Lights from your Scene's lighting. • Area Lights: Enable the checkbox to see Area Lights in your Scene. Disable this checkbox to remove Area Lights from your Scene's lighting. • Reflection Probes: Enable the checkbox to see Reflection Probes in your Scene. Disable this checkbox to remove Reflection Probes from your Scene's lighting. |
+| **Exposure** | Allows you to select an Exposure debug mode to use. • Debug Mode: Use the drop-down to select a debug mode. See Exposure documentation for more information. • Show Tonemap Curve: Enable the checkbox to overlay the tonemap curve to the histogram debug view. This property only appears when you select HistogramView from Debug Mode. • Center Around Exposure: Enable the checkbox to center the histogram around the current exposure value. This property only appears when you select HistogramView from Debug Mode. • Display RGB Histogram: Enable the checkbox to display the Final Image Histogram as an RGB histogram instead of just luminance. This property only appears when you select FinalImageHistogramView from Debug Mode. • Display Mask Only: Enable the checkbox to display only the metering mask in the picture-in-picture. When disabled, the mask displays after weighting the scene color instead. This property only appears when you select MeteringWeighted from Debug Mode. • Debug Exposure Compensation: Set an additional exposure compensation for debug purposes. |
+| **Debug Mode** | Use the drop-down to select a lighting mode to debug. For example, you can visualize diffuse lighting, specular lighting, direct diffuse lighting, direct specular lighting, indirect diffuse lighting, indirect specular lighting, emissive lighting and Directional Light shadow cascades. |
+| **Hierarchy Debug Mode** | Use the drop-down to select a light type to display the direct lighting for or a Reflection Probe type to display the indirect lighting for. |
+
+| **Material Overrides** | **Suboption** | **Description** |
+|--------------------------------|-----------------------|----------------------------------------------------------------------------------------------------------|
+| **Override Smoothness** | N/A | Enable the checkbox to override the smoothness for the entire Scene. |
+| **Override Smoothness** | **Smoothness** | Use the slider to set the smoothness override value that HDRP uses for the entire Scene. |
+| **Override Albedo** | N/A | Enable the checkbox to override the albedo for the entire Scene. |
+| **Override Albedo** | **Albedo** | Use the color picker to set the albedo color that HDRP uses for the entire Scene. |
+| **Override Normal** | N/A | Enable the checkbox to override the normals for the entire Scene with object normals for lighting debug. |
+| **Override Specular Color** | N/A | Enable the checkbox to override the specular color for the entire Scene. |
+| **Override Specular Color** | **Specular Color** | Use the color picker to set the specular color that HDRP uses for the entire Scene. |
+| **Override Ambient Occlusion** | N/A | Enable the checkbox to override the ambient occlusion for the entire Scene. |
+| **Override Ambient Occlusion** | **Ambient Occlusion** | Use the slider to set the Ambient Occlusion override value that HDRP uses for the entire Scene. |
+| **Override Emissive Color** | N/A | Enable the checkbox to override the emissive color for the entire Scene. |
+| **Override Emissive Color** | **Emissive Color** | Use the color picker to set the emissive color that HDRP uses for the entire Scene. |
+
+| **Property** | **Sub-property** | **Description** |
+|-------------------------------------|------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Fullscreen Debug Mode** | N/A | Use the drop-down to select a fullscreen lighting effect to debug. For example, you can visualize Contact Shadows, the depth pyramid, and indirect diffuse lighting. You can also use some of those Lighting Fullscreen Debug Modes to debug Ray-Traced effects. |
+| **Tile/Cluster Debug** | N/A | Use the drop-down to select an internal HDRP lighting structure to visualize on screen.<br/>None: Select this option to disable this debug feature.<br/>Tile: Select this option to display an overlay of each lighting tile, and the number of lights in them.<br/>Cluster: Select this option to display an overlay of each lighting cluster that intersects opaque geometry, and the number of lights in them.<br/>Material Feature Variants: Select this option to display the index of the lighting Shader variant that HDRP uses for a tile. You can find variant descriptions in the lit.hlsl file. |
+| **Tile/Cluster Debug** | **Tile/Cluster Debug By Category** | Use the drop-down to select the Light type that you want to display the Tile/Cluster debug information for. The options include Light Types, Decals, and Local Volumetric Fog. This property only appears when you select Tile or Cluster from the Tile/Cluster Debug drop-down. |
+| **Tile/Cluster Debug** | **Cluster Debug Mode** | Use the drop-down to select the visualization mode for the cluster. The options are:<br/>Visualize Opaque: Displays cluster information on opaque geometry.<br/>Visualize Slice: Displays cluster information at a set distance from the camera.<br/>This property only appears when you select Cluster from the Tile/Cluster Debug drop-down. |
+| **Tile/Cluster Debug** | **Cluster Distance** | Use this slider to set the distance from the camera at which to display the cluster slice. This property only appears when you select VisualizeSlice from the Cluster Debug Mode drop-down. |
+| **Display Sky Reflection** | N/A | Enable the checkbox to display an overlay of the cube map that the current sky generates and HDRP uses for lighting. |
+| **Display Sky Reflection** | **Sky Reflection Mipmap** | Use the slider to set the mipmap level of the sky reflection cubemap. Use this to view the sky reflection cubemap's different mipmap levels. This property only appears when you enable the Display Sky Reflection checkbox. |
+| **Display Light Volumes** | N/A | Enable the checkbox to display an overlay of all light bounding volumes. |
+| **Display Light Volumes** | **Light Volume Debug Type** | Use the drop-down to select the method HDRP uses to display the light volumes.<br/>Gradient: Select this option to display the light volumes as a gradient.<br/>ColorAndEdge: Select this option to display the light volumes as a plain color (a different color for each Light Type) with a red border for readability.<br/>This property only appears when you enable the Display Light Volumes checkbox. |
+| **Display Light Volumes** | **Max Debug Light Count** | Use the slider to rescale the gradient. Lower this value to make the screen turn red faster. Use this property to change the maximum acceptable number of lights for your application and still see areas in red. This property only appears when you set the Display Light Volumes mode to Gradient. |
+| **Display Cookie Atlas** | N/A | Enable the checkbox to display an overlay of the cookie atlas. |
+| **Display Cookie Atlas** | **Mip Level** | Use the slider to set the mipmap level of the cookie atlas. This property only appears when you enable the Display Cookie Atlas checkbox. |
+| **Display Cookie Atlas** | **Clear Cookie Atlas** | Enable the checkbox to clear the cookie atlas at each frame. This property only appears when you enable the Display Cookie Atlas checkbox. |
+| **Display Planar Reflection Atlas** | N/A | Enable the checkbox to display an overlay of the planar reflection atlas. |
+| **Display Planar Reflection Atlas** | **Mip Level** | Use the slider to set the mipmap level of the planar reflection atlas. This property only appears when you enable the Display Planar Reflection Atlas checkbox. |
+| **Display Planar Reflection Atlas** | **Clear Planar Atlas** | Enable the checkbox to clear the planar reflection atlas at each frame. This property only appears when you enable the Display Planar Reflection Atlas checkbox. |
+| **Debug Overlay Screen Ratio** | N/A | Set the size of the debug overlay textures as a ratio of the screen size. The default value is 0.33, which is 33% of the screen size. |
@@ -477,13 +190,13 @@ The **Lighting** panel has tools that you can use to visualize various component
The **Volume** panel has tools that you can use to visualize the Volume Components affecting a camera.
-| **Property** | **Description** |
-| ---------------------- | ---------------------------------------------------- |
-| **Component** | Use the drop-down to select which volume component to visualize. |
-| **Camera** | Use the drop-down to select which camera to use as volume anchor. |
-| **Parameter** | List of parameters for the selected component. |
-| **Interpolated Value** | Current value affecting the choosen camera for each parameter. |
-| **Other columns** | Each one of the remaining columns display the parameter values of a volume affecting the selected **Camera**. They're sorted from left to right by decreasing influence. |
+| **Property** | **Description** |
+|------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Component** | Use the drop-down to select which volume component to visualize. |
+| **Camera** | Use the drop-down to select which camera to use as volume anchor. |
+| **Parameter** | List of parameters for the selected component. |
+| **Interpolated Value** | Current value affecting the chosen camera for each parameter. |
+| **Other columns**      | Each of the remaining columns displays the parameter values of a volume affecting the selected **Camera**. They're sorted from left to right by decreasing influence. |
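+
+To cross-check the **Interpolated Value** column from a script, you can read the interpolated parameter from a volume stack. The following is a minimal sketch, assuming the HDRP **Fog** volume component and its `meanFreePath` parameter; it queries the global stack, which can differ from the stack of the **Camera** you select in the panel.
+
+```csharp
+using UnityEngine;
+using UnityEngine.Rendering;
+using UnityEngine.Rendering.HighDefinition;
+
+public class LogInterpolatedFogValue : MonoBehaviour
+{
+    void Update()
+    {
+        // Read the interpolated (post-blending) value of a volume parameter,
+        // similar to what the Interpolated Value column displays.
+        var fog = VolumeManager.instance.stack.GetComponent<Fog>();
+        if (fog != null)
+            Debug.Log($"Interpolated fog attenuation distance: {fog.meanFreePath.value}");
+    }
+}
+```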
@@ -493,57 +206,57 @@ These settings make it possible for you to visualize [Adaptive Probe Volumes](pr
### Subdivision Visualization
-| **Property** | **Sub-property** | **Description** |
-|-|-|-|
-| **Display Cells** || Display cells. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. |
-| **Display Bricks** || Display bricks. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. |
-| **Live Subdivision Preview** || Enable a preview of Adaptive Probe Volume data in the scene without baking. This might make the Editor slower. This setting appears only if you select **Display Cells** or **Display Bricks**. |
-|| **Cell Updates Per Frame** | Set the number of cells, bricks, and probe positions to update per frame. Higher values might make the Editor slower. The default value is 4. This property appears only if you enable **Live Subdivision Preview**. |
-|| **Update Frequency** | Set how frequently Unity updates cell, bricks, and probe positions, in seconds. The default value is 1. This property appears only if you enable **Live Subdivision Preview**. |
-| **Debug Draw Distance** || Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default value is 500. |
+| **Property** | **Sub-property** | **Description** |
+|------------------------------|----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Display Cells** | N/A | Display cells. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. |
+| **Display Bricks** | N/A | Display bricks. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. |
+| **Live Subdivision Preview** | N/A | Enable a preview of Adaptive Probe Volume data in the scene without baking. This might make the Editor slower. This setting appears only if you select **Display Cells** or **Display Bricks**. |
+| **Live Subdivision Preview** | **Cell Updates Per Frame** | Set the number of cells, bricks, and probe positions to update per frame. Higher values might make the Editor slower. The default value is 4. This property appears only if you enable **Live Subdivision Preview**. |
+| **Live Subdivision Preview** | **Update Frequency**       | Set how frequently Unity updates cells, bricks, and probe positions, in seconds. The default value is 1. This property appears only if you enable **Live Subdivision Preview**. |
+| **Debug Draw Distance** | N/A | Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default value is 500. |
### Probe Visualization
-| **Property** | **Sub-property** | **Description** |
-|-|-|-|
-| **Display Probes** || Display probes. |
-|| **Probe Shading Mode** | Set what the Rendering Debugger displays. The options are:
SH: Display the [spherical harmonics (SH) lighting data](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) for the final color calculation. The number of bands depends on the **SH Bands** setting in the active [HDRP Asset](HDRP-Asset.md).
SHL0: Display the spherical harmonics (SH) lighting data with only the first band.
SHL0L1: Display the spherical Harmonics (SH) lighting data with the first two bands.
Validity: Display whether probes are valid, based on the number of backfaces the probe samples. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information about probe validity.
Probe Validity Over Dilation Threshold: Display red if a probe samples too many backfaces, based on the **Validity Threshold** set in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md). This means the probe can't be baked or sampled.
Invalidated By Touchup Volumes: Display probes that a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md) has made invalid.
Size: Display a different color for each size of [brick](probevolumes-concept.md).
Sky Occlusion SH: If you enable [sky occlusion](probevolumes-skyocclusion.md), this setting displays the amount of indirect light the probe receives from the sky that bounced off static GameObjects. The value is a scalar, so it displays as a shade of gray.
Sky Direction: Display a green circle that represents the direction from the probe to the sky. This setting displays a red circle if Unity can't calculate the direction, or **Sky Direction** in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) is disabled.
|
-|| **Debug Size** | Set the size of the displayed probes. The default is 0.3. |
-|| **Exposure Compensation** | Set the brightness of the displayed probes. Decrease the value to increase brightness. The default is 0. This property appears only if you set **Probe Shading Mode** to **SH**, **SHL0**, or **SHL0L1**. |
-|| **Max Subdivisions Displayed** | Set the lowest probe density to display. For example, set this to 0 to display only the highest probe density. |
-|| **Min Subdivisions Displayed** | Set the highest probe density to display. |
-| **Debug Probe Sampling** || Display how probes are sampled for a pixel. In the Scene view, in the **Adaptive Probe Volumes** overlay, select **Select Pixel** to change the pixel. |
-|| **Debug Size** | Set the size of the **Debug Probe Sampling** display. |
-|| **Debug With Sampling Noise** | Enable sampling noise for this debug view. Enabling this gives more accurate information, but makes the information more difficult to read. |
-| **Virtual Offset Debug** || Display the offsets Unity applies to Light Probe capture positions. |
-|| **Debug Size** | Set the size of the arrows that represent Virtual Offset values. |
-| **Debug Draw Distance** || Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default is 200. |
-| **Auto Display Probes** || Display probes in the Scene view, if you select a volume with a Probe Adjustment Volume component in the Hierarchy window. |
-| **Isolate Affected** || Display only probes affected by a volume with a Probe Adjustment Volume component, if you select the volume in the Hierarchy window. |
+| **Property** | **Sub-property** | **Description** |
+|--------------------------|--------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Display Probes** | N/A | Display probes. |
+| **Display Probes** | **Probe Shading Mode** | Set what the Rendering Debugger displays. The options are:<br/>SH: Display the [spherical harmonics (SH) lighting data](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) for the final color calculation. The number of bands depends on the **SH Bands** setting in the active [HDRP Asset](HDRP-Asset.md).<br/>SHL0: Display the spherical harmonics (SH) lighting data with only the first band.<br/>SHL0L1: Display the spherical harmonics (SH) lighting data with the first two bands.<br/>Validity: Display whether probes are valid, based on the number of backfaces the probe samples. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information about probe validity.<br/>Probe Validity Over Dilation Threshold: Display red if a probe samples too many backfaces, based on the **Validity Threshold** set in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md). This means the probe can't be baked or sampled.<br/>Invalidated By Touchup Volumes: Display probes that a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md) has made invalid.<br/>Size: Display a different color for each size of [brick](probevolumes-concept.md).<br/>Sky Occlusion SH: If you enable [sky occlusion](probevolumes-skyocclusion.md), this setting displays the amount of indirect light the probe receives from the sky that bounced off static GameObjects. The value is a scalar, so it displays as a shade of gray.<br/>Sky Direction: Display a green circle that represents the direction from the probe to the sky. This setting displays a red circle if Unity can't calculate the direction, or **Sky Direction** in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) is disabled. |
+| **Display Probes** | **Debug Size** | Set the size of the displayed probes. The default is 0.3. |
+| **Display Probes** | **Exposure Compensation** | Set the brightness of the displayed probes. Decrease the value to increase brightness. The default is 0. This property appears only if you set **Probe Shading Mode** to **SH**, **SHL0**, or **SHL0L1**. |
+| **Display Probes** | **Max Subdivisions Displayed** | Set the lowest probe density to display. For example, set this to 0 to display only the highest probe density. |
+| **Display Probes** | **Min Subdivisions Displayed** | Set the highest probe density to display. |
+| **Debug Probe Sampling** | N/A | Display how probes are sampled for a pixel. In the Scene view, in the **Adaptive Probe Volumes** overlay, select **Select Pixel** to change the pixel. |
+| **Debug Probe Sampling** | **Debug Size** | Set the size of the **Debug Probe Sampling** display. |
+| **Debug Probe Sampling** | **Debug With Sampling Noise** | Enable sampling noise for this debug view. Enabling this gives more accurate information, but makes the information more difficult to read. |
+| **Virtual Offset Debug** | N/A | Display the offsets Unity applies to Light Probe capture positions. |
+| **Virtual Offset Debug** | **Debug Size** | Set the size of the arrows that represent Virtual Offset values. |
+| **Debug Draw Distance** | N/A | Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default is 200. |
+| **Auto Display Probes** | N/A | Display probes in the Scene view, if you select a volume with a Probe Adjustment Volume component in the Hierarchy window. |
+| **Isolate Affected** | N/A | Display only probes affected by a volume with a Probe Adjustment Volume component, if you select the volume in the Hierarchy window. |
### Streaming
Use the following properties to control how HDRP streams Adaptive Probe Volumes. Refer to [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information.
-| **Property** | **Description** |
-| ------------ | --------------- |
-| **Freeze Streaming** | Stop Unity from streaming probe data. |
-| **Display Streaming Score** | If you enable **Display Cells**, this setting darkens cells that have a lower priority for streaming. Cells closer to the camera usually have the highest priority. |
-| **Maximum cell streaming** | Stream as many cells as possible every frame. |
+| **Property** | **Description** |
+|---------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Freeze Streaming** | Stop Unity from streaming probe data. |
+| **Display Streaming Score** | If you enable **Display Cells**, this setting darkens cells that have a lower priority for streaming. Cells closer to the camera usually have the highest priority. |
+| **Maximum cell streaming** | Stream as many cells as possible every frame. |
| **Display Index Fragmentation** | Open an overlay that displays how fragmented the streaming memory is. A green square is an area of used memory. The more spaces between the green squares, the more fragmented the memory. |
-| **Index Fragmentation Rate** | Displays the amount of fragmentation as a numerical value, where 0 is no fragmentation. |
-| **Verbose Log** | Log information about streaming. |
+| **Index Fragmentation Rate** | Displays the amount of fragmentation as a numerical value, where 0 is no fragmentation. |
+| **Verbose Log** | Log information about streaming. |
### Scenario Blending
Use the following properties to control how HDRP blends Lighting Scenarios. Refer to [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for more information.
-| **Property** | **Description** |
-| - | - |
-| **Number of Cells Blended Per Frame** | Determines the maximum number of cells Unity blends per frame. The default is 10,000. |
-| **Turnover Rate** | Set the blending priority of cells close to the camera. The range is 0 to 1, where 0 sets the cells close to the camera with high priority, and 1 sets all cells with equal priority. Increase **Turnover Rate** to avoid cells close to the camera blending too frequently. |
-| **Scenario To Blend With** | Select a Lighting Scenario to blend with the active Lighting Scenario. |
-| **Scenario Blending Factor** | Set how far to blend from the active Lighting Scenario to the **Scenario To Blend With**. The range is 0 to 1, where 0 is fully the active Lighting Scenario, and 1 is fully the **Scenario To Blend With**. |
+| **Property** | **Description** |
+|---------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Number of Cells Blended Per Frame** | Determines the maximum number of cells Unity blends per frame. The default is 10,000. |
+| **Turnover Rate** | Set the blending priority of cells close to the camera. The range is 0 to 1, where 0 sets the cells close to the camera with high priority, and 1 sets all cells with equal priority. Increase **Turnover Rate** to avoid cells close to the camera blending too frequently. |
+| **Scenario To Blend With** | Select a Lighting Scenario to blend with the active Lighting Scenario. |
+| **Scenario Blending Factor** | Set how far to blend from the active Lighting Scenario to the **Scenario To Blend With**. The range is 0 to 1, where 0 is fully the active Lighting Scenario, and 1 is fully the **Scenario To Blend With**. |
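+
+These debug properties mirror what you can drive from a script at runtime. The following is a minimal sketch, assuming a Lighting Scenario named `Night` exists in your Baking Set and that the `ProbeReferenceVolume.BlendLightingScenario` API is available in your Unity version; refer to [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for the supported scripting workflow.
+
+```csharp
+using UnityEngine;
+using UnityEngine.Rendering;
+
+public class BlendLightingScenarios : MonoBehaviour
+{
+    // Example scenario name; replace it with a Lighting Scenario from your Baking Set.
+    public string scenarioToBlendWith = "Night";
+
+    [Range(0f, 1f)]
+    public float blendingFactor = 0.5f;
+
+    void Update()
+    {
+        // Blend from the active Lighting Scenario towards the other one, like the
+        // Scenario To Blend With and Scenario Blending Factor debug properties.
+        ProbeReferenceVolume.instance.BlendLightingScenario(scenarioToBlendWith, blendingFactor);
+    }
+}
+```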
@@ -555,130 +268,97 @@ The **Rendering** panel has tools that you can use to visualize various HDRP ren
Use the drop-down to select a rendering mode to display as an overlay on the screen.
-| Property | Description |
-|-|-|
-| **Motion Vectors** | Select this option to display motion vectors. Note that object motion vectors aren't visible in the Scene view. |
-| **World Space Position** | Select this option to display world space positions. |
-| **NaN Tracker** | Select this option to display an overlay that highlights [NaN](https://en.wikipedia.org/wiki/NaN) values. |
-| **ColorLog** | Select this option to display how the raw, log-encoded buffer looks before color grading takes place. |
-| **DepthOfFieldCoc** | Select this option to display the circle of confusion for the depth of field effect. The circle of confusion displays how much the depth of field effect blurs a given pixel/area. |
-| **Quad Overdraw** | Select this option to display an overlay that highlights gpu quads running multiple fragment shaders. This is mainly caused by small or thin triangles. Use LODs to reduce the amount of overdraw when objects are far away. (This mode is currently not supported on Metal and PS4). |
-| **Vertex Density** | Select this option to display an overlay that highlights pixels running multiple vertex shaders. A vertex can be run multiple times when part of different triangles. This helps finding models that need LODs. (This mode is currently not supported on Metal). |
-| **TransparencyOverdraw** | Select this option to view the number of transparent pixels that draw over one another. This represents the amount of on-screen overlapping of transparent pixel. This is useful to see the amount of pixel overdraw for transparent GameObjects from different points of view in the Scene. This debug option displays each pixel as a heat map going from black (which represents no transparent pixels) through blue to red (at which there are **Max Pixel Cost** number of transparent pixels). |
-| **RequestedVirtualTextureTiles** | Select this option to display what texture tile each pixel uses. Pixels that this debug view renders with the same color request the same texture tile to be streamed into video memory by the streaming virtual texturing system. This debug view is useful to see which areas of the screen use textures that the virtual texturing system steams into video memory. It can help to identify issues with the virtual texture streaming system. |
-| **LensFlareScreenSpace** | Display the lens flares that the [Screen Space Lens Flare](shared/lens-flare/Override-Screen-Space-Lens-Flare.html) override generates. |
-| **Compute Thickness** | Select this option to display thickness for each layer selected in the current HDRP Asset and configure the following properties: • **Layer Mask**: Set the layer number to visualize in the debug view. • **Show Overlap Count**: Highlight the triangles that intersect for each pixel. • **Thickness Scale**: Set the range (in meters) of the ComputeThickness debug view. When you enable Show Overlap Count, this setting affects the Overlap Count debug view. For more information on how to debug compute thickness, refer to [Sample and use material thickness](Compute-Thickness.md). |
-| **Max Pixel Cost** | The scale of the transparency overdraw heat map. For example, a value of 10 displays a red pixel if 10 transparent pixels overlap. Any number of overdraw above this value also displays as red. This property only appears if you set **Fullscreen Debug Mode** to **TransparencyOverdraw**. |
-| **High Quality Lines** | Select this option to view underlying data used by tile-based software rasterizer for the [High Quality Line Rendering](Override-High-Quality-Lines.md) feature.
**Segments per Tile** displays a heatmap representing the number of segments in each tile.
**Tile Processor UV** displays the uv coordinate for each tile.
**Cluster Depth** displays segments based on their depth in the cluster structure that's used for transparent sorting.
|
+| **Property** | **Description** |
+|----------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Motion Vectors** | Select this option to display motion vectors. Note that object motion vectors aren't visible in the Scene view. |
+| **World Space Position** | Select this option to display world space positions. |
+| **NaN Tracker** | Select this option to display an overlay that highlights [NaN](https://en.wikipedia.org/wiki/NaN) values. |
+| **ColorLog** | Select this option to display how the raw, log-encoded buffer looks before color grading takes place. |
+| **DepthOfFieldCoc** | Select this option to display the circle of confusion for the depth of field effect. The circle of confusion displays how much the depth of field effect blurs a given pixel/area. |
+| **Quad Overdraw**                | Select this option to display an overlay that highlights GPU quads running multiple fragment shaders. This is mainly caused by small or thin triangles. Use LODs to reduce the amount of overdraw when objects are far away. (This mode is currently not supported on Metal and PS4). |
+| **Vertex Density**               | Select this option to display an overlay that highlights pixels running multiple vertex shaders. A vertex can run multiple times when it's part of different triangles. This helps you find models that need LODs. (This mode is currently not supported on Metal). |
+| **TransparencyOverdraw**         | Select this option to view the number of transparent pixels that draw over one another. This represents the amount of on-screen overlap of transparent pixels. This is useful to see the amount of pixel overdraw for transparent GameObjects from different points of view in the Scene. This debug option displays each pixel as a heat map going from black (which represents no transparent pixels) through blue to red (at which there are **Max Pixel Cost** number of transparent pixels). |
+| **RequestedVirtualTextureTiles** | Select this option to display what texture tile each pixel uses. Pixels that this debug view renders with the same color request the same texture tile to be streamed into video memory by the streaming virtual texturing system. This debug view is useful to see which areas of the screen use textures that the virtual texturing system streams into video memory. It can help to identify issues with the virtual texture streaming system. |
+| **LensFlareScreenSpace** | Display the lens flares that the [Screen Space Lens Flare](shared/lens-flare/Override-Screen-Space-Lens-Flare.html) override generates. |
+| **Compute Thickness**            | Select this option to display thickness for each layer selected in the current HDRP Asset and configure the following properties:<br/>Layer Mask: Set the layer number to visualize in the debug view.<br/>Show Overlap Count: Highlight the triangles that intersect for each pixel.<br/>Thickness Scale: Set the range (in meters) of the Compute Thickness debug view. When you enable Show Overlap Count, this setting affects the Overlap Count debug view. |
+| **Max Pixel Cost** | The scale of the transparency overdraw heat map. For example, a value of 10 displays a red pixel if 10 transparent pixels overlap. Any number of overdraw above this value also displays as red. This property only appears if you set **Fullscreen Debug Mode** to **TransparencyOverdraw**. |
+| **High Quality Lines**           | Select this option to view the underlying data used by the tile-based software rasterizer for the [High Quality Line Rendering](Override-High-Quality-Lines.md) feature.<br/>**Segments per Tile** displays a heatmap representing the number of segments in each tile.<br/>**Tile Processor UV** displays the UV coordinate for each tile.<br/>**Cluster Depth** displays segments based on their depth in the cluster structure that's used for transparent sorting. |
### Mipmap Streaming
-| **Property** | **Description** |
-|-|-|
-| **Disable Mip Caching** | If you enable **Disable Mip Caching**, Unity doesn't cache mipmap levels in GPU memory, and constantly discards mipmap levels from GPU memory when they're no longer needed. This means the mipmap streaming debug views more accurately display which mipmap levels Unity uses at the current time. Enabling this setting increases the amount of data Unity transfers from disk to the CPU and the GPU. |
-| **Debug View** | Set a mipmap streaming debug view. Options:
**None**: Display the normal view.
**Mip Streaming Performance**: Use color to indicate which textures use mipmap streaming, and whether mipmap streaming limits the number of mipmap levels Unity loads.
**Mip Streaming Status**: Use color on materials to indicate whether their textures use mipmap streaming. Diagonal stripes mean some of the textures use a [`requestedMipmapLevel`](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Texture2D-requestedMipmapLevel.html) that overrides mipmap streaming. Yellow means Unity can't stream the texture, or the texture is assigned to terrain.
**Mip Streaming Activity**: Use color to indicate whether Unity recently streamed the textures.
**Mip Streaming Priority**: Use color to indicate the streaming priority of the textures. Set streaming priority for a texture in the [**Texture Import Settings** window](https://docs.unity3d.com/6000.0/Documentation/Manual/class-TextureImporter.html).
**Mip Count**: Display the number of mipmap levels Unity loads for the textures.
**Mip Ratio**: Use color to indicate the pixel density of the highest-resolution mipmap levels Unity uploads for the textures.
|
-| **Debug Opacity** | Set the opacity of the **Debug View** you select. 0 means not visible and 1 means fully visible. This property is visible only if **Debug View** is not set to **None**. |
-| **Combined Per Material** | Set the **Debug View** to display debug information of all the textures on a material, not individual texture slots. This property is only visible if **Debug View** is set to **Mip Streaming Status** or **Mip Streaming Activity**. |
-| **Material Texture Slot** | Set which texture Unity uses from each material to display debug information. For example, set **Material Texture Slot** to **Slot 3** to display debug information for the fourth texture. If a material has fewer textures than the **Material Texture Slot** value, Unity uses no texture. This property is visible only if **Combined Per Material** is disabled, and **Debug View** is not set to **None**. |
-| **Display Status Codes** | Display more detailed statuses for textures that display as **Not streaming** or **Warning** in the **Mip Streaming Status** debug view. This property is visible only if **Debug View** is set to **Mip Streaming Status**. |
-| **Activity Timespan** | Set how long a texture displays as **Just streamed**, in seconds. This property is visible only if **Debug View** is set to **Mip Streaming Activity**. |
-| **Terrain Texture** | Set which terrain texture Unity displays. You can select either **Control** for the control texture, or one of the diffuse textures. This property is visible only if **Debug View** is not set to **None**. |
+| **Property** | **Description** |
+|---------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Disable Mip Caching** | If you enable **Disable Mip Caching**, Unity doesn't cache mipmap levels in GPU memory, and constantly discards mipmap levels from GPU memory when they're no longer needed. This means the mipmap streaming debug views more accurately display which mipmap levels Unity uses at the current time. Enabling this setting increases the amount of data Unity transfers from disk to the CPU and the GPU. |
+| **Debug View**            | Set a mipmap streaming debug view. Options:<br/>**None**: Display the normal view.<br/>**Mip Streaming Performance**: Use color to indicate which textures use mipmap streaming, and whether mipmap streaming limits the number of mipmap levels Unity loads.<br/>**Mip Streaming Status**: Use color on materials to indicate whether their textures use mipmap streaming. Diagonal stripes mean some of the textures use a [`requestedMipmapLevel`](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Texture2D-requestedMipmapLevel.html) that overrides mipmap streaming. Yellow means Unity can't stream the texture, or the texture is assigned to terrain.<br/>**Mip Streaming Activity**: Use color to indicate whether Unity recently streamed the textures.<br/>**Mip Streaming Priority**: Use color to indicate the streaming priority of the textures. Set streaming priority for a texture in the [**Texture Import Settings** window](https://docs.unity3d.com/6000.0/Documentation/Manual/class-TextureImporter.html).<br/>**Mip Count**: Display the number of mipmap levels Unity loads for the textures.<br/>**Mip Ratio**: Use color to indicate the pixel density of the highest-resolution mipmap levels Unity uploads for the textures. |
+| **Debug Opacity** | Set the opacity of the **Debug View** you select. 0 means not visible and 1 means fully visible. This property is visible only if **Debug View** is not set to **None**. |
+| **Combined Per Material** | Set the **Debug View** to display debug information of all the textures on a material, not individual texture slots. This property is only visible if **Debug View** is set to **Mip Streaming Status** or **Mip Streaming Activity**. |
+| **Material Texture Slot** | Set which texture Unity uses from each material to display debug information. For example, set **Material Texture Slot** to **Slot 3** to display debug information for the fourth texture. If a material has fewer textures than the **Material Texture Slot** value, Unity uses no texture. This property is visible only if **Combined Per Material** is disabled, and **Debug View** is not set to **None**. |
+| **Display Status Codes** | Display more detailed statuses for textures that display as **Not streaming** or **Warning** in the **Mip Streaming Status** debug view. This property is visible only if **Debug View** is set to **Mip Streaming Status**. |
+| **Activity Timespan** | Set how long a texture displays as **Just streamed**, in seconds. This property is visible only if **Debug View** is set to **Mip Streaming Activity**. |
+| **Terrain Texture** | Set which terrain texture Unity displays. You can select either **Control** for the control texture, or one of the diffuse textures. This property is visible only if **Debug View** is not set to **None**. |
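+
+For example, the diagonal stripes in the **Mip Streaming Status** view indicate textures whose mip level is forced from a script rather than chosen by the streaming system. The following is a minimal sketch of such an override, assuming `streamedTexture` references a texture with mipmap streaming enabled:
+
+```csharp
+using UnityEngine;
+
+public class ForceMipLevel : MonoBehaviour
+{
+    // Example reference; assign a texture that has mipmap streaming enabled.
+    public Texture2D streamedTexture;
+
+    void Start()
+    {
+        // Force the streaming system to load mip 2 for this texture. The texture
+        // then displays with diagonal stripes in the Mip Streaming Status view.
+        streamedTexture.requestedMipmapLevel = 2;
+    }
+
+    void OnDestroy()
+    {
+        // Return control of the mip level to the mipmap streaming system.
+        streamedTexture.ClearRequestedMipmapLevel();
+    }
+}
+```
+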
### Color Picker
The **Color Picker** works with whichever debug mode HDRP displays at the time. This means that you can see the values of various components of the rendering like Albedo or Diffuse Lighting. By default, this displays the value of the main High Dynamic Range (HDR) color buffer.
-| **Property** | **Description** |
-| --------------------- | ------------------------------------------------------------ |
-| **Debug Mode** | Use the drop-down to select the format of the color picker display. |
+| **Property** | **Description** |
+|----------------|-------------------------------------------------------------------------------------------------|
+| **Debug Mode** | Use the drop-down to select the format of the color picker display. |
| **Font Color** | Use the color picker to select a color for the font that the Color Picker uses for its display. |
### False Color Mode and Freeze Camera For Culling
-
-
-
-
Property
-
Description
-
-
-
-
-
False Color Mode
-
Enable the checkbox to define intensity ranges that the debugger uses to display a color temperature gradient for the current frame. The color temperature gradient goes from blue, to green, to yellow, to red.
-
-
-
Range Threshold 0
-
Set the first split for the intensity range. This property only appears when you enable the False Color Mode checkbox.
-
-
-
Range Threshold 1
-
Set the second split for the intensity range. This property only appears when you enable the False Color Mode checkbox.
-
-
-
Range Threshold 2
-
Set the third split for the intensity range. This property only appears when you enable the False Color Mode checkbox.
-
-
-
Range Threshold 3
-
Set the final split for the intensity range. This property only appears when you enable the False Color Mode checkbox.
-
-
-
MSAA Samples
-
Use the drop-down to select the number of samples the debugger uses for MSAA.
-
-
-
Freeze Camera for Culling
-
Use the drop-down to select a Camera to freeze to check its culling. To check if the Camera's culling works correctly, freeze the Camera and move occluders around it.
-
-
-
+| **Property** | **Description** |
+|-------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **False Color Mode**          | Enable the checkbox to define intensity ranges that the debugger uses to display a color temperature gradient for the current frame. The color temperature gradient goes from blue, to green, to yellow, to red.<br/>Range Threshold 0: Set the first split for the intensity range. This property only appears when you enable the False Color Mode checkbox.<br/>Range Threshold 1: Set the second split for the intensity range. This property only appears when you enable the False Color Mode checkbox.<br/>Range Threshold 2: Set the third split for the intensity range. This property only appears when you enable the False Color Mode checkbox.<br/>Range Threshold 3: Set the final split for the intensity range. This property only appears when you enable the False Color Mode checkbox. |
+| **MSAA Samples** | Use the drop-down to select the number of samples the debugger uses for MSAA. |
+| **Freeze Camera for Culling** | Use the drop-down to select a Camera to freeze to check its culling. To check if the Camera's culling works correctly, freeze the Camera and move occluders around it. |
### Color Monitors
The **Color monitors** are a set of industry-standard monitors to help artists control the overall look and exposure of a scene.
-| **Property** | **Description** |
-|------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| **Waveform** | Displays the full range of luma (brightness) information in the Camera’s output. The horizontal axis of the graph corresponds to the render (from left to right) and the vertical axis indicates the brightness value. |
-| **Exposure** | Determines the exposure multiplier HDRP applies to the waveform values.
This property only appears when you enable the **Waveform** checkbox. |
-| **Parade mode** | Splits the image into red, green and blue separately. You can use this to visualise the RGB balance of the Camera's image. This helps you to see large offsets in one particular channel, or to determine if GameObjects are true black or true white. A true black, white, or grey GameObject has equal values across all channels.
This property only appears when you enable the **Waveform** checkbox. |
-| **Vectorscope** | The Vectorscope monitor measures the overall range of hue and saturation within the Camera’s image in real-time. To display the data, it uses a scatter graph relative to the center of the Vectorscope.
The Vectorscope measures hue values between yellow, red, magenta, blue, cyan and green. The center of the Vectorscope represents absolute zero saturation and the edges represent the highest level of saturation. To determine the hues in your scene and their saturation, look at the distribution of the Vectorscope’s scatter graph. To identify whether there is a color imbalance in the image, look at how close the middle of the Vectorscope graph is to the absolute center. If the Vectorscope graph is off-center, this indicates that there is a color cast (tint) in the image. |
-| **Exposure** | Determines the exposure multiplier HDRP applies to the vectorscope values.
This property only appears when you enable the **Vectorscope** checkbox. |
-| **Size** | The size ratio of the color monitors. |
-
-| **Property** | **Description** |
-| --- | --- |
-| **Clear Render Targets At Creation** | Clears render textures the first time the render graph system uses them. |
-| **Clear Render Targets When Freed** | Clears render textures when they're no longer used by render graph. |
-| **Disable Pass Culling** | Disables HDRP culling render passes that have no impact on the final render. |
-| **Disable Pass Merging** | Disables HDRP merging render passes. |
-| **Immediate Mode** | Enables the render graph system evaluating passes immediately after it creates them. |
-| **Enable Logging** | Enables logging to the **Console** window. |
-| **Log Frame Information** | Logs how HDRP uses the resources during the frame, in the **Console** window. |
-| **Log Resources** | Logs the resources HDRP uses during the frame, in the **Console** window. |
+| **Property** | **Description** |
+|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Waveform** | Displays the full range of luma (brightness) information in the Camera’s output. The horizontal axis of the graph corresponds to the render (from left to right) and the vertical axis indicates the brightness value. |
+| **Exposure** | Determines the exposure multiplier HDRP applies to the waveform values. This property only appears when you enable the **Waveform** checkbox. |
+| **Parade mode** | Splits the image into red, green, and blue separately. You can use this to visualize the RGB balance of the Camera's image. This helps you to see large offsets in one particular channel, or to determine if GameObjects are true black or true white. A true black, white, or grey GameObject has equal values across all channels. This property only appears when you enable the **Waveform** checkbox. |
+| **Vectorscope** | The Vectorscope monitor measures the overall range of hue and saturation within the Camera’s image in real-time. To display the data, it uses a scatter graph relative to the center of the Vectorscope. The Vectorscope measures hue values between yellow, red, magenta, blue, cyan and green. The center of the Vectorscope represents absolute zero saturation and the edges represent the highest level of saturation. To determine the hues in your scene and their saturation, look at the distribution of the Vectorscope’s scatter graph. To identify whether there is a color imbalance in the image, look at how close the middle of the Vectorscope graph is to the absolute center. If the Vectorscope graph is off-center, this indicates that there is a color cast (tint) in the image. |
+| **Exposure** | Determines the exposure multiplier HDRP applies to the vectorscope values. This property only appears when you enable the **Vectorscope** checkbox. |
+| **Size** | The size ratio of the color monitors. |
+
+| **Property** | **Description** |
+|--------------------------------------|--------------------------------------------------------------------------------------|
+| **Clear Render Targets At Creation** | Clears render textures the first time the render graph system uses them. |
+| **Clear Render Targets When Freed** | Clears render textures when they're no longer used by render graph. |
+| **Disable Pass Culling**             | Prevents HDRP from culling render passes that have no impact on the final render.     |
+| **Disable Pass Merging**             | Prevents HDRP from merging render passes.                                              |
+| **Immediate Mode**                   | Makes the render graph system evaluate passes immediately after it creates them.      |
+| **Enable Logging** | Enables logging to the **Console** window. |
+| **Log Frame Information** | Logs how HDRP uses the resources during the frame, in the **Console** window. |
+| **Log Resources** | Logs the resources HDRP uses during the frame, in the **Console** window. |
The **NVIDIA device debug view** is a panel that displays a list of the current feature states of NVIDIA Deep Learning Super Sampling (DLSS). Each row represents an active screen in which DLSS is running.
-| **Information** | **Description** |
-| ------------------------ | ------------------ |
-| **NVUnityPlugin Version**| Displays the current internal version id of the NVIDIA Unity Plugin that interacts with DLSS. |
-| **NGX API Version** | Displays the actual version which DLSS operates on. |
-| **Device Status** | Displays the current status of the NVIDIA driver. If an internal error occurred when initializing the driver, Unity displays the error here. |
-| **DLSS Supported** | Displays **True** if your project supports DLSS at runtime. Otherwise, displays **False**. |
-| **DLSS Slot ID** | Displays an internal ID for the particular DLSS view being displayed. |
-| **Status** | Displays whether the view is **valid** or **invalid**. A view is invalid if there is an internal error, or if the Scriptable Render Pipeline passes incorrect parameters. |
-| **Input resolution** | Displays the current input resolution. Unity calculates this from the screen percentage specified for dynamic resolution scaling. |
-| **Output resolution** | Displays the target resolution for this particular DLSS view. |
-| **Quality** | Displays the quality selected for this particular DLSS view. |
+| **Information** | **Description** |
+|---------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **NVUnityPlugin Version** | Displays the current internal version id of the NVIDIA Unity Plugin that interacts with DLSS. |
+| **NGX API Version** | Displays the actual version which DLSS operates on. |
+| **Device Status** | Displays the current status of the NVIDIA driver. If an internal error occurred when initializing the driver, Unity displays the error here. |
+| **DLSS Supported** | Displays **True** if your project supports DLSS at runtime. Otherwise, displays **False**. |
+| **DLSS Slot ID** | Displays an internal ID for the particular DLSS view being displayed. |
+| **Status** | Displays whether the view is **valid** or **invalid**. A view is invalid if there is an internal error, or if the Scriptable Render Pipeline passes incorrect parameters. |
+| **Input resolution** | Displays the current input resolution. Unity calculates this from the screen percentage specified for dynamic resolution scaling. |
+| **Output resolution** | Displays the target resolution for this particular DLSS view. |
+| **Quality** | Displays the quality selected for this particular DLSS view. |
The **History Buffers view** lets you display various render pipeline full screen buffers that persist across multiple frames.
-| **Property** | **Description** |
-| ------------------ | ------------------------------------------------------------ |
-| **Buffer** | Choose the history buffer to visualize as a full screen output. |
+| **Property** | **Description** |
+|--------------------|-----------------------------------------------------------------------------------------------------------------------|
+| **Buffer** | Choose the history buffer to visualize as a full screen output. |
| **Frame Index** | Choose what frame version of the history buffer to visualize. Certain buffers only have a limited number of versions. |
-| **Apply Exposure** | Enable exposure correction of the buffer. It is only available for certain history buffers. |
+| **Apply Exposure** | Enable exposure correction of the buffer. It is only available for certain history buffers. |
@@ -690,12 +370,12 @@ In the **Rendering Debugger**, each active Camera in the Scene has its own debug
The following columns are available for each Frame Setting:
-| **Column** | **Description** |
-| -------------- | ------------------------------------------------------------ |
-| **Debug** | Displays Frame Setting values you can modify for the selected Camera. You can use these to temporarily alter the Camera’s Frame Settings for debugging purposes. You can't enable Frame Setting features that your HDRP Asset doesn't support. |
-| **Sanitized** | Displays the Frame Setting values that the selected Camera uses after Unity checks to see if your HDRP Asset supports them. |
-| **Overridden** | Displays the Frame Setting values that the selected Camera overrides. If you don't check the **Custom Frame Settings** checkbox, check it and don't override any settings, this column is identical to the **Default** column. |
-| **Default** | Displays the default Frame Setting values in your current [HDRP Asset](HDRP-Asset.md). |
+| **Column** | **Description** |
+|----------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Debug** | Displays Frame Setting values you can modify for the selected Camera. You can use these to temporarily alter the Camera’s Frame Settings for debugging purposes. You can't enable Frame Setting features that your HDRP Asset doesn't support. |
+| **Sanitized** | Displays the Frame Setting values that the selected Camera uses after Unity checks to see if your HDRP Asset supports them. |
+| **Overridden** | Displays the Frame Setting values that the selected Camera overrides. If you don't check the **Custom Frame Settings** checkbox, or you check it but don't override any settings, this column is identical to the **Default** column.                           |
+| **Default** | Displays the default Frame Setting values in your current [HDRP Asset](HDRP-Asset.md). |
Unity processes **Sanitized**, **Overridden**, and **Default** in a specific order:
@@ -706,7 +386,7 @@ Unity processes **Sanitized**, **Overridden**, and **Default** in a specific ord
### Interpreting the Camera window
-
+
- In the image above, **Ray Tracing** is disabled at the **Sanitized** step, but enabled at the **Default** and **Overridden** steps. This means that, although **Ray Tracing** is enabled in the Frame Settings this Camera uses, it's not enabled in the HDRP Asset’s **Render Pipeline Supported Features**.
- Also in the image above, **Decals** is disabled at the **Overridden** step, but enabled at the **Default** step. This means that **Decals** is enabled in the default Camera Frame Settings but disabled for that specific Camera’s **Custom Frame Settings**.
@@ -717,10 +397,10 @@ Unity processes **Sanitized**, **Overridden**, and **Default** in a specific ord
You can use the **Virtual Texturing** panel to visualize [Streaming Virtual Texturing](https://docs.unity3d.com/Manual/svt-streaming-virtual-texturing.html).
-| **Property** | **Description** |
-| ------------------------------------ | ------------------------------------------------------------- |
-| **Debug disable Feedback Streaming** | Deactivate Streaming Virtual Texturing to quickly assess its cost in performance and memory at runtime. |
-| **Textures with Preloaded Mips** | Display the total number of virtual textures Unity has loaded into the scene. Unity tries to preload the least detailed mipmap level (least being 128x128) into GPU memory. This number increases every time a material is loaded. |
+| **Property** | **Description** |
+|--------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Debug disable Feedback Streaming** | Deactivate Streaming Virtual Texturing to quickly assess its cost in performance and memory at runtime. |
+| **Textures with Preloaded Mips** | Display the total number of virtual textures Unity has loaded into the scene. Unity tries to preload the least detailed mipmap level (least being 128x128) into GPU memory. This number increases every time a material is loaded. |
@@ -730,53 +410,54 @@ The properties in this section let you visualize settings that [reduce rendering
### Occlusion Culling
-|**Property**|**Sub-property**|**Description**|
-|-|-|-|
-| **Occlusion Test Overlay** || Display a heatmap of culled instances. The heatmap displays blue if there are few culled instances, through to red if there are many culled instances. If you enable this setting, culling might be slower. |
-| **Occlusion Test Overlay Count Visible** || Display a heatmap of instances that Unity doesn't cull. The heatmap displays blue if there are many culled instances, through to red if there are few culled instances. This setting only has an effect if you enable **Occlusion Test Overlay**. |
-| **Override Occlusion Test To Always Pass** || Set occluded objects as unoccluded. This setting affects both the Rendering Debugger and the scene. |
-| **Occluder Context Stats** || Display the [**Occlusion Context Stats**](#occlusion-context-stats) section. |
-| **Occluder Debug View** || Display an overlay with the occlusion textures and mipmaps Unity generates. |
-|| **Occluder Debug View Index** | Set the occlusion texture to display. |
-|| **Occluder Debug View Range Min** | Set the brightness of the minimum depth value. Increase this value to brighten objects that are far away from the view. |
-|| **Occluder Debug View Range Max** | Set the brightness of the maximum depth value. Decrease this value to darken objects that are close to the view. |
-
-
+| **Property** | **Sub-property** | **Description** |
+|--------------------------------------------|-----------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Occlusion Test Overlay** | N/A | Display a heatmap of culled instances. The heatmap displays blue if there are few culled instances, through to red if there are many culled instances. If you enable this setting, culling might be slower. |
+| **Occlusion Test Overlay Count Visible** | N/A | Display a heatmap of instances that Unity doesn't cull. The heatmap displays blue if there are many culled instances, through to red if there are few culled instances. This setting only has an effect if you enable **Occlusion Test Overlay**. |
+| **Override Occlusion Test To Always Pass** | N/A | Set occluded objects as unoccluded. This setting affects both the Rendering Debugger and the scene. |
+| **Occluder Context Stats** | N/A | Display the [**Occlusion Context Stats**](#occlusion-context-stats) section. |
+| **Occluder Debug View** | N/A | Display an overlay with the occlusion textures and mipmaps Unity generates. |
+| **Occluder Debug View** | **Occluder Debug View Index** | Set the occlusion texture to display. |
+| **Occluder Debug View** | **Occluder Debug View Range Min** | Set the brightness of the minimum depth value. Increase this value to brighten objects that are far away from the view. |
+| **Occluder Debug View** | **Occluder Debug View Range Max** | Set the brightness of the maximum depth value. Decrease this value to darken objects that are close to the view. |
+
+
+
The Rendering Debugger with **Occlusion Test Overlay** enabled. The red areas are where Unity culls many objects. The blue area is where Unity culls few objects.
-
+
+
The Rendering Debugger with **Occluder Debug View** enabled. The overlay displays each mipmap level of the occlusion texture.
### Occlusion Context Stats
The **Occlusion Context Stats** section lists the occlusion textures Unity generates.
-|**Property**|**Description**|
-|-|-|
-| **Active Occlusion Contexts** | The number of occlusion textures. |
-| **View Instance ID** | The instance ID of the camera Unity renders the view from, to create the occlusion texture. |
-| **Subview Count** | The number of subviews. The value might be 2 or more if you use XR. |
-| **Size Per Subview** | The size of the subview texture in bytes. |
+| **Property** | **Description** |
+|-------------------------------|---------------------------------------------------------------------------------------------|
+| **Active Occlusion Contexts** | The number of occlusion textures. |
+| **View Instance ID** | The instance ID of the camera Unity renders the view from, to create the occlusion texture. |
+| **Subview Count** | The number of subviews. The value might be 2 or more if you use XR. |
+| **Size Per Subview** | The size of the subview texture in bytes. |
### GPU Resident Drawer Settings
-|**Section**|**Property**|**Sub-property**|**Description**|
-|-|-|-|-|
-|**Display Culling Stats**|||Display information about the cameras Unity uses to create occlusion textures.|
-|**Instance Culler Stats**||||
-||**View Count**|| The number of views Unity uses for GPU culling. Unity uses one view per shadow cascade or shadow map. For example, Unity uses three views for a Directional Light that generates three shadow cascades. |
-||**Per View Stats**|||
-|||**View Type**| The object or shadow split Unity renders the view from. |
-|||**View Instance ID**| The instance ID of the camera or light Unity renders the view from. |
-|||**Split Index**| The shadow split index value. This value is 0 if the object doesn't have shadow splits. |
-|||**Visible Instances**| How many objects are visible in this split. |
-|||**Draw Commands**| How many draw commands Unity uses for this split. |
-|**Occlusion Culling Events**||||
-||**View Instance ID**|| The instance ID of the camera Unity renders the view from. |
-||**Event type**|| The type of render pass.<br/>**OccluderUpdate**: The GPU samples the depth buffer and creates a new occlusion texture and its mipmap.<br/>**OcclusionTest**: The GPU tests all the instances against the occlusion texture. |
-||**Occluder Version**|| How many times Unity updates the occlusion texture in this frame. |
-||**Subview Mask**|| A bitmask that represents which subviews are affected in this frame. |
-||**Occlusion Test**|| Which test the GPU runs against the occlusion texture.<br/>**TestNone**: Unity found no occluders, so all instances are visible.<br/>**TestAll**: Unity tests all instances against the occlusion texture.<br/>**TestCulled**: Unity tests only instances that the previous **TestAll** test culled. |
-||**Visible Instances**|| The number of visible instances after occlusion culling. |
-||**Culled Instances**|| The number of culled instances after occlusion culling. |
-
+| **Section** | **Property** | **Sub-property** | **Description** |
+|------------------------------|-----------------------|-----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Display Culling Stats** | N/A | N/A | Display information about the cameras Unity uses to create occlusion textures. |
+| **Instance Culler Stats** | N/A | N/A | |
+| **Instance Culler Stats** | **View Count** | N/A | The number of views Unity uses for GPU culling. Unity uses one view per shadow cascade or shadow map. For example, Unity uses three views for a Directional Light that generates three shadow cascades. |
+| **Instance Culler Stats** | **Per View Stats** | N/A | |
+| **Instance Culler Stats** | N/A | **View Type** | The object or shadow split Unity renders the view from. |
+| **Instance Culler Stats** | N/A | **View Instance ID** | The instance ID of the camera or light Unity renders the view from. |
+| **Instance Culler Stats** | N/A | **Split Index** | The shadow split index value. This value is 0 if the object doesn't have shadow splits. |
+| **Instance Culler Stats** | N/A | **Visible Instances** | How many objects are visible in this split. |
+| **Instance Culler Stats** | N/A | **Draw Commands** | How many draw commands Unity uses for this split. |
+| **Occlusion Culling Events** | N/A | N/A | |
+| **Occlusion Culling Events** | **View Instance ID** | N/A | The instance ID of the camera Unity renders the view from. |
+| **Occlusion Culling Events** | **Event type**        | N/A                   | The type of render pass.<br/>**OccluderUpdate**: The GPU samples the depth buffer and creates a new occlusion texture and its mipmap.<br/>**OcclusionTest**: The GPU tests all the instances against the occlusion texture. |
+| **Occlusion Culling Events** | **Occluder Version** | N/A | How many times Unity updates the occlusion texture in this frame. |
+| **Occlusion Culling Events** | **Subview Mask** | N/A | A bitmask that represents which subviews are affected in this frame. |
+| **Occlusion Culling Events** | **Occlusion Test**    | N/A                   | Which test the GPU runs against the occlusion texture.<br/>**TestNone**: Unity found no occluders, so all instances are visible.<br/>**TestAll**: Unity tests all instances against the occlusion texture.<br/>**TestCulled**: Unity tests only instances that the previous **TestAll** test culled. |
+| **Occlusion Culling Events** | **Visible Instances** | N/A | The number of visible instances after occlusion culling. |
+| **Occlusion Culling Events** | **Culled Instances** | N/A | The number of culled instances after occlusion culling. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shadows-visualize-and-adjust.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shadows-visualize-and-adjust.md
index 90a10986003..89986a1f7fe 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shadows-visualize-and-adjust.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shadows-visualize-and-adjust.md
@@ -9,9 +9,9 @@ In the Inspector, use the **Cascade Splits** bar to see the size of each cascade
In the Scene view and the Game view, the cascade visualization feature allows you to see the boundaries of each cascade in your Scene. Each color represents a separate cascade, and the colors match those in the **Cascade Splits** bar. This allows you to see which colored area matches which cascade.
-
+
To enable the cascade visualization feature, select **Show Cascades** at the top of the list of **Shadows** properties. You can now see the shadow maps in the Scene view and the Game view.
- You can use the Scene view Camera to move around your Scene and quickly visualize the shadow maps of different areas.
-- You can use the Game view Camera to visualize the shadow maps from the point of view of the end user. You can use the **Show Cascades** feature while in Play Mode, which is useful if you have some method of controlling the Camera’s position and rotation and want to see the shadow maps from different points of view in your Project.
\ No newline at end of file
+- You can use the Game view Camera to visualize the shadow maps from the point of view of the end user. You can use the **Show Cascades** feature while in Play Mode, which is useful if you have some method of controlling the Camera’s position and rotation and want to see the shadow maps from different points of view in your Project.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/Override-Screen-Space-Lens-Flare.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/Override-Screen-Space-Lens-Flare.md
index 2b0202333e4..ce4dd763fb4 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/Override-Screen-Space-Lens-Flare.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/Override-Screen-Space-Lens-Flare.md
@@ -1,6 +1,6 @@
# Add screen space lens flares
-
+
The **Screen Space Lens Flare** override adds lens flares to your scene.
@@ -29,7 +29,8 @@ You can create the following types of lens flare:
You can control which types of flares appear and how many there are. You can also control the chromatic aberration effect HDRP adds to the flares.
-
+
+
The left image shows an emissive cube with bloom but no lens flares. The right image shows the same cube and a regular flare (top-left), a reversed flare (bottom-right), a warped flare (top-right) and streaks (to the left and right of the cube).
## Enable screen space lens flares
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md
index b78e3fa3881..f0d7e9a31c9 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md
@@ -1,11 +1,9 @@
# Add lens flares
-
+
Unity’s Scriptable Render Pipeline (SRP) includes the Lens Flare (SRP) component which renders a lens flare in your scene. This is the SRP equivalent of the Built-in Render Pipeline's [Lens Flare](https://docs.unity3d.com/Manual/class-LensFlare.html) component, which is incompatible with SRPs. You can attach a Lens Flare (SRP) component to any GameObject, but some properties only appear when you attach a Lens Flare (SRP) component to a light.
-
-
Use the Lens Flare (SRP) component to create lens flares for lights that have specific locations in your scene, for example bright bulbs. You can also create lens flares using the [Screen Space Lens Flare](Override-Screen-Space-Lens-Flare.md) volume override, or use both the Lens Flare (SRP) component and the Screen Space Lens Flare override in the same scene.
## Create a lens flare in SRP
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/simulating-currents-with-water-decals.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/simulating-currents-with-water-decals.md
index 8c860962b1a..e3c314e2fd5 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/simulating-currents-with-water-decals.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/simulating-currents-with-water-decals.md
@@ -7,7 +7,7 @@ A water decal is a shader graph Master Stack. It's applied in world space, allow
By default, water decal regions are anchored to the camera. You can also anchor them to a GameObject.
> [!NOTE]
-> For backward compatibility, water decals are disabled by default.
+> For backward compatibility, [water masks and current water decals](enable-mask-and-current-water-decals.md) are disabled by default.
## Create a water decal
@@ -23,9 +23,8 @@ By default, the water decal shader graph Master Stack contains the following pro
- **SurfaceFoam**
- **DeepFoam**
-Once you have [enabled mask and current water decals](enable-mask-and-water-decals.md), you can add the following water features through the Graph Inspector:
+Once you have [enabled water mask and current water decals](enable-mask-and-current-water-decals.md), you can add the following water features through the Graph Inspector:
-- **HorizontalDeformation**.
- **SimulationMask**
- **SimulationFoamMask**
- **LargeCurrent**
@@ -39,8 +38,9 @@ To enable horizontal deformation, go to the active [HDRP Asset](hdrp-asset.md),
Enabling horizontal deformation has the following effects:
+- You can add a new **HorizontalDeformation** feature in the Graph Inspector of a water decal shader graph.
- HDRP creates a new buffer, which increases the amount of memory HDRP uses.
-- The results of water scripts and [underwater effects](water-underwater-view.md) might be less accurate.
+- The results of water scripts, [underwater effects](water-underwater-view.md), and [script interactions](float-objects-on-a-water-surface.md) might be less accurate.
## Additional resources
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/skin-and-diffusive-surfaces-subsurface-scattering.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/skin-and-diffusive-surfaces-subsurface-scattering.md
index 8782d61bcdf..b2680f20e4a 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/skin-and-diffusive-surfaces-subsurface-scattering.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/skin-and-diffusive-surfaces-subsurface-scattering.md
@@ -51,7 +51,7 @@ To add subsurface scattering to a Material:
The following image displays grass in an environment scene. In the left image the grass renders correctly. The grass in the right image has the bright green tint that HDRP applies to a Material that doesn't have a valid diffusion profile:
-
+
The Material appears bright green in the following cases:
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/fuzz-map.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/fuzz-map.md
index b87a141fffd..5b14ddf4efa 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/fuzz-map.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/fuzz-map.md
@@ -1,4 +1,4 @@
# Fuzz Maps
The fabric shaders can use a fuzz map to provide additional fuzz detail to the surface. A fuzz map is a single-channel texture. To apply it, the fabric shader mixes it in with the base color map to produce a final color used for shading the fabric material.
-
+
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/general/emission-global-illumination.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/general/emission-global-illumination.md
index b1dc13ca3d9..166fae00902 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/general/emission-global-illumination.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/general/emission-global-illumination.md
@@ -1,5 +1,5 @@
-
+
Emission
Global Illumination
The mode HDRP uses to determine how color emission interacts with global illumination. • Realtime: Select this option to make emission affect the result of real-time global illumination. • Baked: Select this option to make emission only affect global illumination during the baking process. • None: Select this option to make emission not affect global illumination.
Enable the checkbox to tell HDRP to render Meshes with the same geometry and Material in one batch when possible. This makes rendering faster. HDRP can't render Meshes in one batch if they have different Materials, or if the hardware doesn't support GPU instancing. For example, you can't static-batch GameObjects that have an animation based on the object pivot, but the GPU can instance them.
Enable the checkbox to make HDRP write motion vectors for GameObjects that use vertex animation. This removes the ghosting that vertex animation can cause.
Specifies the method HDRP uses to blend the color of each pixel of the material with the background pixels. The options are: • Alpha: Uses the Material’s alpha value to change how transparent an object is. 0 is fully transparent. 1 appears fully opaque, but the Material is still rendered during the Transparent render pass. This is useful for visuals that you want to be fully visible but to also fade over time, like clouds. • Additive: Adds the Material’s RGB values to the background color. The alpha channel of the Material modulates the intensity. A value of 0 adds nothing and a value of 1 adds 100% of the Material color to the background color. • Premultiply: Assumes that you have already multiplied the RGB values of the Material by the alpha channel. This gives better results than Alpha blending when filtering images or composing different layers. This property only appears if you set Surface Type to Transparent.
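
For reference, these options correspond to the standard blend equations below, where C_s is the Material colour, C_d is the background colour, and α is the Material's alpha. This is general background on alpha blending, not an HDRP-specific formula:

```math
\text{Alpha:}\quad C = \alpha C_s + (1 - \alpha) C_d \qquad
\text{Additive:}\quad C = \alpha C_s + C_d \qquad
\text{Premultiply:}\quad C = C_s + (1 - \alpha) C_d
```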
Specifies the face to cull for GameObjects that use this material. The options are: • Front: Culls the front face of the mesh. • Back: Culls the back face of the mesh. This property only appears if you disable Double Sided.
Determines how HDRP handles a material with regards to Double Sided GI. When selecting Auto, Double-Sided GI is enabled if the material is Double-Sided; otherwise selecting On or Off respectively enables or disables double sided GI regardless of the material's Double-Sided option. When enabled, the lightmapper accounts for both sides of the geometry when calculating Global Illumination. Backfaces aren't rendered or added to lightmaps, but get treated as valid when seen from other objects. When using the Progressive Lightmapper backfaces bounce light using the same emission and albedo as frontfaces. (Currently this setting is only available when baking with the Progressive Lightmapper backend.).
Specifies the mode HDRP uses to calculate the normals for back facing geometry. • Flip: The normal of the back face is 180° of the front facing normal. This also applies to the Material which means that it looks the same on both sides of the geometry. • Mirror: The normal of the back face mirrors the front facing normal. This also applies to the Material which means that it inverts on the back face. This is useful when you want to keep the same shapes on both sides of the geometry, for example, for leaves. • None: The normal of the back face is the same as the front face. This property only appears if you enable Double-Sided.
Indicates whether to make alpha blending not reduce the intensity of specular highlights. This preserves the specular elements on the transparent surface, such as sunbeams shining off glass or water. This property only appears if you set Surface Type to Transparent.
Indicates whether fog affects the transparent surface. When disabled, HDRP doesn't take this material into account when it calculates the fog in the Scene.
Indicates whether HDRP includes this material when it processes the screen space reflection pass. This property only appears if you set Surface Type to Transparent.
Indicates whether HDRP includes this material when it processes the screen space reflection pass. This property only appears if you set Surface Type to Opaque.
Specifies the rendering pass that HDRP processes this material in. • Before Refraction: Draws the GameObject before the refraction pass. This means that HDRP includes this Material when it processes refraction. To expose this option, select Transparent from the Surface Type drop-down. • Default: Draws the GameObject in the default opaque or transparent rendering pass pass, depending on the Surface Type. • Low Resolution: Draws the GameObject in half resolution after the Default pass. • After post-process: For Unlit Materials only. Draws the GameObject after all post-processing effects.
Allows you to change the rendering order of overlaid transparent surfaces. For more information and an example of usage, see the Material sorting documentation. This property only appears if you set Surface Type to Transparent.
Specifies whether the material supports transparency or not. Materials with a Transparent Surface Type are more resource intensive to render than Materials with an Opaque Surface Type. Depending on the option you select, HDRP exposes more properties. The options are: • Opaque: • Transparent: Simulates a translucent Material that light can penetrate, such as clear plastic or glass. For more information about the feature and for the list of properties each Surface Type exposes, see the Surface Type documentation.
Indicates whether HDRP adds polygons from the transparent surface to the depth buffer to improve their sorting. HDRP performs this operation before the lighting pass and this process improves GPU performance.
Indicates whether HDRP writes motion vectors for transparent GameObjects that use this Material. This allows HDRP to process effects like motion blur for transparent objects. For more information on motion vectors, see the motion vectors documentation. This property only appears if you set Surface Type to Transparent.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/thread-map.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/thread-map.md
index aafce7bbf03..20eabef7627 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/thread-map.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/thread-map.md
@@ -2,7 +2,7 @@
The Fabric shaders can use a thread map for the high-frequency details that fabrics exhibit. This is similar to the [detail map](../Mask-Map-And-Detail-Map.md#DetailMap) found in HDRP's Lit shaders. The Thread Map input is channel-packed to optimise memory and is arranged in a way to optimise precision for the normal map. The Fabric Material Sample includes some pre-authored Thread Maps for you.
-
+
The format of the thread map texture:
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/tracing-modes.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/tracing-modes.md
index 9fe88000daa..9217a01f749 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/tracing-modes.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/tracing-modes.md
@@ -14,15 +14,15 @@ In Mixed tracing mode, HDRP processes screen-space ray marching in the GBuffer.
In Mixed tracing mode, HDRP still uses ray tracing for any geometry inside the ray tracing acceleration structure, regardless of whether vertex animation or decals modify the geometry's surface. This means if HDRP fails to intersect the on-screen deformed geometry, it intersects the original mesh inside in the ray tracing acceleration structure. This may cause visual discrepancies between what you see and what you expect. For example, the following Scene contains a cliff that uses mesh deformation.
-
+
In this Scene, Mixed mode can include reflections for the opaque leaf particles, the white decal, and GameObjects that aren't visible in the cliff face's non-deformed geometry.
-
+
Reflection rays intersect with the original, non-deformed cliff face geometry. This means the rays can still be affected by the bush behind the rock. To view the Scene from the perspective of the ray tracing mode, refer to the following image.
-
+
This image shows the elements of the Scene that ray tracing takes into account. The non-deformed cliff face geometry reveals the bushes behind the rocks.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/stacklit-material.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/stacklit-material.md
index fab5a3682fb..d35992eb024 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/stacklit-material.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/stacklit-material.md
@@ -2,7 +2,7 @@
The StackLit Master Stack can render materials that are more complex than the [Lit Master Stack](lit-master-stack-reference.md). It includes all the features available in the Lit shader and, sometimes, provides more advanced or higher quality versions. For example, it uses a more advanced form of specular occlusion and also calculates anisotropic reflections for area lights in the same way the Lit shader does for other light types. It also takes into account light interactions between two vertically stacked physical layers, along with a more complex looking general base layer.
-
+
## Creating a StackLit Shader Graph
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/terrain-lit-material.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/terrain-lit-material.md
index 0c28b4a30bc..9febcdbb874 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/terrain-lit-material.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/terrain-lit-material.md
@@ -2,7 +2,7 @@
The High Definition Render Pipeline (HDRP) uses the Terrain Lit Shader for Unity Terrain. This Shader is a simpler version of the [Lit Shader](lit-material.md). A Terrain can use a Terrain Lit Material with up to eight [Terrain Layers](https://docs.unity3d.com/Manual/class-TerrainLayer.html).
-
+
## Creating a Terrain Lit Material
@@ -22,7 +22,7 @@ To use a Terrain Lit Material, you must assign it to a Terrain:
1. View the Terrain in the Inspector window and select **Terrain Settings**.
2. Either drag and drop or use the radio button to assign your Terrain Lit Material to the **Material** property.
-
+
## Using the Paint Holes Tool
@@ -31,4 +31,3 @@ If you use the **Paint Holes** tool on your terrain, enable the **Terrain Hole**
1. Open your HDRP Asset in the Inspector window.
2. Go to **Rendering** and enable **Terrain Hole**.
-
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-fabrics.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-fabrics.md
index 87f80130891..1981c13cb45 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-fabrics.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-fabrics.md
@@ -6,7 +6,7 @@ The Cotton/Wool shader is your starting point for rendering diffuse fabrics in t
The type of fibers that make up the fabric, as well as the fabric's knit or weave, influence the appearance of the fabric. Natural fibers are typically rougher so they diffuse light.
-
+
The Cotton/Wool shader is a pre-configured Shader Graph. To learn more about the Cotton/Wool shader implementation, or to create your own Fabric shader variant, see the Shader Graph documentation about the [Fabric Master Stack](fabric-master-stack-reference.md).
@@ -18,7 +18,7 @@ The Silk shader is your starting point for rendering anisotropic fabrics in the
Silk and other synthetic fibers are usually smoother than natural fibres because they're produced as a single smooth filament. When these fibres are woven together, it produces a fabric with anisotropic specular highlights.
-
+
The Silk shader is a pre-configured Shader Graph. To learn more about the Silk shader implementation, or to create your own Silk shader variant, see the Shader Graph documentation about the [Fabric Master Stack](fabric-master-stack-reference.md).
@@ -28,7 +28,7 @@ Refer to [Create a fabric material](create-a-fabric-material.md) for more inform
The Fabric Master Stack enables you to render various types of fabric in the High Definition Render Pipeline (HDRP). It uses either cotton wool or anisotropic silk as its base, and supports various additional effects such as [Subsurface Scattering](skin-and-diffusive-surfaces-subsurface-scattering.md) to create realistic looking fabrics.
-
+
Refer to [Create a fabric material](create-a-fabric-material.md) for more information.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-refraction.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-refraction.md
index 62af9a36908..5b7cda6fed1 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-refraction.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/understand-refraction.md
@@ -2,7 +2,8 @@
Refraction is when light bends as it passes from one material ('medium') into another. Your eye can see refraction only through a transparent material, because most light is absorbed or reflected in opaque materials.
-
+
+
Light rays bend as they travel through the different mediums of air, water, and glass, so the pencil appears deformed.
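+
+As general optics background (not an HDRP-specific setting), the amount of bending is described by Snell's law, where n1 and n2 are the refractive indices of the two media and the angles are measured from the surface normal:
+
+```math
+n_1 \sin\theta_1 = n_2 \sin\theta_2
+```
+
+For example, a ray entering water (n ≈ 1.33) from air (n ≈ 1.0) at 45° is refracted to roughly 32°, which is why the submerged part of the pencil appears offset.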
## How refraction works in HDRP
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-decals.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-decals.md
index b7a7eb98b59..26d9f17088a 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-decals.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-decals.md
@@ -8,7 +8,7 @@ The High Definition Render Pipeline (HDRP) includes the following ways to create
To use these methods, you need to create a decal Material. A decal Material is a Material that uses the [Decal Shader](Decal-Shader.md) or [Decal Master Stack](master-stack-decal.md). You can then place or project your decal Material into a Scene.
-
+
## Decal Layers
@@ -69,7 +69,7 @@ HDRP renders Material depth in a Depth Prepass to apply decals to opaque Materia
You can use Additive normal blending to blend decal normals with the normals of a specific GameObject.
In the following image examples, the screenshot on the left doesn't use additive normal blending, and the screenshot on the right uses additive normal blending.
-
+
To use Additive Normal Blending:
1. Open your Project’s [HDRP Asset](HDRP-Asset.md).
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-the-graphics-compositor.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-the-graphics-compositor.md
index b18be1ab821..fb7dd385c99 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-the-graphics-compositor.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/use-the-graphics-compositor.md
@@ -17,11 +17,11 @@ If you set the Game view to show **Display 1**, , make sure the Graphics Composi
The following example uses the Graphics Compositor to render a watermark on top of a Unity Scene.
-
+
The composition graph.
-
+
The result.
@@ -50,7 +50,7 @@ When you create a Composition Graph, there are two main types of input property
The following graph contains examples of the property types described above. The **Logo** property is an example of a Composition Layer and the **Opacity** property is an example of an input property to control an aspect of the composition:
-
+
Unity saves the Graphics Compositor properties in a .asset file with the same name as the Composition Graph. When the Graphics Compositor loads a Composition Graph, it also loads the properties from the corresponding Asset file if one exists, otherwise, it creates a new Asset with default settings.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-create-a-current-in-the-water-system.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-create-a-current-in-the-water-system.md
index fe59abc85d1..5550abaeb3b 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-create-a-current-in-the-water-system.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-create-a-current-in-the-water-system.md
@@ -22,16 +22,16 @@ To open the River sample scene:
You can create a Current map texture in any image-editing software. The image can be in any non sRGB format. The resolution of a current map texture has a small impact on the current effect.
-
+
The Red and Green channels contain the 2D direction of the current and the Blue channel contains the influence of the current map.
The default direction is +X; as a result, the neutral value for a current map is (1, 0.5, 1).
When you import a current map in the Editor, make sure that the sRGB checkbox is disabled in the texture importer.
The following images display each channel of the current map included in the River sample scene.
-* The red channel of a current map: 
-* The green channel of a current map: 
-* The blue channel of a current map: 
+* The red channel of a current map: 
+* The green channel of a current map: 
+* The blue channel of a current map: 
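+
+The following script is a minimal, hypothetical sketch (not part of HDRP) of one way to generate a uniform current map at runtime. It assumes the R and G channels store the 2D direction remapped from [-1, 1] to [0, 1] and the B channel stores the influence, which is consistent with the neutral value (1, 0.5, 1) described above; the exact encoding HDRP expects may differ.
+
+```csharp
+using UnityEngine;
+
+// Hypothetical helper that builds a current map texture from a uniform 2D
+// current direction. R and G store the direction remapped from [-1, 1] to
+// [0, 1]; B stores the influence of the current map.
+public static class CurrentMapUtility
+{
+    public static Texture2D CreateUniformCurrentMap(int size, Vector2 direction, float influence)
+    {
+        direction.Normalize();
+
+        // linear: true keeps the texture out of sRGB, as current maps require.
+        var texture = new Texture2D(size, size, TextureFormat.RGBA32, mipChain: false, linear: true);
+
+        var color = new Color(
+            direction.x * 0.5f + 0.5f,  // R: X component of the current direction
+            direction.y * 0.5f + 0.5f,  // G: Y component of the current direction
+            Mathf.Clamp01(influence),   // B: influence of the current map
+            1.0f);
+
+        var pixels = new Color[size * size];
+        for (int i = 0; i < pixels.Length; i++)
+            pixels[i] = color;
+
+        texture.SetPixels(pixels);
+        texture.Apply();
+        return texture;
+    }
+}
+```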
## Apply a current Map to a water surface
@@ -61,4 +61,4 @@ To visualize the effect of a current map on a water surface:
- Open the **Miscellaneous** section.
- Locate **Debug Mode** and select **Current.**
-
+
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-decals-masking-landing.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-decals-masking-landing.md
index 7a6f35ba48a..4e0e4575a4c 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-decals-masking-landing.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-decals-masking-landing.md
@@ -8,7 +8,7 @@ You can also add detailed visual effects to localized water areas with water dec
| **Page** | **Description** |
|---------------------------------------------------------------------------------------|-----------------------------------------------------------------------------|
-| [Enable mask and water decals](enable-mask-and-water-decals.md) | Mask and current water decals are disabled by default. |
+| [Enable mask and current water decals](enable-mask-and-current-water-decals.md) | Mask and current water decals are disabled by default. |
| [Configure swell, agitation, or ripples with a water mask](add-swell-agitation-or-ripples.md) | Configure swell, agitation, or ripples across the water surface. |
| [Simulate currents with a water decal](simulating-currents-with-water-decals.md) | Simulate water currents by projecting textures. |
| [Simulate ripples with masks](simulating-foam-or-ripples-with-masks.md) | Create effects like ripples. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-deform-a-water-surface.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-deform-a-water-surface.md
index 2d61ea158c5..1460f36fc34 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-deform-a-water-surface.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-deform-a-water-surface.md
@@ -1,63 +1,36 @@
-# Deform a water surface
+# Deform a water surface vertically
-You can use a deformer to control the shape of a water surface. A Deformer is a GameObject that changes the shape of the water surface. You can create a deformer based on one of the predefined [shapes](#deformer-type), or use a shadergraph to make a completely custom Deformer.
+You can use water decals to achieve deformation effects on the water surface. Water decals use textures and Shader Graph materials to modify the water's appearance dynamically.
-Water deformers can affect each other additively if they are placed at the same location. For example, if you put two 1 meter box deformers on top of each other, it is the same as creating a single box deformer two meters in height.
+
-HDRP limits the maximum number of deformers active at the same time in a scene. You can control the maximum amount in the [HDRP Asset](HDRP-Asset.md) in **Rendering** > **Water** > **Deformation** > **Maximum Deformer Count**.
+To deform a water surface vertically:
-The region of a water surface that can receive deformation is limited. You can select the size and offset of the rectangular region that supports deformation in the **Deformation** section of the water surface Inspector window. To highlight this region for debug purposes, go to **Miscellaneous** > **Debug Mode** and select **Deformation**.
+1. Create a [water surface](water-use-the-water-system-in-your-project.md).
-Also, there is a maximum number of deformer you can have in a scene. This limit can be set directly in the HDRP Asset under **Water** > **Deformation** > **Maximum Deformer Count**.
+1. In the **Inspector** window of the water surface, under **Water Decals**, enable **Deformation**.
-
+ To add a deformation only, disable **Foam**.
-## Create a Water Deformer
+1. In the main menu, go to **GameObject** > **Water** > **Water Decal**.
-To create a Deformer:
+ Unity adds a **Water Decal** GameObject to your hierarchy and the water deformation appears in the **Scene** view.
-1. Go to **GameObject** > **Water** > **Deformer.**
-2. Select the type of deformer you want to use.
+1. In the **Inspector** window of the **Water Decal**, expand the **Water Decal (Script)** section.
-**Note**: You can also change the Deformer type in the Water Deformer inspector window.
+1. To create a new **Water Decal** material, do one of the following:
-To make a water deformer affect a water surface:
+ - To start from the template shader graph, select **New** > **Deformer and Foam Water Decal**.
-1. Select the Water Surface to open it in the Inspector.
-2. Select the Deformation drop-down.
-3. Select the **Enable** toggle.
+ - To create a new material from scratch, select **New** > **Empty Water Decal**.
-You must also make sure it is enabled in your Project’s HDRP Asset and in the Frame Settings:
+   Unity creates a new material that uses the [water decal master stack](understand-decals.md) in a subfolder of your Assets folder.
-1. Select the HDRP Asset in the Project window and, in the Inspector, go to **Rendering** > **Water** and enable the **Deformation** checkbox.
-2. To enable Deformation for all Cameras, Go to **Edit** > **Project Settings** > **Graphics** > **Pipeline Specific Settings** > **HDRP** > **Frame Settings (Default Values)** > **Camera** > **Rendering** > , then enable the **Water Deformation** checkbox.
+1. In the **Inspector** window of the **Water Decal**, select **Edit** next to the water decal shader graph, then edit the shader graph as needed.
-Lastly, make sure that your deformer is inside the deformation area. To see the area, you can select deformation in the debug dropdown in the Miscellaneous foldout at the bottom of the water surface inspector.
+## Configure the Deformer and Foam Water Decal template
-
-
-## Configure a Deformer type
-
-The properties in the Water Deformer inspector window change based on the type you select.
-
-**Note**: The **Move** tool only moves a water deformer along the X and Z axes. To make a deformer move above or below the water surface, change the **Amplitude** value.
-
-### Common properties
-
-The following properties exist in all Deformer types.
-
-| **Property** | | **Description** |
-| --------------- | -------------- | ------------------------------------------------------------ |
-| **Scale Mode** | | The scaling mode to apply to the deformer. The options are:• **Scale Invariant**: Ignores the scale set in the Transform component and uses the region size directly.• **Inherit from Hierarchy**: Multiplies the Deformer’s region size by the [lossy scale](https://docs.unity3d.com/ScriptReference/Transform-lossyScale.html) of the Transform. Note that since the Deformer uses a top down projection, if the transformation hierarchy is [skewed](https://docs.unity3d.com/Manual/class-Transform.html), the deformer does not scale correctly. |
-| **Region Size** | | Control the size of the deformer in meters. |
-| **Type** | | |
-| | **Sphere** | Create a deformer in the shape of a sphere. |
-| | **Box** | Create a deformer in the shape of a cube. For information about the properties specific to this type, see [Box](#deformer-type-box). |
-| | **Bow Wave** | Create a deformer in the shape of the front of a boat. For information about the properties specific to this type, see [Bow Wave](#deformer-type-bowwave). |
-| | **Shore Wave** | Create a deformer in the shape of waves that move in a specific direction. For information about the properties specific to this type, see [Shore Wave](#deformer-type-shorewave). |
-| | **Texture** | Customize the shape of a deformer with a texture. For information about the properties specific to this type, see [Texture](#deformer-type-texture). |
-| | **Material** | Customize the shape of a deformer with a ShaderGraph. For information about the properties specific to this type, see [Texture](#deformer-type-material). |
-| **Amplitude** | | Control the height of the water surface. |
+The properties shown in the **Inspector** window of the water decal material change based on the type you select.
@@ -80,7 +53,7 @@ Use the following property to control the Bow Wave deformer type.
| ---------------------- | -------------------------------------------------------- |
| **Bow Wave Elevation** | Controls the maximum height, in meters, of the bow wave. |
-To make a bow wave move with a boat’s bow, set the Bow Wave as a child of the boat GameObject. However, the Bow Wave deformer can only move within the area defined in the Water surface Inspector in **Deformation** > **Area Size**. To preserve the deformation’s resolution, you can use a script to make the `deformationAreaOffset` follow the boat position.
+To make a bow wave move with a boat’s bow, set the Bow Wave as a child of the boat GameObject. However, the Bow Wave deformer can only move within the area defined in the **Inspector** window of the water surface, in **Deformation** > **Area Size**. To preserve the deformation’s resolution, you can use a script to make the `deformationAreaOffset` follow the boat position.
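+
+The following is a minimal script sketch of that approach. It assumes `deformationAreaOffset` is a `Vector2` offset in the XZ plane, measured relative to the water surface's position; adjust the calculation to match your setup.
+
+```csharp
+using UnityEngine;
+using UnityEngine.Rendering.HighDefinition;
+
+// Sketch: keeps the water surface's deformation area centred on a boat so a
+// Bow Wave deformer parented to the boat stays inside the deformation area.
+public class FollowBoatDeformationArea : MonoBehaviour
+{
+    public WaterSurface waterSurface;
+    public Transform boat;
+
+    void LateUpdate()
+    {
+        if (waterSurface == null || boat == null)
+            return;
+
+        // Horizontal offset of the boat from the water surface origin.
+        Vector3 delta = boat.position - waterSurface.transform.position;
+        waterSurface.deformationAreaOffset = new Vector2(delta.x, delta.z);
+    }
+}
+```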
@@ -106,19 +79,11 @@ Use the following properties to control the Shore Wave deformer type.
These properties are specific to the Texture deformer type.
-| **Property** | **Description** |
-| ------------ | ------------------------------------------------------------ |
-| Range Remap | Specifies the range of the deformer in the [-1, 1] interval. The input texture values will be remapped from [0,1] to the specified range. |
-| Texture | The texture used by the deformer. This is a single channel texture that contains the amplitude of the deformation relative to the deformer’s amplitude. This texture can be a regular texture or a Render Texture, which can be updated at runtime by modifying a render target with a compute shader for example. For a Render Texture, use the R16_UNorm format . |
-
-
-
-## Material
+| **Property** | **Description** |
+|-----------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| **Range Remap** | Specifies the range of the deformer in the [-1, 1] interval. The input texture values will be remapped from [0,1] to the specified range. |
+| **Texture**     | The texture used by the deformer. This is a single-channel texture that contains the amplitude of the deformation relative to the deformer’s amplitude. This texture can be a regular texture or a Render Texture, which can be updated at runtime, for example by modifying a render target with a compute shader. For a Render Texture, use the R16_UNorm format. |
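+
+The following sketch shows one way to create such a Render Texture and update it with a compute shader each frame. The compute shader asset, its `FillDeformation` kernel, and the `_Deformation` and `_Time` property names are hypothetical placeholders rather than HDRP API; only the Render Texture format follows the table above.
+
+```csharp
+using UnityEngine;
+using UnityEngine.Experimental.Rendering;
+
+// Sketch: creates an R16_UNorm Render Texture for a Texture deformer and
+// writes into it with a (user-authored) compute shader every frame.
+public class DeformationTextureUpdater : MonoBehaviour
+{
+    public ComputeShader deformationCompute;
+    public int resolution = 256;
+
+    RenderTexture deformationTexture;
+    int kernel;
+
+    void Start()
+    {
+        deformationTexture = new RenderTexture(resolution, resolution, 0, GraphicsFormat.R16_UNorm);
+        deformationTexture.enableRandomWrite = true;
+        deformationTexture.Create();
+
+        kernel = deformationCompute.FindKernel("FillDeformation");
+    }
+
+    void Update()
+    {
+        deformationCompute.SetTexture(kernel, "_Deformation", deformationTexture);
+        deformationCompute.SetFloat("_Time", Time.time);
+        deformationCompute.Dispatch(kernel, resolution / 8, resolution / 8, 1);
+        // Assign deformationTexture to the deformer's Texture property, for example in the Inspector.
+    }
+}
+```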
-These properties are specific to the Material deformer type.
+## Additional resources
-| **Property** | **Description** |
-| ------------ | ------------------------------------------------------------ |
-| Resolution | The material specified by this deformer will be blit into the intermediate deformer atlas to be used later by the water system. This property specifies the size that it should occupy in the atlas. |
-| Update Mode | The frequency at which the material should be rendered inside the atlas. When update mode is **On Demand**, you can use the **RequestUpdate** function on the **Deformer** script to trigger an update. |
-| Material | The material used by the deformer. This should be a Material with a shader created from the ShaderGraph Water Decal master node. Use the **Deformation** output with values between [0,1] that can be remapped using **Range Remap** property and multliplied by the **Amplitude**. |
+[Materials and surfaces](materials-and-surfaces.md)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-exclude-part-of-the-water-surface.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-exclude-part-of-the-water-surface.md
index cc2931feb86..7546a03057e 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-exclude-part-of-the-water-surface.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-exclude-part-of-the-water-surface.md
@@ -4,7 +4,7 @@ Sometimes you might want to prevent water from appearing on top of other surface
You can use a water excluder to remove a water surface inside a floating object. The following example shows a water excluder applied to the inside of a boat.
-
+
This image shows the following, from left to right:
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-materials-in-the-water-system.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-materials-in-the-water-system.md
index 97452202f63..aff01ecc678 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-materials-in-the-water-system.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-materials-in-the-water-system.md
@@ -17,7 +17,7 @@ As you increase the values of the first three properties in this section of the
**Direct Light Body Term** and **Direct Light Tip Term** (the second of these is only for **Ocean, Sea, or Lake** water surface types) increase the intensity of light visible through waves, as at the wave tips in the screenshot below. **Direct Light Tip Term** is most visible at grazing angles.
-
+
## Custom Materials
To create a custom water Material, copy the default water Material and adjust that copy. The [water ShaderGraph](master-stack-water.md) documentation provides more information about which properties you can adjust.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-use-the-water-system-in-your-project.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-use-the-water-system-in-your-project.md
index adda9ef0ccb..25fa182bfd8 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-use-the-water-system-in-your-project.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-use-the-water-system-in-your-project.md
@@ -15,7 +15,7 @@ This page provides an overview of the basic workflow to include a water surface
2. Go to **Edit** > **Project Settings** > **Graphics** > **Pipeline Specific Settings** > **HDRP** > **Frame Settings**, then enable water in three places:
* **Camera** > **Rendering**.
* **Realtime Reflection** > **Rendering**.
-* **Baked or Custom Reflection** > **Rendering**.
+* **Custom or Baked Reflection** > **Rendering**.
This is especially important when you upgrade your project from an earlier version of Unity, because water is inactive by default. If your project originates in HDRP 14 (Unity 2022.2) or later, the water implementation may work even if you only enable it in the **Quality** settings.
@@ -49,10 +49,7 @@ To simulate stormy conditions, you might:
* Choose darker **Color** values for **Refraction** and **Scattering**.
* You can also enable **Foam**.
-
-
-
-
+
### Dirty river
To simulate a polluted or silty river, you could:
@@ -61,10 +58,7 @@ To simulate a polluted or silty river, you could:
* Disable **Caustics**.
* You can also add a [Decal](decals.md) that resembles fragments of debris.
-
-
-
-
+
### Calm, clean swimming pool on a sunny day
To simulate a clean outdoor swimming pool on a clear day with little wind:
@@ -75,21 +69,14 @@ To simulate a clean outdoor swimming pool on a clear day with little wind:
* Adjust the **Virtual Plane Distance** to a value appropriate to the depth of your pool.
* In the **Refraction** properties, reduce **Absorption Distance**, to make the water more transparent. Increase **Maximum Distance** to extend the range of the refraction effect, especially if you have scenery in the water.
-
-
-
-
+
### A deep swimming pool
* Somewhat darken the **Color** properties for **Scattering** and **Refraction**.
* Reduce the **Absorption Distance** slightly.
* Increase **Maximum Distance** if there are caustics or objects in the water that make the refraction effect visible.
-
-
-
-
-
+
## Additional resources
* Settings and properties related to the water system
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-vfx-interaction.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-vfx-interaction.md
index 396ac631f6f..0c9c34a2146 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-vfx-interaction.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-vfx-interaction.md
@@ -2,8 +2,6 @@
The water system supports evaluation from the VFX Graph, so you can access data such as the water height at a given point, the surface normal, or the current.
-
-
However, there are several limitations to be aware of.
Because the water surface GameObject is saved inside a scene and the VFX Graph is an asset on disk, it is not possible to reference the surface directly from within the graph. This means you must set the water surface data globally before the VFX can sample the water.
As a result, only a single surface can be sampled from any VFX Graph at any given time.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-10.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-10.md
index 1e58f783baa..1a25db1bd97 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-10.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-10.md
@@ -14,7 +14,7 @@ This version of HDRP includes support for the Game Core Xbox Series platform as
### IES Profiles and light cookies
-
+
HDRP now supports the Illuminating Engineering Society's (IES) file format for describing the distribution of light from a light source. HDRP supports the IES profile for Point, Spot (Cone, Pyramid, and Box), and rectangular Area [Lights](Light-Component.md). You can also mix the IES profile with [cookies](https://docs.unity3d.com/Manual/Cookies.html) and even use the profile and cookie mix for [light map baking](https://docs.unity3d.com/Manual/LightMode-Baked.html).
@@ -45,7 +45,7 @@ This version of HDRP includes scalability settings for fog and subsurface scatte
### Screen-space global illumination
-
+
This version of HDRP introduces screen-space global illumination (SSGI). It is an algorithm that accesses indirect diffuse lighting the environment generates. It works in the same way as the [Screen Space Reflection](Override-Screen-Space-Reflection.md) in that it uses ray marching to calculate the result.
@@ -53,14 +53,14 @@ For more information, see [Screen Space Global Illumination](Override-Screen-Spa
### Custom Pass AOV Export
-
+
This feature allows you to export arbitrary data from custom pass injection points using an extension of the Arbitrary Output Variables (AOV) API in HDRP. An example use-case is for exporting “Object IDs” that are rendered with a custom pass. For information about the feature and example scripts, see the [AOV documentation](AOVs.md).
### Debug modes
#### Light debug view
-
+
To help you to debug lighting in your Scene, HDRP now includes various lighting debug view modes that allow you to separate the various components of the light into multiple parts. These debug modes are also available in the [AOV](AOVs.md) API to allow recorders to export them. The new lighting debug modes are:
@@ -79,13 +79,13 @@ HDRP now includes a new [light layer](Rendering-Layers.md) debug mode which can
For more information, see the Lighting panel section in the [Rendering Debugger](rendering-debugger-window-reference.md).
#### Volume debug mode
-
+
The Rendering Debugger window now has a new Volume panel which you can use to visualize the Volume components that affect a specific Camera. For each Volume that contributes to the final interpolated value, this panel shows the value of each property and whether or not it is overridden. It also calculates the Volume's influence percentage using the Volume's weight and blend distance. For more information, see the Volume panel section in the [Rendering Debugger](rendering-debugger-window-reference.md#VolumePanel).
#### Quad Overdraw and Vertex Density
-
+
To help you find GameObjects in your scene that need LODs, HDRP includes two new full screen rendering debug modes to spot Meshes that are far away or have too many details.
- Quad Overdraw: highlights GPU quads running multiple fragment shaders, which is mainly caused by small or thin triangles. (Not supported on Metal and PS4)
@@ -131,7 +131,7 @@ For more information, see the [Create an HDRI sky](create-an-hdri-sky.md).
### Graphics Compositor
-
+
The Graphics Compositor allows real-time compositing operations between layers of 3D content, static images, and videos.
The tool supports three types of compositing techniques:
@@ -148,7 +148,7 @@ For information about the feature, see the [HDRP Compositor documentation](graph
#### Path-traced depth of field
-
+
This version of HDRP includes a new depth of field mode for producing path-traced images with high-quality defocus blur. Compared to post-processed depth of field, this mode works correctly with multiple layers of transparency and does not produce any artifacts, apart from noise typical in path traced images (which you can mitigate by increasing the sample count and/or using an external denoising tool).
@@ -156,21 +156,21 @@ For more information about this feature, see [Depth-of-field](Post-Processing-De
#### Accumulation motion blur and path tracer convergence APIs
-
+
HDRP now includes a recording API which you can use to render effects such as high-quality accumulation motion blur and converged path-traced images. These techniques create the final "converged" frame by combining information from multiple intermediate sub-frames. The new API allows your scripts to extract the properly converged final frames and do further processing or save them to disk.
For information about this feature and for some example scripts, see [Multiframe rendering and accumulation documentation](Accumulation.md).
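
As a rough illustration of the workflow (a sketch based on the method names in the Accumulation documentation; exact signatures and defaults may differ), a script can start a recording session, advance one sub-frame per rendered frame, and stop recording when done:

```C#
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;

// Sketch only: drives the recording API described above from a MonoBehaviour.
public class AccumulationRecorder : MonoBehaviour
{
    public int subFrames = 32;          // sub-frames accumulated into each converged frame
    public float shutterInterval = 1f;  // fraction of the frame interval the shutter stays open

    HDRenderPipeline Pipeline => RenderPipelineManager.currentPipeline as HDRenderPipeline;

    void OnEnable()  => Pipeline?.BeginRecording(subFrames, shutterInterval);
    void Update()    => Pipeline?.PrepareNewSubFrame(); // call once per frame while recording
    void OnDisable() => Pipeline?.EndRecording();
}
```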
#### Path-traced sub-surface scattering
-
+
Path tracing now supports subsurface scattering (SSS), using a random walk approach. To use it, enable path tracing and set up SSS in the same way as you would for HDRP materials.
For information on SSS in HDRP, see [subsurface scattering](subsurface-scattering.md).
#### Path-traced fog
-
+
Path tracing now supports fog absorption. Like SSS, to use this feature, enable path tracing and set up fog in the same way as you would for standard fog in HDRP.
@@ -274,13 +274,13 @@ HDRP improves the Screen Space Reflection by providing a new implementation 'PBR
### Planar reflection probe filtering
-
+
Planar reflection probe filtering is a process that combines the result of planar reflection with surface smoothness. Up until this version, the implementation for planar reflection probe filtering did not always produce high-quality results. This version of HDRP includes a new implementation that is closer to being physically based and improves image quality significantly.
### Fake distance based roughness for reflection probe
-
+
Reflection Probes can now fake the increasing perceived blurriness of a surface reflection with distance from the object. This option is disabled by default and needs to be enabled on the Reflection Probe.
@@ -324,7 +324,7 @@ More options are now available for [contact shadows](Override-Contact-Shadows.md
### Light component user experience
-
+
The [Light component](Light-Component.md) now includes a visualization to help you set the intensity of your lights using [physical light units](Physical-Light-Units.md).
@@ -340,7 +340,7 @@ Auto-exposure systems calculate the average scene luminance and try to map this
### Custom Pass API
-
+
From this version, within the rendering of your main Camera, you can now render GameObjects from another point of view (a disabled camera for example). The new API also comes with built-in support for rendering Depth, Normal and Tangent into an RTHandle.
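
As an illustrative sketch (not an official sample; the `CustomPassUtils.RenderDepthFromCamera` signature is assumed from the Custom Pass API), a custom pass could render the scene depth from a second, disabled camera into an `RTHandle`:

```C#
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;

// Sketch only: renders scene depth as seen from a secondary (disabled) camera into an RTHandle.
class DepthFromSecondCamera : CustomPass
{
    public Camera viewPoint;          // disabled camera used purely as a point of view
    public LayerMask layerMask = ~0;
    RTHandle depth;

    protected override void Setup(ScriptableRenderContext renderContext, CommandBuffer cmd)
    {
        depth = RTHandles.Alloc(Vector2.one, TextureXR.slices, DepthBits.Depth32,
            dimension: TextureXR.dimension, useDynamicScale: true, name: "Depth From Second Camera");
    }

    protected override void Execute(CustomPassContext ctx)
    {
        if (viewPoint != null)
            CustomPassUtils.RenderDepthFromCamera(ctx, viewPoint, depth, ClearFlag.Depth, layerMask);
    }

    protected override void Cleanup() => depth?.Release();
}
```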
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-11.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-11.md
index bd2b3417abc..86323b707ab 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-11.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-11.md
@@ -36,15 +36,15 @@ From HDRP 11.0, [Planar Reflection Probes](Planar-Reflection-Probe.md) now consi
From HDRP 11.0, the AxF Material supports ray tracing. It also supports rasterized area light shadows.
-
+
### Decal widget
From HDRP 11.0, the decal widget includes more functionality to help you create decals in a Scene. It now includes pivot point manipulation (both in the Inspector and Scene view), UV manipulation (in the Scene view only), and color and intensity customization.
-
+
-
+
### Cubemap fields in Volume components
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-14.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-14.md
index 187673617f2..faf698a61d8 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-14.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-14.md
@@ -12,7 +12,7 @@ In HDRP 14, the [Ray Tracing Settings](Ray-Tracing-Settings.md) volume includes
### Fullscreen Shader Graph
-
+
HDRP 14 introduces a new **Fullscreen** Material type in ShaderGraph.
You can use Fullscreen shaders in fullscreen custom passes, custom post processes and C# scripting.
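
For example, a minimal custom pass could draw a material created from a Fullscreen Shader Graph over the camera color buffer (a sketch; `fullscreenMaterial` is a placeholder for your own Fullscreen material):

```C#
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;

// Sketch only: draws a Fullscreen Shader Graph material over the camera color buffer.
class FullscreenEffectPass : CustomPass
{
    public Material fullscreenMaterial; // assign a material made from a Fullscreen Shader Graph

    protected override void Execute(CustomPassContext ctx)
    {
        if (fullscreenMaterial == null)
            return;

        CoreUtils.SetRenderTarget(ctx.cmd, ctx.cameraColorBuffer);
        CoreUtils.DrawFullScreen(ctx.cmd, fullscreenMaterial, ctx.propertyBlock);
    }
}
```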
@@ -32,8 +32,8 @@ Each shader is associated with a prefab that demonstrates how to implement it in
### Transparency Material Sample Scene
-
-
+
+
HDRP 14 includes new sample scenes that demonstrate transparent Materials using different rendering methods:
- Rasterization.
@@ -55,15 +55,15 @@ HDRP 14 includes a set of industry-standard color monitors that you can use to c
You can find these monitors in the Rendering Debugger window (menu: **Windows > Analysis > Rendering Debugger**) in the **Rendering** tab.
- **Waveform**: Displays the full range of luma information in the render
-
+
- **Parade**: Splits the image into red, green and blue separately
-
+
- **Vectorscope**: Measures the overall range of hue and saturation within the image
-
+
### Denoising in Path Tracing
-
+
HDRP 14 adds denoising for frames that use path tracing. You can choose from the following denoising methods:
- Optix Denoiser.
@@ -74,7 +74,7 @@ HDRP 14 adds denoising for frames that use path tracing. You can choose from the
In HDRP 14, the Local Volumetric Fog volume supports Materials you create in ShaderGraph.
You can use this to create dynamic fog effects. For more information, see [Volumetric Material](create-a-fog-volume-shader.md).
-
+
### Local Volumetric Fog blending
@@ -86,7 +86,7 @@ This version also adds a **Priority** property that you can use to control the o
### Screen Space Reflection
-
+
HDRP 14 adds new properties to the Screen Space Reflection component that you can use to control how the PBR Accumulation SSR algorithm behaves. For example, enable **World Space Speed Rejection** to reject samples based on speed in world space.
@@ -169,7 +169,7 @@ HDRP 14 makes the following improvements to the [Cloud Layer](create-simple-clou
- Changes the **Distortion** property name to **Wind**.
- Changes the raymarching algorithm to improve scattering, and to give more consistent results when you change the number of steps. Depending on your lighting conditions, you might have to tweak the **Density** and **Exposure** sliders to get the same result as earlier HDRP versions.
-
+
### Renderer bounds access in ShaderGraph
@@ -183,7 +183,7 @@ HDRP 14 adds the **Occlusion remap Curve** property to the Lens Flare component.
### New Eye Shader Subtype
-
+
HDRP 14.0 includes a new Eye Shader type called **Eye Cinematic with Caustic**. This Eye Shader uses caustics to give a more realistic effect. This makes it more resource-intensive than other HDRP Eye Shaders.
@@ -223,7 +223,7 @@ HDRP 14 improves the precision of the Decal Projector's **Angle Fade** property.
### Improve area light soft shadows
-
+
In HDRP 14, soft area shadows are more accurate to the raytraced reference. This version also makes the following changes:
- **Shadow Filtering Quality** only affects the quality of regular lights.
@@ -231,7 +231,7 @@ In HDRP 14, soft area shadows are more accurate to the raytraced reference. This
### Ray-Tracing and Terrain
-
+
From HDRP 14, all raytracing effects support Terrain.
@@ -243,14 +243,14 @@ HDRP 14 adds the **Time slicing** property to the [Reflection Probe](Reflection-
#### Reflection probe atlas
-
+
HDRP 14 replaces the cube reflection probe cache array with a 2D texture atlas cache in octahedral projection.
Planar reflection probes now use the same 2D texture cache. This means you can control the cube probe resolution for each reflection probe to save memory.
### Water system
-
+
HDRP 14 introduces the water system. This feature allows you to render highly realistic water surfaces and interact with them. This version includes the following features:
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-16.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-16.md
index 9195529711d..3fa443ce665 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-16.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/whats-new-16.md
@@ -16,7 +16,7 @@ HDRP 16 adds a [Dynamic Resolution](https://docs.unity3d.com/Packages/com.unity.
### Colored translucent material Type
-
+
HDRP 16 adds the Colored Translucent material type in Shader Graph. Use this material for thin, double-sided geometry.
This material supports a colored transmission mask and doesn't require a diffusion profile.
@@ -25,7 +25,7 @@ This material supports a colored transmission mask and doesn't require a diffusi
HDRP 16 adds a cinematic mode for the physically-based hair shader. Enable this mode to trade performance for high-quality environment and area lighting with multiple scattering that closely matches a path-traced reference.
-
+
This image shows hair with a single environment light without cinematic shading (left) and with cinematic shading (right).
@@ -36,7 +36,7 @@ This feature requires the [Unity hair system](https://github.com/Unity-Technolog
HDRP 16 adds the [Canvas Master Node](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@16.0/manual/master-stack-canvas.md) to Shader Graph that you can use to create UI shaders for a Canvas.
-
+
### Path tracer
@@ -49,17 +49,17 @@ To support the decals with path tracing, HDRP regards all decals as clustered de
The HDRP Path Tracer does not support emissive decals.
-
+
This image displays a puddle and checkerboard decal in a path-traced scene.
#### Henyey-Greenstein Anisotropic Fog
HDRP 16 supports path-traced anisotropic fog. The path tracer uses the Henyey-Greenstein phase function for evaluation and sampling. This phase function has an anisotropy parameter that controls the behavior of the scattering. You can set this parameter between -1 and 1. A negative value corresponds to backward scattering and a positive value corresponds to forward scattering. A value of 0 corresponds to the regular isotropic behavior. The standard form of this phase function is shown after the example images below.
Isotropic scattering (anisotropy=0)
-
+
Anisotropic forward scattering (anisotropy=0.75)
-
+
### Volumetric fog output
@@ -67,13 +67,13 @@ In version 16, the [Volumetric fog shader graph](https://docs.unity3d.com/Packag
### Color Checker Tool
-
+
Use the Color Checker tool to check the calibration of lighting and post processing. To open the color checker tool, go to **GameObject** > **Rendering** > **Color Checker Tool**. HDRP doesn't save any changes you make in the color checker.
### Light Placement Tool
-
+
Use the Light Placement Tool to look through a light and use the camera movement controls to move the light around the scene.
To enter this mode, select a light GameObject and then select the Light Placement Tool button in the Tools Panel. For more information, refer to [Light placement tool](lights-placement-tool.md).
@@ -90,23 +90,23 @@ HDRP 16 updates the Physically Based Sky in the following ways to make it easier
- Added the PBRSky material type in Shader Graph to allow effects like procedural stars.
- Added an option to mark celestial bodies as moons which makes them receive lighting from the main directional light.
-
+
### SpeedTrees
HDRP 16 adds motion vector support for SpeedTrees. To do this, SpeedTree shader graphs use the Transmission Mask input of the Lit master node. This means that SpeedTrees with a single draw can also receive vector transmission in the bark and branches.
-
-
+
+
### Shadows
#### Percentage-Closer Soft Shadow (PCSS)
HDRP 16 improves Percentage-Closer Soft Shadow (PCSS) quality for directional lights. To do this, the shadow is sampled along a cone instead of a disk. This makes PCSS shadows appear softer.
-
+
PCSS shadows that sample a disc.
-
+
PCSS shadows that sample a cone.
#### Shadow quality
@@ -120,11 +120,11 @@ Version 16 also splits the **Shadow Filtering Quality** property in the [HDRP As
HDRP 16 updates the algorithm that it uses to render the volumetric clouds shadows. This fixes an issue where clouds cast shadows on objects above the clouds, and softens the shadows that clouds cast.
-
+
### Ray-Traced reflections ReBLUR denoiser
-
+
HDRP 16 replaces the Ray-Traced Reflections denoiser with the ReBLUR algorithm. This algorithm includes an anti-flickering setting that improves temporal stability and gives a more even result on rough and smooth objects.
Ray-Traced Reflections uses this denoiser in **Raytracing** mode and **Mixed** mode.
@@ -140,7 +140,7 @@ From version 16ayou can stream probe volume data directly from a disk. To use di
For more information, refer to [Probe volume streaming](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@16.0/manual/probevolumes-streaming.html).
### Water system
-
+
HDRP 16.0 makes the following improvements to the water system:
* Improves performance for rendering surfaces with Instanced Quads.
@@ -152,7 +152,7 @@ HDRP 16.0 makes the following improvements to the water system:
### Data Driven Lens Flare XR support
-
+
HDRP 16 adds [XR](https://docs.unity3d.com/Manual/xr-graphics.html) support to the data-driven Lens Flare component. This includes single pass instancing and multipass support.
@@ -160,7 +160,7 @@ HDRP 16 adds [XR](https://docs.unity3d.com/Manual/xr-graphics.html) support to t
HDRP 16 adds a Volumetric Fog fullscreen debug mode to the [rendering debugger](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@16.0/manual/Render-Pipeline-Debug-Window.html). This mode displays the fog that affects the opaque geometry. You can control how the fog appears with the debug mode's **Exposure** property.
-
+
### Render Graph Viewer
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/FogVolumePropertyBlock.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/FogVolumePropertyBlock.cs
index 1400885baf9..fd745434799 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/FogVolumePropertyBlock.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/FogVolumePropertyBlock.cs
@@ -20,6 +20,7 @@ internal class Styles
public static GUIContent blendMode = new GUIContent("Blend Mode", "Determines how the fog volume will blend with other fogs in the scene.");
public static GUIContent singleScatteringAlbedo = new GUIContent("Single Scattering Albedo", "The color this fog scatters light to.");
public static GUIContent fogDistance = new GUIContent("Fog Distance", "Density at the base of the fog. Determines how far you can see through the fog in meters.");
+ public static GUIContent debugSymbolsText = new GUIContent("Debug Symbols", "When enabled, HDRP activates d3d11 debug symbols for this Shader.");
}
protected override string title => "Fog Volume Options";
@@ -35,6 +36,9 @@ protected override void CreatePropertyGUI()
// AddProperty(Styles.singleScatteringAlbedo, () => fogData.singleScatteringAlbedo, (newValue) => fogData.singleScatteringAlbedo = newValue);
// AddProperty(Styles.fogDistance, () => fogData.fogDistance, (newValue) => fogData.fogDistance = newValue);
AddProperty(Styles.blendMode, () => fogData.blendMode, (newValue) => fogData.blendMode = newValue);
+
+ if (Unsupported.IsDeveloperMode())
+ AddProperty(Styles.debugSymbolsText, () => systemData.debugSymbols, (newValue) => systemData.debugSymbols = newValue);
}
}
}
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/FogVolumeSubTarget.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/FogVolumeSubTarget.cs
index 5d10995ee62..3b5279bbe55 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/FogVolumeSubTarget.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/FogVolumeSubTarget.cs
@@ -172,7 +172,7 @@ PassCollection GetVoxelizePasses()
{
// Definition
displayName = HDShaderPassNames.s_FogVolumeVoxelizeStr,
- referenceName = "SHADERPASS_FOGVOLUME_VOXELIZATION",
+ referenceName = "SHADERPASS_FOG_VOLUME_VOXELIZATION",
lightMode = HDShaderPassNames.s_FogVolumeVoxelizeStr,
useInPreview = false,
@@ -200,7 +200,7 @@ PassDescriptor ShaderGraphPreviewPass()
{
// Definition
displayName = "ShaderGraphPreview",
- referenceName = "SHADERPASS_FOGVOLUME_PREVIEW",
+ referenceName = "SHADERPASS_FOG_VOLUME_PREVIEW",
lightMode = "ShaderGraphPreview",
useInPreview = true,
@@ -228,7 +228,7 @@ PassDescriptor ShaderGraphOverdrawDebugPass()
{
// Definition
displayName = HDShaderPassNames.s_VolumetricFogVFXOverdrawDebugStr,
- referenceName = "SHADERPASS_FOGVOLUME_OVERDRAW_DEBUG",
+ referenceName = "SHADERPASS_FOG_VOLUME_OVERDRAW_DEBUG",
lightMode = HDShaderPassNames.s_VolumetricFogVFXOverdrawDebugStr,
useInPreview = true,
@@ -308,6 +308,7 @@ static class FogVolumeIncludes
const string kPacking = "Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl";
const string kColor = "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl";
const string kFunctions = "Packages/com.unity.shadergraph/ShaderGraphLibrary/Functions.hlsl";
+ const string kVoxelizationTransforms = "Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/VoxelizationTransforms.hlsl";
const string kVoxelizePass = "Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassVoxelize.hlsl";
const string kPreviewPass = "Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassPreview.hlsl";
const string kOverdrawPass = "Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/OverdrawDebug.hlsl";
@@ -318,6 +319,7 @@ static class FogVolumeIncludes
{ kColor, IncludeLocation.Pregraph },
{ kFunctions, IncludeLocation.Pregraph },
{ CoreIncludes.MinimalCorePregraph },
+ { kVoxelizationTransforms, IncludeLocation.Pregraph },
{ kVoxelizePass, IncludeLocation.Postgraph },
};
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/OverdrawDebug.hlsl b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/OverdrawDebug.hlsl
index 7e079bfa8b5..24b54722674 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/OverdrawDebug.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/OverdrawDebug.hlsl
@@ -1,4 +1,4 @@
-#if SHADERPASS != SHADERPASS_FOGVOLUME_OVERDRAW_DEBUG
+#if SHADERPASS != SHADERPASS_FOG_VOLUME_OVERDRAW_DEBUG
#error SHADERPASS_is_not_correctly_define
#endif
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassPreview.hlsl b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassPreview.hlsl
index bd90a161800..cc785e80220 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassPreview.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassPreview.hlsl
@@ -1,4 +1,4 @@
-#if SHADERPASS != SHADERPASS_FOGVOLUME_PREVIEW
+#if SHADERPASS != SHADERPASS_FOG_VOLUME_PREVIEW
#error SHADERPASS_is_not_correctly_define
#endif
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassVoxelize.hlsl b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassVoxelize.hlsl
index 2f0ab60c9bd..3f5e933cfe8 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassVoxelize.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/ShaderPassVoxelize.hlsl
@@ -1,4 +1,4 @@
-#if SHADERPASS != SHADERPASS_FOGVOLUME_VOXELIZATION
+#if SHADERPASS != SHADERPASS_FOG_VOLUME_VOXELIZATION
#error SHADERPASS_is_not_correctly_define
#endif
@@ -91,14 +91,13 @@ VertexToFragment Vert(uint instanceId : INSTANCEID_SEMANTIC, uint vertexId : VER
return output;
}
-FragInputs BuildFragInputs(VertexToFragment v2f, float3 voxelPositionOS, float3 voxelClipSpace)
+FragInputs BuildFragInputs(VertexToFragment v2f, float3 voxelPositionWS, float3 voxelClipSpace)
{
FragInputs output;
ZERO_INITIALIZE(FragInputs, output);
- float3 positionWS = mul(UNITY_MATRIX_M, float4(voxelPositionOS, 1)).xyz;
output.positionSS = v2f.positionCS;
- output.positionRWS = output.positionPredisplacementRWS = positionWS;
+ output.positionRWS = output.positionPredisplacementRWS = voxelPositionWS;
output.positionPixel = uint2(v2f.positionCS.xy);
output.texCoord0 = float4(saturate(voxelClipSpace * 0.5 + 0.5), 0);
output.tangentToWorld = k_identity3x3;
@@ -140,9 +139,11 @@ void Frag(VertexToFragment v2f, out float4 outColor : SV_Target0)
float3 rayoriginWS = GetCurrentViewPosition();
float3 voxelCenterWS = rayoriginWS + sliceDistance * raycenterDirWS;
+ // Build rotation matrix from normalized OBB axes to transform the world space position
float3x3 obbFrame = float3x3(_VolumetricMaterialObbRight.xyz, _VolumetricMaterialObbUp.xyz, cross(_VolumetricMaterialObbRight.xyz, _VolumetricMaterialObbUp.xyz));
- float3 voxelCenterBS = mul(voxelCenterWS - _VolumetricMaterialObbCenter.xyz + _WorldSpaceCameraPos.xyz, transpose(obbFrame));
+ // Rotate world position around the center of the local fog OBB
+ float3 voxelCenterBS = mul(GetAbsolutePositionWS(voxelCenterWS - _VolumetricMaterialObbCenter.xyz), transpose(obbFrame));
float3 voxelCenterCS = (voxelCenterBS * rcp(_VolumetricMaterialObbExtents.xyz));
// Still need to clip pixels outside of the box because of the froxel buffer shape
@@ -150,7 +151,7 @@ void Frag(VertexToFragment v2f, out float4 outColor : SV_Target0)
if (!overlap)
clip(-1);
- FragInputs fragInputs = BuildFragInputs(v2f, voxelCenterBS, voxelCenterCS);
+ FragInputs fragInputs = BuildFragInputs(v2f, voxelCenterWS, voxelCenterCS);
GetVolumeData(fragInputs, v2f.viewDirectionWS, albedo, extinction);
// Accumulate volume parameters
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/VoxelizationTransforms.hlsl b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/VoxelizationTransforms.hlsl
new file mode 100644
index 00000000000..5290ae334ac
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/VoxelizationTransforms.hlsl
@@ -0,0 +1,36 @@
+#pragma once
+
+#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricLighting/HDRenderPipeline.VolumetricLighting.cs.hlsl"
+
+// Overrides the transform functions that would use object matrices in the fog as they are not available due to the indirect draw
+// Instead we can re-build the object matrix from the OBB of the fog object
+
+float4x4 BuildWorldToObjectMatrixFromLocalFogOBB()
+{
+ float3x3 rotation = float3x3(
+ _VolumetricMaterialObbRight.xyz,
+ _VolumetricMaterialObbUp.xyz,
+ cross(_VolumetricMaterialObbRight.xyz, _VolumetricMaterialObbUp.xyz)
+ );
+
+ // inverse rotation
+ rotation = transpose(rotation);
+
+ // inverse translation
+ float3 inverseTranslation = -(mul(_VolumetricMaterialObbCenter.xyz, rotation));
+
+ // Build matrix
+ float4x4 objectMatrix = 0;
+ objectMatrix._m00_m10_m20 = rotation[0];
+ objectMatrix._m01_m11_m21 = rotation[1];
+ objectMatrix._m02_m12_m22 = rotation[2];
+ objectMatrix._m03_m13_m23_m33 = float4(inverseTranslation, 1);
+
+ return objectMatrix;
+}
+
+float3 TransformWorldToObjectFog(float3 positionRWS)
+{
+ float3 posWS = GetAbsolutePositionWS(positionRWS);
+ return mul(BuildWorldToObjectMatrixFromLocalFogOBB(), float4(posWS, 1));
+}
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/VoxelizationTransforms.hlsl.meta b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/VoxelizationTransforms.hlsl.meta
new file mode 100644
index 00000000000..33154246ccb
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/FogVolume/ShaderGraph/VoxelizationTransforms.hlsl.meta
@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: 623770b432f006f49af1b65f726bf86f
+ShaderIncludeImporter:
+ externalObjects: {}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/LayeredLit/LayeredLitGUI.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/LayeredLit/LayeredLitGUI.cs
index d64874723fd..34fa632435f 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/LayeredLit/LayeredLitGUI.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/LayeredLit/LayeredLitGUI.cs
@@ -76,36 +76,36 @@ public static void SynchronizeLayerProperties(Material material, int layerIndex,
if (layerMaterial != null)
{
Shader layerShader = layerMaterial.shader;
- int propertyCount = ShaderUtil.GetPropertyCount(layerShader);
+ int propertyCount = layerShader.GetPropertyCount();
for (int i = 0; i < propertyCount; ++i)
{
- string propertyName = ShaderUtil.GetPropertyName(layerShader, i);
+ string propertyName = layerShader.GetPropertyName(i);
string layerPropertyName = propertyName + layerIndex;
if (includeUVMappingProperties || !exclusionList.Contains(propertyName))
{
if (material.HasProperty(layerPropertyName))
{
- ShaderUtil.ShaderPropertyType type = ShaderUtil.GetPropertyType(layerShader, i);
+ ShaderPropertyType type = layerShader.GetPropertyType(i);
switch (type)
{
- case ShaderUtil.ShaderPropertyType.Color:
+ case ShaderPropertyType.Color:
{
material.SetColor(layerPropertyName, layerMaterial.GetColor(propertyName));
break;
}
- case ShaderUtil.ShaderPropertyType.Float:
- case ShaderUtil.ShaderPropertyType.Range:
+ case ShaderPropertyType.Float:
+ case ShaderPropertyType.Range:
{
material.SetFloat(layerPropertyName, layerMaterial.GetFloat(propertyName));
break;
}
- case ShaderUtil.ShaderPropertyType.Vector:
+ case ShaderPropertyType.Vector:
{
material.SetVector(layerPropertyName, layerMaterial.GetVector(propertyName));
break;
}
- case ShaderUtil.ShaderPropertyType.TexEnv:
+ case ShaderPropertyType.Texture:
{
material.SetTexture(layerPropertyName, layerMaterial.GetTexture(propertyName));
if (includeUVMappingProperties)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/Templates/Pixel.template.hlsl b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/Templates/Pixel.template.hlsl
index 070670340ec..11bb670afc7 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/Templates/Pixel.template.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/Templates/Pixel.template.hlsl
@@ -25,12 +25,20 @@ SurfaceDescriptionInputs FragInputsToSurfaceDescriptionInputs(FragInputs input,
$SurfaceDescriptionInputs.TangentSpaceViewDirection: float3x3 tangentSpaceTransform = float3x3(output.WorldSpaceTangent,output.WorldSpaceBiTangent,output.WorldSpaceNormal);
$SurfaceDescriptionInputs.TangentSpaceViewDirection: output.TangentSpaceViewDirection = TransformWorldToTangent(output.WorldSpaceViewDirection, tangentSpaceTransform);
$SurfaceDescriptionInputs.WorldSpacePosition: output.WorldSpacePosition = input.positionRWS;
+#if SHADERPASS != SHADERPASS_FOG_VOLUME_VOXELIZATION
$SurfaceDescriptionInputs.ObjectSpacePosition: output.ObjectSpacePosition = TransformWorldToObject(input.positionRWS);
+#else
+ $SurfaceDescriptionInputs.ObjectSpacePosition: output.ObjectSpacePosition = TransformWorldToObjectFog(input.positionRWS);
+#endif
$SurfaceDescriptionInputs.ViewSpacePosition: output.ViewSpacePosition = TransformWorldToView(input.positionRWS);
$SurfaceDescriptionInputs.TangentSpacePosition: output.TangentSpacePosition = float3(0.0f, 0.0f, 0.0f);
$SurfaceDescriptionInputs.AbsoluteWorldSpacePosition: output.AbsoluteWorldSpacePosition = GetAbsolutePositionWS(input.positionRWS);
$SurfaceDescriptionInputs.WorldSpacePositionPredisplacement: output.WorldSpacePositionPredisplacement = input.positionPredisplacementRWS;
+#if SHADERPASS != SHADERPASS_FOG_VOLUME_VOXELIZATION
$SurfaceDescriptionInputs.ObjectSpacePositionPredisplacement: output.ObjectSpacePositionPredisplacement = TransformWorldToObject(input.positionPredisplacementRWS);
+#else
+ $SurfaceDescriptionInputs.ObjectSpacePositionPredisplacement: output.ObjectSpacePositionPredisplacement = TransformWorldToObjectFog(input.positionPredisplacementRWS);
+#endif
$SurfaceDescriptionInputs.ViewSpacePositionPredisplacement: output.ViewSpacePositionPredisplacement = TransformWorldToView(input.positionPredisplacementRWS);
$SurfaceDescriptionInputs.TangentSpacePositionPredisplacement: output.TangentSpacePositionPredisplacement = float3(0.0f, 0.0f, 0.0f);
$SurfaceDescriptionInputs.AbsoluteWorldSpacePositionPredisplacement: output.AbsoluteWorldSpacePositionPredisplacement = GetAbsolutePositionWS(input.positionPredisplacementRWS);
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/USS/FrameSettings.uss b/Packages/com.unity.render-pipelines.high-definition/Editor/USS/FrameSettings.uss
index 0fdd4bcf5cd..c5e5d7a86f9 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/USS/FrameSettings.uss
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/USS/FrameSettings.uss
@@ -58,3 +58,19 @@
{
background-color: var(--unity-colors-helpbox-background);
}
+
+.header-foldout
+{
+ margin: 0px -6px 0px -31px;
+ padding: 0px 0px 0px 0px;
+}
+
+.unity-foldout__toggle
+{
+ padding: 0px 6px 0px 30px;
+}
+
+.unity-foldout__content
+{
+ margin: 0px 5px 0px 47px;
+}
\ No newline at end of file
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/VFX/SampleWaterSurface.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/VFX/SampleWaterSurface.cs
index 04b7fc9effd..8f36e0ff95a 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/VFX/SampleWaterSurface.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/VFX/SampleWaterSurface.cs
@@ -20,9 +20,7 @@ public override void OnInspectorGUI()
}
}
}
-
-
- [VFXHelpURL("Operator-SampleWaterSurface")]
+
[VFXInfo(category = "Sampling")]
class SampleWaterSurface : VFXOperator
{
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/HDShadow.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/HDShadow.hlsl
index 6a91a495279..0e74aa8ebdd 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/HDShadow.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/HDShadow.hlsl
@@ -49,10 +49,11 @@ float GetPunctualShadowAttenuation(HDShadowContext shadowContext, float2 positio
if (pointLight)
{
- sd.rot0 = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].rot0;
- sd.rot1 = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].rot1;
- sd.rot2 = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].rot2;
- sd.atlasOffset = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].atlasOffset;
+ const int cubeFaceOffset = CubeMapFaceID(-L);
+ sd.rot0 = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].rot0;
+ sd.rot1 = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].rot1;
+ sd.rot2 = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].rot2;
+ sd.atlasOffset = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].atlasOffset;
}
if (sd.isInCachedAtlas > 0) // This is a scalar branch.
@@ -77,11 +78,12 @@ float GetPunctualShadowClosestDistance(HDShadowContext shadowContext, SamplerSta
if (pointLight)
{
- sd.shadowToWorld = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].shadowToWorld;
- sd.atlasOffset = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].atlasOffset;
- sd.rot0 = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].rot0;
- sd.rot1 = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].rot1;
- sd.rot2 = shadowContext.shadowDatas[shadowDataIndex + CubeMapFaceID(-L)].rot2;
+ const int cubeFaceOffset = CubeMapFaceID(-L);
+ sd.shadowToWorld = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].shadowToWorld;
+ sd.atlasOffset = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].atlasOffset;
+ sd.rot0 = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].rot0;
+ sd.rot1 = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].rot1;
+ sd.rot2 = shadowContext.shadowDatas[shadowDataIndex + cubeFaceOffset].rot2;
}
if (sd.isInCachedAtlas > 0) // This is a scalar branch.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/MaterialExtension.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/MaterialExtension.cs
index c032de143d9..8e79cfb7169 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/MaterialExtension.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/MaterialExtension.cs
@@ -616,7 +616,7 @@ internal static IEnumerable GetShaderDiffusionProfileProperties(Shader shad
if (shader.FindPropertyIndex("_DiffusionProfileAsset3") != -1)
yield return Shader.PropertyToID("_DiffusionProfileAsset3");
- int propertyCount = UnityEditor.ShaderUtil.GetPropertyCount(shader);
+ int propertyCount = shader.GetPropertyCount();
for (int propIdx = 0; propIdx < propertyCount; ++propIdx)
{
var attributes = shader.GetPropertyAttributes(propIdx);
@@ -625,8 +625,8 @@ internal static IEnumerable GetShaderDiffusionProfileProperties(Shader shad
if (attribute == "DiffusionProfile")
{
propIdx++;
- var type = UnityEditor.ShaderUtil.GetPropertyType(shader, propIdx);
- if (type == UnityEditor.ShaderUtil.ShaderPropertyType.Vector)
+ var type = shader.GetPropertyType(propIdx);
+ if (type == ShaderPropertyType.Vector)
yield return shader.GetPropertyNameId(propIdx);
break;
}
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs
index c52180548c5..83600636457 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs
@@ -213,6 +213,8 @@ void RecordRenderGraph(RenderRequest renderRequest,
RenderForwardOpaque(m_RenderGraph, hdCamera, colorBuffer, lightingBuffers, gpuLightListOutput, prepassOutput, vtFeedbackBuffer, shadowResult, cullingResults);
+ RenderCustomPass(m_RenderGraph, hdCamera, colorBuffer, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.AfterOpaqueColor, aovRequest, aovCustomPassBuffers, lightingBuffers);
+
if (IsComputeThicknessNeeded(hdCamera))
// Compute the thickness for All Transparent which can be occluded by opaque written on the DepthBuffer (which includes the Forward Opaques).
RenderThickness(m_RenderGraph, cullingResults, thicknessTexture, prepassOutput.depthPyramidTexture, hdCamera, HDRenderQueue.k_RenderQueue_AllTransparent, true);
@@ -227,7 +229,7 @@ void RecordRenderGraph(RenderRequest renderRequest,
// Send all the geometry graphics buffer to client systems if required (must be done after the pyramid and before the transparent depth pre-pass)
SendGeometryGraphicsBuffers(m_RenderGraph, prepassOutput.normalBuffer, prepassOutput.depthPyramidTexture, hdCamera);
- RenderCustomPass(m_RenderGraph, hdCamera, colorBuffer, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.AfterOpaqueAndSky, aovRequest, aovCustomPassBuffers);
+ RenderCustomPass(m_RenderGraph, hdCamera, colorBuffer, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.AfterOpaqueAndSky, aovRequest, aovCustomPassBuffers, lightingBuffers);
DoUserAfterOpaqueAndSky(m_RenderGraph, hdCamera, colorBuffer, prepassOutput.resolvedDepthBuffer, prepassOutput.resolvedNormalBuffer, prepassOutput.resolvedMotionVectorsBuffer);
@@ -336,7 +338,7 @@ void RecordRenderGraph(RenderRequest renderRequest,
// At this point, the color buffer has been filled by either debug views or regular rendering so we can push it here.
var colorPickerTexture = PushColorPickerDebugTexture(m_RenderGraph, colorBuffer);
- RenderCustomPass(m_RenderGraph, hdCamera, colorBuffer, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.BeforePostProcess, aovRequest, aovCustomPassBuffers);
+ RenderCustomPass(m_RenderGraph, hdCamera, colorBuffer, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.BeforePostProcess, aovRequest, aovCustomPassBuffers, lightingBuffers);
if (aovRequest.isValid)
{
@@ -434,7 +436,8 @@ void RecordRenderGraph(RenderRequest renderRequest,
// Stop XR single pass before rendering screenspace UI
StopXRSinglePass(m_RenderGraph, hdCamera);
- RenderScreenSpaceOverlayUI(m_RenderGraph, hdCamera, backBuffer);
+ if (renderRequest.isLast)
+ RenderScreenSpaceOverlayUI(m_RenderGraph, hdCamera, backBuffer);
}
}
@@ -2260,7 +2263,8 @@ bool RenderCustomPass(RenderGraph renderGraph,
CullingResults cameraCullingResults,
CustomPassInjectionPoint injectionPoint,
AOVRequestData aovRequest,
- List<RTHandle> aovCustomPassBuffers)
+ List<RTHandle> aovCustomPassBuffers,
+ in LightingBuffers lightingBuffers = default)
{
if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.CustomPass))
return false;
@@ -2283,6 +2287,8 @@ bool RenderCustomPass(RenderGraph renderGraph,
motionVectorBufferRG = prepassOutput.resolvedMotionVectorsBuffer,
renderingLayerMaskRG = renderingLayerMaskBuffer,
shadingRateImageRG = prepassOutput.shadingRateImage,
+ sssBuffer = lightingBuffers.sssBuffer,
+ diffuseLightingBuffer = lightingBuffers.diffuseLightingBuffer,
waterLineRG = prepassOutput.waterLine,
};
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs
index ffcbbf277fb..6b25c904658 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs
@@ -1417,6 +1417,7 @@ public struct Target
public List<(HDProbe.RenderData, HDProbe)> viewDependentProbesData;
public bool cullingResultIsShared;
public XRPass xrPass;
+ public bool isLast;
}
private void VisitRenderRequestRecursive(List requests, List visitStatus, int requestIndex, List renderIndices)
@@ -2340,6 +2341,7 @@ protected override void Render(ScriptableRenderContext renderContext, List<Camera> cameras)
- using (var builder = renderGraph.AddRenderPass<RTASDebugPassData>("Debug view of the RTAS", out var passData, ProfilingSampler.Get(HDProfileId.RaytracingBuildAccelerationStructureDebug)))
+ RTASDebugPassData passData;
+
+ using (var builder = renderGraph.AddRenderPass("Debug view of the RTAS", out passData, ProfilingSampler.Get(HDProfileId.RaytracingBuildAccelerationStructureDebug)))
{
builder.EnableAsyncCompute(false);
@@ -788,10 +790,10 @@ internal void EvaluateRTASDebugView(RenderGraph renderGraph, HDCamera hdCamera)
// Evaluate the debug view
ctx.cmd.DispatchRays(data.debugRTASRT, m_RTASDebugRTKernel, (uint)data.actualWidth, (uint)data.actualHeight, (uint)data.viewCount);
});
-
- // Use the debug texture to do the full screen debug
- PushFullScreenDebugTexture(renderGraph, passData.outputTexture, FullScreenDebugMode.RayTracingAccelerationStructure);
}
+
+ // Use the debug texture to do the full screen debug
+ PushFullScreenDebugTexture(renderGraph, passData.outputTexture, FullScreenDebugMode.RayTracingAccelerationStructure);
}
internal static int RayTracingFrameIndex(HDCamera hdCamera, int targetFrameCount = 8)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPass.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPass.cs
index ac61c437278..a676b22d4bc 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPass.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPass.cs
@@ -152,6 +152,8 @@ internal struct RenderTargets
public TextureHandle motionVectorBufferRG;
public TextureHandle renderingLayerMaskRG;
public TextureHandle shadingRateImageRG;
+ public TextureHandle sssBuffer;
+ public TextureHandle diffuseLightingBuffer;
public BufferHandle waterLineRG;
}
@@ -218,6 +220,10 @@ RenderTargets ReadRenderTargets(in RenderGraphBuilder builder, in RenderTargets
output.waterLineRG = builder.ReadBuffer(targets.waterLineRG);
if (targets.shadingRateImageRG.IsValid() && hdCamera.vrsEnabled)
output.shadingRateImageRG = builder.ReadTexture(targets.shadingRateImageRG);
+ if (targets.sssBuffer.IsValid())
+ output.sssBuffer = builder.ReadWriteTexture(targets.sssBuffer);
+ if (targets.diffuseLightingBuffer.IsValid())
+ output.diffuseLightingBuffer = builder.ReadWriteTexture(targets.diffuseLightingBuffer);
return output;
}
@@ -296,6 +302,8 @@ virtual internal void ExecuteInternal(RenderGraph renderGraph, HDCamera hdCamera
customPass.currentRenderTarget.depthBufferRG,
customPass.currentRenderTarget.normalBufferRG,
customPass.currentRenderTarget.motionVectorBufferRG,
+ customPass.currentRenderTarget.sssBuffer,
+ customPass.currentRenderTarget.diffuseLightingBuffer,
customPass.currentRenderTarget.customColorBuffer,
customPass.currentRenderTarget.customDepthBuffer,
ctx.renderGraphPool.GetTempMaterialPropertyBlock(),
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassContext.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassContext.cs
index 3700ab21ed5..36320b97b71 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassContext.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassContext.cs
@@ -72,6 +72,9 @@ public struct CustomPassContext
///
public readonly RTHandle shadingRateBuffer;
+ internal readonly RTHandle sssBuffer;
+ internal readonly RTHandle diffuseLightingBuffer;
+
internal readonly CustomPassInjectionPoint injectionPoint;
// This represent the state of HDRP globals at the point of recording the custom passes.
// Using GetShaderVariablesGlobals() from HDRP inside the execute of the custom pass would give invalid result
@@ -84,7 +87,7 @@ internal CustomPassContext(
CullingResults cameraCullingResults,
RTHandle cameraColorBuffer, RTHandle cameraDepthBuffer,
RTHandle cameraNormalBuffer, RTHandle cameraMotionVectorsBuffer,
- Lazy customColorBuffer,
+ RTHandle sssBuffer, RTHandle diffuseLightingBuffer, Lazy customColorBuffer,
Lazy customDepthBuffer, MaterialPropertyBlock propertyBlock,
RTHandle shadingRateBuffer,
CustomPassInjectionPoint injectionPoint, ShaderVariablesGlobal currentGlobalState)
@@ -102,6 +105,8 @@ internal CustomPassContext(
this.customDepthBuffer = customDepthBuffer;
this.propertyBlock = propertyBlock;
this.shadingRateBuffer = shadingRateBuffer;
+ this.sssBuffer = sssBuffer;
+ this.diffuseLightingBuffer = diffuseLightingBuffer;
this.injectionPoint = injectionPoint;
this.currentGlobalState = currentGlobalState;
}
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs
index e5a63a900e6..0f032feeb0f 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs
@@ -18,6 +18,8 @@ public enum CustomPassInjectionPoint
BeforeRendering = 0,
/// At this point, you can modify the normal, roughness, and depth buffer. If you write to these buffers at this injection point, HDRP takes it into account in the lighting and the depth pyramid.
AfterOpaqueDepthAndNormal = 5,
/// At this injection point, The color buffer contains all the opaque objects in your view. The Sky and the Fog are not rendered yet, so if you change the color buffer in this injection point, fog will be applied on top of your effect.
+ AfterOpaqueColor = 7,
/// At this injection point, The color buffer contains all the opaque objects in your view as well as the sky. The Fog is not rendered yet, so if you change the color buffer in this injection point, fog will be applied on top of your effect.
AfterOpaqueAndSky = 6,
/// At this injection point, you can render any transparent GameObject that you want to see in refraction. If you write to buffers at this injection point, their contents end up in the color pyramid that HDRP uses for refraction when it draws transparent GameObjects.
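// Illustrative sketch (not part of this change): selecting the new injection point from a script,
// assuming a CustomPassVolume component on the same GameObject and a user-defined pass type.
//
//     var volume = GetComponent<CustomPassVolume>();
//     volume.injectionPoint = CustomPassInjectionPoint.AfterOpaqueColor;
//     volume.customPasses.Add(new MyAfterOpaqueColorPass()); // MyAfterOpaqueColorPass is a hypothetical CustomPass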
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs.hlsl
index 36efe130803..5aeac7b6f57 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassInjectionPoint.cs.hlsl
@@ -9,6 +9,7 @@
//
#define CUSTOMPASSINJECTIONPOINT_BEFORE_RENDERING (0)
#define CUSTOMPASSINJECTIONPOINT_AFTER_OPAQUE_DEPTH_AND_NORMAL (5)
+#define CUSTOMPASSINJECTIONPOINT_AFTER_OPAQUE_COLOR (7)
#define CUSTOMPASSINJECTIONPOINT_AFTER_OPAQUE_AND_SKY (6)
#define CUSTOMPASSINJECTIONPOINT_BEFORE_PRE_REFRACTION (4)
#define CUSTOMPASSINJECTIONPOINT_BEFORE_TRANSPARENT (1)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs
index 3cd5604cfa9..d1ccc6be1d1 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs
@@ -34,5 +34,8 @@ enum ShaderPass
Constant,
FullScreenDebug,
PBRSky,
+ FogVolumePreview,
+ FogVolumeVoxelization,
+ FogVolumeOverdrawDebug,
}
}
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs.hlsl
index 8ff52388a7d..ee5aadc6da9 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs.hlsl
@@ -35,6 +35,9 @@
#define SHADERPASS_CONSTANT (25)
#define SHADERPASS_FULL_SCREEN_DEBUG (26)
#define SHADERPASS_PBRSKY (27)
+#define SHADERPASS_FOG_VOLUME_PREVIEW (28)
+#define SHADERPASS_FOG_VOLUME_VOXELIZATION (29)
+#define SHADERPASS_FOG_VOLUME_OVERDRAW_DEBUG (30)
#endif
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/Shaders/SampleWaterSurface.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/Shaders/SampleWaterSurface.hlsl
index 6321f779038..082ba04846b 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/Shaders/SampleWaterSurface.hlsl
+++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/Shaders/SampleWaterSurface.hlsl
@@ -442,7 +442,11 @@ void EvaluateWaterAdditionalData(float3 positionOS, float3 transformedPosition,
return;
// Evaluate the pre-displaced absolute position
+#if defined(WATER_DISPLACEMENT)
+ float3 positionRWS = positionOS;
+#else
float3 positionRWS = TransformObjectToWorld_Water(positionOS);
+#endif
// Evaluate the distance to the camera
float distanceToCamera = length(positionRWS);
// Get the world space transformed postion
diff --git a/Packages/com.unity.render-pipelines.universal/.buginfo b/Packages/com.unity.render-pipelines.universal/.buginfo
index fbfaf8ddc5a..407ad9952bb 100644
--- a/Packages/com.unity.render-pipelines.universal/.buginfo
+++ b/Packages/com.unity.render-pipelines.universal/.buginfo
@@ -22,4 +22,10 @@ Tools:
- ^.*Analytic?.*$
- ^.*LightExplorer?.*$
- ^.*MaterialUpgrader?.*$
- area: Graphics Tools
\ No newline at end of file
+ area: Graphics Tools
+
+post-processing-and-ui-features:
+ when:
+ path:
+ - ^.*PostProcessPass?.*$
+ area: Post-processing and UI Features
diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs b/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs
index 566461cc826..e4be7b4dc0f 100644
--- a/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs
+++ b/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs
@@ -545,7 +545,7 @@ void RecreateUI()
Status status = (Status) property.FindPropertyRelative("status").enumValueIndex;
string info = property.FindPropertyRelative("message").stringValue;
- element.Q("converterItemActive").RegisterCallback((evt) =>
+ element.Q("converterItemActive").TrackPropertyValue(property, _ =>
{
UpdateSelectedConverterItems(id, child);
DeselectAllNoneLabels(item);
diff --git a/Packages/com.unity.render-pipelines.universal/Editor/ScriptTemplates/ScriptableRendererFeature.txt b/Packages/com.unity.render-pipelines.universal/Editor/ScriptTemplates/ScriptableRendererFeature.txt
index 9c5999f283d..c1b3bb11d19 100644
--- a/Packages/com.unity.render-pipelines.universal/Editor/ScriptTemplates/ScriptableRendererFeature.txt
+++ b/Packages/com.unity.render-pipelines.universal/Editor/ScriptTemplates/ScriptableRendererFeature.txt
@@ -17,8 +17,15 @@ public class #SCRIPTNAME# : ScriptableRendererFeature
// Configures where the render pass should be injected.
m_ScriptablePass.renderPassEvent = RenderPassEvent.AfterRenderingOpaques;
- // Requests URP resources as inputs, URP will ensure copies of these attachments are available for sampling before executing the render pass.
- m_ScriptablePass.ConfigureInput(ScriptableRenderPassInput.Color | ScriptableRenderPassInput.Depth);
+ // You can request URP color texture and depth buffer as inputs by uncommenting the line below,
+ // URP will ensure copies of these resources are available for sampling before executing the render pass.
+ // Only uncomment it if necessary, as it has a performance impact, especially on mobile devices and other TBDR GPUs, where it breaks render passes.
+ //m_ScriptablePass.ConfigureInput(ScriptableRenderPassInput.Color | ScriptableRenderPassInput.Depth);
+
+ // You can request URP to render to an intermediate texture by uncommenting the line below.
+ // Use this option for passes that do not support rendering directly to the backbuffer.
+ // Only uncomment it if necessary, as it has a performance impact, especially on mobile and other TBDR GPUs where it breaks render passes.
+ //m_ScriptablePass.requiresIntermediateTexture = true;
}
// Here you can inject one or multiple render passes in the renderer.
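For context, the following is a minimal, hypothetical sketch of a feature generated from this template with both optional lines uncommented. The MyRendererFeature and MyRenderPass names are placeholders, and the empty Execute override exists only to keep the sketch self-contained; it is not part of the template.

```
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Hypothetical example based on the updated template above.
public class MyRendererFeature : ScriptableRendererFeature
{
    class MyRenderPass : ScriptableRenderPass
    {
        // Compatibility-mode entry point, left empty so the sketch stands alone.
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData) { }
    }

    MyRenderPass m_ScriptablePass;

    public override void Create()
    {
        m_ScriptablePass = new MyRenderPass();
        m_ScriptablePass.renderPassEvent = RenderPassEvent.AfterRenderingOpaques;

        // Both requests below add cost, especially on TBDR GPUs, so keep them
        // commented out in the template unless the pass actually samples these
        // resources or cannot render directly to the backbuffer.
        m_ScriptablePass.ConfigureInput(ScriptableRenderPassInput.Color | ScriptableRenderPassInput.Depth);
        m_ScriptablePass.requiresIntermediateTexture = true;
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        renderer.EnqueuePass(m_ScriptablePass);
    }
}
```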
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs
index d8a484bd5f9..a29331be976 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs
@@ -227,7 +227,7 @@ void CreateRenderTextures(
{
var depthDescriptor = cameraTargetDescriptor;
depthDescriptor.colorFormat = RenderTextureFormat.Depth;
- depthDescriptor.depthStencilFormat = CoreUtils.GetDefaultDepthStencilFormat();
+ depthDescriptor.depthStencilFormat = CoreUtils.GetDefaultDepthStencilFormat();
if (!cameraData.resolveFinalTarget && m_UseDepthStencilBuffer)
depthDescriptor.bindMS = depthDescriptor.msaaSamples > 1 && !SystemInfo.supportsMultisampleAutoResolve && (SystemInfo.supportsMultisampledTextures != 0);
RenderingUtils.ReAllocateHandleIfNeeded(ref m_DepthTextureHandle, depthDescriptor, FilterMode.Point, wrapMode: TextureWrapMode.Clamp, name: "_CameraDepthAttachment");
@@ -466,7 +466,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re
// We can explicitly render the overlay UI from URP when HDR output is not enabled.
// SupportedRenderingFeatures.active.rendersUIOverlay should also be set to true.
- if (shouldRenderUI && !outputToHDR)
+ if (shouldRenderUI && cameraData.isLastBaseCamera && !outputToHDR)
{
EnqueuePass(m_DrawOverlayUIPass);
}
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs
index 30d6df63dc4..0ffc6c62799 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs
@@ -226,7 +226,7 @@ void CreateResources(RenderGraph renderGraph)
var upscaleDescriptor = cameraTargetDescriptor;
upscaleDescriptor.width = ppc.refResolutionX * ppc.pixelRatio;
upscaleDescriptor.height = ppc.refResolutionY * ppc.pixelRatio;
- upscaleDescriptor.depthStencilFormat = GraphicsFormat.None;
+ upscaleDescriptor.depthStencilFormat = GraphicsFormat.None;
universal2DResourceData.upscaleTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, upscaleDescriptor, "_UpscaleTexture", true, ppc.finalBlitFilterMode);
}
@@ -706,7 +706,7 @@ private void OnAfterRendering(RenderGraph renderGraph)
// We can explicitly render the overlay UI from URP when HDR output is not enabled.
// SupportedRenderingFeatures.active.rendersUIOverlay should also be set to true.
- bool shouldRenderUI = cameraData.rendersOverlayUI;
+ bool shouldRenderUI = cameraData.rendersOverlayUI && cameraData.isLastBaseCamera;
bool outputToHDR = cameraData.isHDROutputActive;
if (shouldRenderUI && !outputToHDR)
m_DrawOverlayUIPass.RenderOverlay(renderGraph, frameData, in finalColorHandle, in finalDepthHandle);
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs
index bc27237bb02..75abd39d238 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs
@@ -1688,10 +1688,10 @@ static class Strings
}
///
- public bool IsGPUResidentDrawerSupportedBySRP(out string message, out LogType severty)
+ public bool IsGPUResidentDrawerSupportedBySRP(out string message, out LogType severity)
{
message = string.Empty;
- severty = LogType.Warning;
+ severity = LogType.Warning;
// Only the URP rendering paths using the cluster light loop (F+ lights & probes) can be used with GRD,
// since BiRP-style per-object lights and reflection probes are incompatible with DOTS instancing.
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/FrameData/UniversalCameraData.cs b/Packages/com.unity.render-pipelines.universal/Runtime/FrameData/UniversalCameraData.cs
index 036b59f5777..30266a552fa 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/FrameData/UniversalCameraData.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/FrameData/UniversalCameraData.cs
@@ -634,10 +634,16 @@ internal bool resetHistory
}
///
- /// Camera at the top of the overlay camera stack
+ /// Camera at the top of the overlay camera stack. If there is no stack, this equals the camera field above.
///
public Camera baseCamera;
+ ///
+ /// Returns true if the baseCamera field is the last base camera rendered this frame.
+ /// While the last camera in a camera stack refers to the last overlay camera, this flag identifies the last of all base cameras submitted for rendering.
+ ///
+ internal bool isLastBaseCamera;
+
///
public override void Reset()
{
@@ -695,6 +701,7 @@ public override void Reset()
stpHistory = null;
taaSettings = default;
baseCamera = null;
+ isLastBaseCamera = false;
stackAnyPostProcessingEnabled = false;
stackLastCameraOutputToHDR = false;
}
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs
index e79ad85437c..d102ffcda28 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs
@@ -1346,16 +1346,15 @@ public TextureHandle RenderLensFlareScreenSpace(RenderGraph renderGraph, Camera
return originalBloomTexture;
}
-#endregion
+ #endregion
- static private void ScaleViewportAndBlit(RasterCommandBuffer cmd, RTHandle sourceTextureHdl, RTHandle dest, UniversalCameraData cameraData, Material material, bool hasFinalPass)
+ static private void ScaleViewport(RasterCommandBuffer cmd, RTHandle sourceTextureHdl, RTHandle dest, UniversalCameraData cameraData, bool hasFinalPass)
{
- Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(sourceTextureHdl, dest, cameraData);
RenderTargetIdentifier cameraTarget = BuiltinRenderTextureType.CameraTarget;
- #if ENABLE_VR && ENABLE_XR_MODULE
+#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
cameraTarget = cameraData.xr.renderTarget;
- #endif
+#endif
if (dest.nameID == cameraTarget || cameraData.targetTexture != null)
{
if (hasFinalPass || !cameraData.resolveFinalTarget)
@@ -1380,12 +1379,31 @@ static private void ScaleViewportAndBlit(RasterCommandBuffer cmd, RTHandle sourc
else
cmd.SetViewport(cameraData.pixelRect);
}
+ }
+ static private void ScaleViewportAndBlit(RasterCommandBuffer cmd, RTHandle sourceTextureHdl, RTHandle dest, UniversalCameraData cameraData, Material material, bool hasFinalPass)
+ {
+ Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(sourceTextureHdl, dest, cameraData);
+ ScaleViewport(cmd, sourceTextureHdl, dest, cameraData, hasFinalPass);
Blitter.BlitTexture(cmd, sourceTextureHdl, scaleBias, material, 0);
}
-#region FinalPass
+ static private void ScaleViewportAndDrawVisibilityMesh(RasterCommandBuffer cmd, RTHandle sourceTextureHdl, RTHandle dest, UniversalCameraData cameraData, Material material, bool hasFinalPass)
+ {
+#if ENABLE_VR && ENABLE_XR_MODULE
+ Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(sourceTextureHdl, dest, cameraData);
+ ScaleViewport(cmd, sourceTextureHdl, dest, cameraData, hasFinalPass);
+
+ // Set property block for blit shader
+ MaterialPropertyBlock xrPropertyBlock = XRSystemUniversal.GetMaterialPropertyBlock();
+ xrPropertyBlock.SetVector(Shader.PropertyToID("_BlitScaleBias"), scaleBias);
+ xrPropertyBlock.SetTexture(Shader.PropertyToID("_BlitTexture"), sourceTextureHdl);
+ cameraData.xr.RenderVisibleMeshCustomMaterial(cmd, cameraData.xr.occlusionMeshScale, material, xrPropertyBlock, 1, cameraData.IsRenderTargetProjectionMatrixFlipped(dest));
+#endif
+ }
+
+ #region FinalPass
private class PostProcessingFinalSetupPassData
{
internal TextureHandle destinationTexture;
@@ -1614,7 +1632,18 @@ public void RenderFinalBlit(RenderGraph renderGraph, UniversalCameraData cameraD
Vector4 scaleBias = yflip ? new Vector4(viewportScale.x, -viewportScale.y, 0, viewportScale.y) : new Vector4(viewportScale.x, viewportScale.y, 0, 0);
cmd.SetViewport(data.cameraData.pixelRect);
- Blitter.BlitTexture(cmd, sourceTextureHdl, scaleBias, material, 0);
+#if ENABLE_VR && ENABLE_XR_MODULE
+ if (data.cameraData.xr.enabled && data.cameraData.xr.hasValidVisibleMesh)
+ {
+ MaterialPropertyBlock xrPropertyBlock = XRSystemUniversal.GetMaterialPropertyBlock();
+ xrPropertyBlock.SetVector(Shader.PropertyToID("_BlitScaleBias"), scaleBias);
+ xrPropertyBlock.SetTexture(Shader.PropertyToID("_BlitTexture"), sourceTextureHdl);
+
+ data.cameraData.xr.RenderVisibleMeshCustomMaterial(cmd, data.cameraData.xr.occlusionMeshScale, material, xrPropertyBlock, 1, !yflip);
+ }
+ else
+#endif
+ Blitter.BlitTexture(cmd, sourceTextureHdl, scaleBias, material, 0);
});
return;
@@ -1920,7 +1949,13 @@ public void RenderUberPost(RenderGraph renderGraph, ContextContainer frameData,
CoreUtils.SetKeyword(material, ShaderKeywordStrings._ENABLE_ALPHA_OUTPUT, data.enableAlphaOutput);
// Done with Uber, blit it
- ScaleViewportAndBlit(cmd, sourceTextureHdl, data.destinationTexture, data.cameraData, material, data.hasFinalPass);
+#if ENABLE_VR && ENABLE_XR_MODULE
+ if (data.cameraData.xr.enabled && data.cameraData.xr.hasValidVisibleMesh)
+ ScaleViewportAndDrawVisibilityMesh(cmd, sourceTextureHdl, data.destinationTexture, data.cameraData, material, data.hasFinalPass);
+ else
+#endif
+ ScaleViewportAndBlit(cmd, sourceTextureHdl, data.destinationTexture, data.cameraData, material, data.hasFinalPass);
+
});
return;
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRenderer.cs b/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRenderer.cs
index b5ea11bed01..09aa7af014f 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRenderer.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRenderer.cs
@@ -1001,6 +1001,8 @@ internal void SetupRenderGraphCameraProperties(RenderGraph renderGraph, bool isT
private class DrawGizmosPassData
{
public RendererListHandle gizmoRenderList;
+ public TextureHandle color;
+ public TextureHandle depth;
};
///
@@ -1018,20 +1020,25 @@ internal void DrawRenderGraphGizmos(RenderGraph renderGraph, ContextContainer fr
if (!Handles.ShouldRenderGizmos() || cameraData.camera.sceneViewFilterMode == Camera.SceneViewFilterMode.ShowFiltered)
return;
- using (var builder = renderGraph.AddRasterRenderPass("Draw Gizmos Pass", out var passData,
+ // We cannot draw gizmo renderer lists from a raster pass because gizmo rendering triggers the MonoBehaviour.OnDrawGizmos or MonoBehaviour.OnDrawGizmosSelected callbacks, which could run arbitrary graphics code
+ // like SetRenderTarget, texture and resource loading, ...
+ using (var builder = renderGraph.AddUnsafePass("Draw Gizmos Pass", out var passData,
Profiling.drawGizmos))
{
- builder.SetRenderAttachment(color, 0, AccessFlags.Write);
- builder.SetRenderAttachmentDepth(depth, AccessFlags.ReadWrite);
+ builder.UseTexture(color, AccessFlags.Write);
+ builder.UseTexture(depth, AccessFlags.ReadWrite);
passData.gizmoRenderList = renderGraph.CreateGizmoRendererList(cameraData.camera, gizmoSubset);
+ passData.color = color;
+ passData.depth = depth;
builder.UseRendererList(passData.gizmoRenderList);
builder.AllowPassCulling(false);
- builder.SetRenderFunc((DrawGizmosPassData data, RasterGraphContext rgContext) =>
+ builder.SetRenderFunc((DrawGizmosPassData data, UnsafeGraphContext rgContext) =>
{
using (new ProfilingScope(rgContext.cmd, Profiling.drawGizmos))
{
+ rgContext.cmd.SetRenderTarget(data.color, data.depth);
rgContext.cmd.DrawRendererList(data.gizmoRenderList);
}
});
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs
index 88051430d5f..6b5fced6132 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs
@@ -458,12 +458,14 @@ protected override void Render(ScriptableRenderContext renderContext, List
/// Render context used to record commands during execution.
- /// Camera to render.
- static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera)
+ /// Camera to render.
+ /// True if this is the last base camera.
+ static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera, bool isLastBaseCamera)
{
using var profScope = new ProfilingScope(ProfilingSampler.Get(URPProfileId.RenderCameraStack));
@@ -1049,8 +1052,7 @@ static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera
#endif
// InitializeAdditionalCameraData needs to be initialized after the cameraTargetDescriptor is set because it needs to know the
// msaa level of cameraTargetDescriptor and XR modifications.
- InitializeAdditionalCameraData(baseCamera, baseCameraAdditionalData, !isStackedRendering,
- baseCameraData);
+ InitializeAdditionalCameraData(baseCamera, baseCameraAdditionalData, !isStackedRendering, isLastBaseCamera, baseCameraData);
#if VISUAL_EFFECT_GRAPH_0_0_1_OR_NEWER
//It should be called before culling to prepare material. When there isn't any VisualEffect component, this method has no effect.
@@ -1115,7 +1117,7 @@ static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera
}
#endif
- InitializeAdditionalCameraData(overlayCamera, overlayAdditionalCameraData, false, overlayCameraData);
+ InitializeAdditionalCameraData(overlayCamera, overlayAdditionalCameraData, false, isLastBaseCamera, overlayCameraData);
overlayCameraData.camera = overlayCamera;
overlayCameraData.baseCamera = baseCamera;
@@ -1129,8 +1131,8 @@ static void RenderCameraStack(ScriptableRenderContext context, Camera baseCamera
#endif
UpdateVolumeFramework(overlayCamera, overlayAdditionalCameraData);
- bool lastCamera = i == lastActiveOverlayCameraIndex;
- InitializeAdditionalCameraData(overlayCamera, overlayAdditionalCameraData, lastCamera, overlayCameraData);
+ bool isLastOverlayCamera = i == lastActiveOverlayCameraIndex;
+ InitializeAdditionalCameraData(overlayCamera, overlayAdditionalCameraData, isLastOverlayCamera, isLastBaseCamera, overlayCameraData);
overlayCameraData.stackAnyPostProcessingEnabled = anyPostProcessingEnabled;
overlayCameraData.stackLastCameraOutputToHDR = finalOutputHDR;
@@ -1470,8 +1472,9 @@ static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCa
/// Camera to initialize settings from.
/// Additional camera data component to initialize settings from.
/// True if this is the last camera in the stack and rendering should resolve to camera target.
+ /// True if the base camera is the last base camera rendered this frame.
/// Settings to be initialized.
- static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget, UniversalCameraData cameraData)
+ static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCameraData additionalCameraData, bool resolveFinalTarget, bool isLastBaseCamera, UniversalCameraData cameraData)
{
using var profScope = new ProfilingScope(Profiling.Pipeline.initializeAdditionalCameraData);
@@ -1521,6 +1524,7 @@ static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCam
cameraData.renderer = renderer;
cameraData.postProcessingRequiresDepthTexture = CheckPostProcessForDepth(cameraData);
cameraData.resolveFinalTarget = resolveFinalTarget;
+ cameraData.isLastBaseCamera = isLastBaseCamera;
// enable GPU occlusion culling in game and scene views only
cameraData.useGPUOcclusionCulling = GPUResidentDrawer.IsInstanceOcclusionCullingEnabled()
@@ -2428,8 +2432,8 @@ private static void SetupScreenMSAASamplesState(int cameraCount)
#if UNITY_EDITOR
protected override bool IsPreviewSupported(Camera camera, out string reason)
{
- if (camera != null
- && camera.TryGetComponent(out var additionalData)
+ if (camera != null
+ && camera.TryGetComponent(out var additionalData)
&& additionalData.renderType == CameraRenderType.Overlay)
{
reason = "Overlay camera cannot be previewed directly.\nYou need to use a base camera instead.";
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs
index 358e993717c..b46cac64c71 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs
@@ -1516,6 +1516,21 @@ void SortCameras(Camera[] cameras)
#endif
+ ///
+ /// Returns the index of the last base camera, so that the Screen Space Overlay UI is drawn only on that camera.
+ ///
+ private int GetLastBaseCameraIndex(List<Camera> cameras)
+ {
+ int lastBaseCameraIndex = 0;
+ for (int i = 0; i < cameras.Count; i++)
+ {
+ cameras[i].TryGetComponent<UniversalAdditionalCameraData>(out var baseCameraAdditionalData);
+ if (baseCameraAdditionalData?.renderType == CameraRenderType.Base)
+ lastBaseCameraIndex = i;
+ }
+ return lastBaseCameraIndex;
+ }
+
internal static GraphicsFormat MakeRenderTextureGraphicsFormat(bool isHdrEnabled, HDRColorBufferPrecision requestHDRColorBufferPrecision, bool needsAlpha)
{
if (isHdrEnabled)
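As a hypothetical illustration of how GetLastBaseCameraIndex and the new isLastBaseCamera flag fit together (the actual call site inside UniversalRenderPipeline.Render is not part of this diff), a camera loop could derive the flag per camera like this; LastBaseCameraSketch and RenderAllCameras are illustrative names only.

```
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Hypothetical sketch: mirrors the helper added in this diff and shows how the
// per-camera isLastBaseCamera flag could be computed before rendering each stack.
static class LastBaseCameraSketch
{
    static int GetLastBaseCameraIndex(List<Camera> cameras)
    {
        int lastBaseCameraIndex = 0;
        for (int i = 0; i < cameras.Count; i++)
        {
            cameras[i].TryGetComponent<UniversalAdditionalCameraData>(out var additionalData);
            // Only base cameras qualify; overlay cameras never become the last base camera.
            if (additionalData != null && additionalData.renderType == CameraRenderType.Base)
                lastBaseCameraIndex = i;
        }
        return lastBaseCameraIndex;
    }

    internal static void RenderAllCameras(ScriptableRenderContext context, List<Camera> cameras)
    {
        int lastBaseCameraIndex = GetLastBaseCameraIndex(cameras);
        for (int i = 0; i < cameras.Count; i++)
        {
            bool isLastBaseCamera = i == lastBaseCameraIndex;
            // The pipeline would forward this flag to RenderCameraStack so that the
            // screen-space overlay UI is enqueued only for the last base camera.
            Debug.Log($"{cameras[i].name}: isLastBaseCamera = {isLastBaseCamera}");
        }
    }
}
```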
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs
index a1a8649cb7c..240405c6d22 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs
@@ -1513,7 +1513,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re
// We can explicitly render the overlay UI from URP when HDR output is not enabled.
// SupportedRenderingFeatures.active.rendersUIOverlay should also be set to true.
- if (shouldRenderUI && !outputToHDR)
+ if (shouldRenderUI && cameraData.isLastBaseCamera && !outputToHDR)
{
EnqueuePass(m_DrawOverlayUIPass);
}
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs
index 078b1c3dd13..584d2335bc8 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs
@@ -1445,7 +1445,7 @@ private void OnAfterRendering(RenderGraph renderGraph, bool applyPostProcessing)
// We can explicitly render the overlay UI from URP when HDR output is not enabled.
// SupportedRenderingFeatures.active.rendersUIOverlay should also be set to true.
- bool shouldRenderUI = cameraData.rendersOverlayUI;
+ bool shouldRenderUI = cameraData.rendersOverlayUI && cameraData.isLastBaseCamera;
bool outputToHDR = cameraData.isHDROutputActive;
if (shouldRenderUI && !outputToHDR)
{
diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/XR/XRSystemUniversal.cs b/Packages/com.unity.render-pipelines.universal/Runtime/XR/XRSystemUniversal.cs
index db93bb95c28..561c6366a34 100644
--- a/Packages/com.unity.render-pipelines.universal/Runtime/XR/XRSystemUniversal.cs
+++ b/Packages/com.unity.render-pipelines.universal/Runtime/XR/XRSystemUniversal.cs
@@ -7,6 +7,14 @@ internal static class XRSystemUniversal
// Prevent GC by keeping an array pre-allocated
static Matrix4x4[] s_projMatrix = new Matrix4x4[2];
+#if ENABLE_VR && ENABLE_XR_MODULE
+ static MaterialPropertyBlock s_XRSharedPropertyBlock = new MaterialPropertyBlock();
+ internal static MaterialPropertyBlock GetMaterialPropertyBlock()
+ {
+ return s_XRSharedPropertyBlock;
+ }
+#endif
+
internal static void BeginLateLatching(Camera camera, XRPassUniversal xrPass)
{
#if ENABLE_VR && ENABLE_XR_MODULE
diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Clustering.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Clustering.hlsl
index bf8dc36fc51..228b642a04a 100644
--- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Clustering.hlsl
+++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Clustering.hlsl
@@ -6,6 +6,7 @@
#if USE_CLUSTER_LIGHT_LOOP
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/FoveatedRendering.hlsl"
+#define CLUSTER_HAS_REFLECTION_PROBES !(defined(_ENVIRONMENTREFLECTIONS_OFF) || (defined(_REFLECTION_PROBE_ATLAS_KEYWORD_DECLARED) && !defined(_REFLECTION_PROBE_ATLAS)))
// Debug switches for disabling parts of the algorithm. Not implemented for mobile.
#define URP_FP_DISABLE_ZBINNING 0
@@ -74,7 +75,7 @@ ClusterIterator ClusterInit(float2 normalizedScreenSpaceUV, float3 positionWS, i
#else
uint header = headerIndex == 0 ? ((URP_FP_PROBES_BEGIN - 1) << 16) : (((URP_FP_WORDS_PER_TILE * 32 - 1) << 16) | URP_FP_PROBES_BEGIN);
#endif
-#if MAX_LIGHTS_PER_TILE > 32 || (!defined(_ENVIRONMENTREFLECTIONS_OFF) && defined(_REFLECTION_PROBE_ATLAS))
+#if MAX_LIGHTS_PER_TILE > 32 || CLUSTER_HAS_REFLECTION_PROBES
state.entityIndexNextMax = header;
#else
uint tileIndex = state.tileOffset;
@@ -94,7 +95,7 @@ ClusterIterator ClusterInit(float2 normalizedScreenSpaceUV, float3 positionWS, i
// internal
bool ClusterNext(inout ClusterIterator it, out uint entityIndex)
{
-#if MAX_LIGHTS_PER_TILE > 32 || (!defined(_ENVIRONMENTREFLECTIONS_OFF) && defined(_REFLECTION_PROBE_ATLAS))
+#if MAX_LIGHTS_PER_TILE > 32 || CLUSTER_HAS_REFLECTION_PROBES
uint maxIndex = it.entityIndexNextMax >> 16;
[loop] while (it.tileMask == 0 && (it.entityIndexNextMax & 0xFFFF) <= maxIndex)
{
@@ -119,7 +120,7 @@ bool ClusterNext(inout ClusterIterator it, out uint entityIndex)
bool hasNext = it.tileMask != 0;
uint bitIndex = FIRST_BIT_LOW(it.tileMask);
it.tileMask ^= (1 << bitIndex);
-#if MAX_LIGHTS_PER_TILE > 32 || (!defined(_ENVIRONMENTREFLECTIONS_OFF) && defined(_REFLECTION_PROBE_ATLAS))
+#if MAX_LIGHTS_PER_TILE > 32 || CLUSTER_HAS_REFLECTION_PROBES
// Subtract 32 because it stores the index of the _next_ word to fetch, but we want the current.
// The upper 16 bits and bits representing values < 32 are masked out. The latter is due to the fact that it will be
// included in what FIRST_BIT_LOW returns.
diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/GlobalIllumination.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/GlobalIllumination.hlsl
index 1e5f197911c..e647cbfd9e2 100644
--- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/GlobalIllumination.hlsl
+++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/GlobalIllumination.hlsl
@@ -249,7 +249,7 @@ half3 CalculateIrradianceFromReflectionProbes(half3 reflectVector, float3 positi
{
half3 irradiance = half3(0.0h, 0.0h, 0.0h);
half mip = PerceptualRoughnessToMipmapLevel(perceptualRoughness);
-#if USE_CLUSTER_LIGHT_LOOP && defined(_REFLECTION_PROBE_ATLAS)
+#if USE_CLUSTER_LIGHT_LOOP && CLUSTER_HAS_REFLECTION_PROBES
float totalWeight = 0.0f;
uint probeIndex;
ClusterIterator it = ClusterInit(normalizedScreenSpaceUV, positionWS, 1);
diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl
index 5694fe46a03..81a0f97afa4 100644
--- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl
+++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl
@@ -406,8 +406,8 @@ half AdditionalLightRealtimeShadow(int lightIndex, float3 positionWS, half3 ligh
if (isPointLight)
{
// This is a point light, we have to find out which shadow slice to sample from
- float cubemapFaceId = CubeMapFaceID(-lightDirection);
- shadowSliceIndex += cubemapFaceId;
+ const int cubeFaceOffset = CubeMapFaceID(-lightDirection);
+ shadowSliceIndex += cubeFaceOffset;
}
#if USE_STRUCTURED_BUFFER_FOR_LIGHT_DATA
@@ -424,7 +424,7 @@ half AdditionalLightRealtimeShadow(int lightIndex, float3 positionWS, half3 ligh
half AdditionalLightRealtimeShadow(int lightIndex, float3 positionWS, half3 lightDirection)
{
- #if defined(ADDITIONAL_LIGHT_CALCULATE_SHADOWS)
+ #if !defined(ADDITIONAL_LIGHT_CALCULATE_SHADOWS)
return half(1.0);
#endif
diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/.buginfo b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/.buginfo
new file mode 100644
index 00000000000..fbd5f8466e1
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/.buginfo
@@ -0,0 +1 @@
+area: Post-processing and UI Features
diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/FinalPost.shader b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/FinalPost.shader
index b719530bd4c..9db8b030508 100644
--- a/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/FinalPost.shader
+++ b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/FinalPost.shader
@@ -160,36 +160,64 @@ Shader "Hidden/Universal Render Pipeline/FinalPost"
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
- LOD 100
- ZTest Always ZWrite Off Cull Off
Pass
{
Name "FinalPost"
-
+ LOD 100
+ ZTest Always ZWrite Off Cull Off
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment FragFinalPost
#pragma target 4.5
ENDHLSL
}
+
+ Pass
+ {
+ Name "FinalPostXR"
+ LOD 100
+ ZWrite Off ZTest LEqual Blend Off Cull Off
+
+ HLSLPROGRAM
+ #include "Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl"
+
+ #pragma vertex VertVisibilityMeshXR
+ #pragma fragment FragFinalPost
+ #pragma target 4.5
+ ENDHLSL
+ }
}
/// Fallback version of FinalPost shader which lacks support for FSR
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
- LOD 100
- ZTest Always ZWrite Off Cull Off
Pass
{
Name "FinalPost"
+ LOD 100
+ ZTest Always ZWrite Off Cull Off
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment FragFinalPost
ENDHLSL
}
+
+ Pass
+ {
+ Name "FinalPostXR"
+ LOD 100
+ ZWrite Off ZTest LEqual Blend Off Cull Off
+
+ HLSLPROGRAM
+ #include "Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl"
+
+ #pragma vertex VertVisibilityMeshXR
+ #pragma fragment FragFinalPost
+ ENDHLSL
+ }
}
}
diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader
index b6425fd4895..7b0aaf4d3f0 100644
--- a/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader
+++ b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader
@@ -324,18 +324,34 @@ Shader "Hidden/Universal Render Pipeline/UberPost"
SubShader
{
Tags { "RenderType" = "Opaque" "RenderPipeline" = "UniversalPipeline"}
- LOD 100
- ZTest Always ZWrite Off Cull Off
- //ColorMask RGB
Pass
{
Name "UberPost"
+ LOD 100
+ ZTest Always ZWrite Off Cull Off
+ //ColorMask RGB
+
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment FragUberPost
ENDHLSL
}
+
+ Pass
+ {
+ Name "UberPostXR"
+ LOD 100
+ ZWrite Off ZTest LEqual Blend Off Cull Off
+
+ HLSLPROGRAM
+ #include "Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl"
+
+ #pragma vertex VertVisibilityMeshXR
+ #pragma fragment FragUberPost
+ ENDHLSL
+
+ }
}
}
diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitAdd.shader b/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitAdd.shader
index afbfe21e34e..ffb530fa354 100644
--- a/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitAdd.shader
+++ b/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitAdd.shader
@@ -104,7 +104,19 @@ Shader "Hidden/Universal Render Pipeline/Terrain/Lit (Add Pass)"
Name "GBuffer"
Tags{"LightMode" = "UniversalGBuffer"}
- Blend One One
+ Blend 0 One One
+ Blend 1 One One
+ Blend 2 One One
+ Blend 3 One One
+ // disable the features that aren't needed for add pass deferred rendering
+ Blend 4 Off
+ Blend 5 Off
+ Blend 6 Off
+ ColorMask RGB 0 // Don't write .a to RT0.
+ ColorMask 0 4 // Don't write to RT4~6 (depth as color, shadow mask, rendering layer)
+ ColorMask 0 5
+ ColorMask 0 6
+
HLSLPROGRAM
#pragma target 4.5
diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitPasses.hlsl b/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitPasses.hlsl
index ca123a43366..f64343f5c0d 100644
--- a/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitPasses.hlsl
+++ b/Packages/com.unity.render-pipelines.universal/Shaders/Terrain/TerrainLitPasses.hlsl
@@ -481,27 +481,10 @@ void SplatmapFragment(
brdfData.diffuse.rgb *= alpha;
brdfData.specular.rgb *= alpha;
brdfData.reflectivity *= alpha;
- inputData.normalWS = inputData.normalWS * alpha; //Note that normal blending is completely incorrect with 'Accurate G-Buffer Normals' enabled (since octahedral mapping is not linear)
+ inputData.normalWS = inputData.normalWS * alpha;
smoothness *= alpha;
- GBufferFragOutput gbuffer = PackGBuffersBRDFData(brdfData, inputData, smoothness, color.rgb, occlusion);
-
- #ifdef TERRAIN_SPLAT_ADDPASS
- //These parameters cannot be alpha blended, so we allow them to be set them in the base pass and zero them out in the add-pass:
- gbuffer.gBuffer0.a = 0.0f; //materialFlags
- //gbuffer.color.rgb = half3(0.0f, 0.0f, 0.0f); //globalIllumination
- #if defined(GBUFFER_FEATURE_DEPTH)
- gbuffer.depth = 0.0f;
- #endif
- #if defined(GBUFFER_FEATURE_SHADOWMASK)
- gbuffer.shadowMask = half4(0.0f, 0.0f, 0.0f, 0.0f);
- #endif
- #if defined(GBUFFER_FEATURE_RENDERING_LAYERS)
- gbuffer.meshRenderingLayers = half4(0.0f, 0.0f, 0.0f, 0.0f);
- #endif
- #endif
-
- return gbuffer;
+ return PackGBuffersBRDFData(brdfData, inputData, smoothness, color.rgb, occlusion);
#else
half4 color = UniversalFragmentPBR(inputData, albedo, metallic, /* specular */ half3(0.0h, 0.0h, 0.0h), smoothness, occlusion, /* emission */ half3(0, 0, 0), alpha);
diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl b/Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl
new file mode 100644
index 00000000000..bd817edec93
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl
@@ -0,0 +1,35 @@
+// Helper include for using XRVisibilityMesh vertex shader
+#include "Packages/com.unity.render-pipelines.core/Runtime/Utilities/Blit.hlsl"
+#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
+
+struct AttributesXR
+{
+ float4 vertex : POSITION;
+ UNITY_VERTEX_INPUT_INSTANCE_ID
+};
+
+Varyings VertVisibilityMeshXR(AttributesXR IN)
+{
+ Varyings OUT;
+ UNITY_SETUP_INSTANCE_ID(IN);
+ UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(OUT);
+
+ OUT.positionCS = mul(UNITY_MATRIX_M, float4(IN.vertex.xy * float2(2.0f, -2.0f) + float2(-1.0f, 1.0), UNITY_NEAR_CLIP_VALUE, 1.0f));
+
+#if defined(UNITY_STEREO_INSTANCING_ENABLED)
+ OUT.stereoTargetEyeIndexAsRTArrayIdx = IN.vertex.z;
+#elif defined(UNITY_STEREO_MULTIVIEW_ENABLED)
+ if (unity_StereoEyeIndex != uint(IN.vertex.z))
+ OUT.positionCS = float4(0.0f, 0.0f, 0.0f, 0.0f);
+#endif
+
+ // Mimic the same logic as GetFullScreenTriangleVertexPosition, where the v coordinate is flipped
+ // This matches the orientation of the screen to avoid additional y-flips
+#if UNITY_UV_STARTS_AT_TOP
+ OUT.texcoord.xy = DYNAMIC_SCALING_APPLY_SCALEBIAS((OUT.positionCS.xy) * float2(0.5f, -0.5f) + float2(0.5f, 0.5f));
+#else
+ OUT.texcoord.xy = DYNAMIC_SCALING_APPLY_SCALEBIAS((OUT.positionCS.xy) * float2(0.5f, 0.5f) + float2(0.5f, 0.5f));
+#endif
+
+ return OUT;
+}
diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl.meta b/Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl.meta
new file mode 100644
index 00000000000..0a0feadc342
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.universal/Shaders/XR/XRVisibilityMeshHelper.hlsl.meta
@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: ded32ce19f1afce42a36cb8e415e3aca
+ShaderIncludeImporter:
+ externalObjects: {}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Packages/com.unity.shadergraph/Documentation~/Append-Node.md b/Packages/com.unity.shadergraph/Documentation~/Append-Node.md
new file mode 100644
index 00000000000..28c06c8730d
--- /dev/null
+++ b/Packages/com.unity.shadergraph/Documentation~/Append-Node.md
@@ -0,0 +1,35 @@
+# Append Node
+
+## Description
+
+Creates a new vector **Out** by combining the channels of input **A** followed by the channels of input **B**.
+Inputs **A** and **B** can have up to **three** channels.
+
+**Out** can have **two** to **four** channels, depending on the combination of channels of the inputs.
+
+Input **A**'s channels take priority over input **B**'s, so the two inputs combine into a maximum of **four** channels in the output.
+
+## Ports
+
+| Name | Direction | Type | Binding | Description |
+|:------------ |:-------------|:-----|:---|:---|
+| A | Input | Dynamic | None | First input value |
+| B | Input | Dynamic | None | Second input value |
+| Out | Output | Dynamic | None | Combined vector from A and B |
+
+## Generated Code Example
+
+The following example code represents one possible outcome of this node for different input combinations.
+
+### Vector2 and Float
+```
+float3 Append_Out = float3( A.xy, B.x);
+```
+### Float and Vector3
+```
+float4 Append_Out = float4( A.x, B.xyz);
+```
+### Vector3 and Vector2
+```
+float4 Append_Out = float4( A.xyz, B.x);
+```
\ No newline at end of file
diff --git a/Packages/com.unity.shadergraph/Documentation~/Channel-Nodes.md b/Packages/com.unity.shadergraph/Documentation~/Channel-Nodes.md
index 75fad691fc0..565c7428b01 100644
--- a/Packages/com.unity.shadergraph/Documentation~/Channel-Nodes.md
+++ b/Packages/com.unity.shadergraph/Documentation~/Channel-Nodes.md
@@ -1,10 +1,13 @@
# Channel Nodes
-|[Combine](Combine-Node.md)| [Flip](Flip-Node.md) |
+|[Append](Append-Node.md)|[Combine](Combine-Node.md)|
|:---------:|:---------:|
-|||
-|Creates new vectors from the four inputs **R**, **G**, **B** and **A**.|Flips the individual channels of input **In** selected by the [Node](Node.md)'s parameters.|
-|[**Split**](Split-Node.md)|[**Swizzle**](Swizzle-Node.md)|
-|||
-|Splits the input vector **In** into four **Float** outputs **R**, **G**, **B** and **A**.|Creates a new [vector](https://docs.unity3d.com/Manual/VectorCookbook.html) from the reordered elements of the input vector. |
+|||
|Combines two float or vector inputs into a single new vector of variable dimensions.|Creates new vectors from the four inputs **R**, **G**, **B** and **A**.|
+|[**Flip**](Flip-Node.md)|[**Split**](Split-Node.md)|
+|||
+|Flips the individual channels of input **In** selected by the [Node](Node.md)'s parameters.|Splits the input vector **In** into four **Float** outputs **R**, **G**, **B** and **A**.|
+|[**Swizzle**](Swizzle-Node.md)||
+|||
+|Creates a new [vector](https://docs.unity3d.com/Manual/VectorCookbook.html) from the reordered elements of the input vector.||
\ No newline at end of file
diff --git a/Packages/com.unity.shadergraph/Documentation~/TableOfContents.md b/Packages/com.unity.shadergraph/Documentation~/TableOfContents.md
index 3ee1cf68408..861a45e2e04 100644
--- a/Packages/com.unity.shadergraph/Documentation~/TableOfContents.md
+++ b/Packages/com.unity.shadergraph/Documentation~/TableOfContents.md
@@ -68,6 +68,7 @@
* Utility
* [Colorspace Conversion](Colorspace-Conversion-Node)
* [Channel](Channel-Nodes)
+ * [Append](Append-Node)
* [Combine](Combine-Node)
* [Flip](Flip-Node)
* [Split](Split-Node)
diff --git a/Packages/com.unity.shadergraph/Documentation~/images/AppendNodeThumb.png b/Packages/com.unity.shadergraph/Documentation~/images/AppendNodeThumb.png
new file mode 100644
index 00000000000..645feed756a
Binary files /dev/null and b/Packages/com.unity.shadergraph/Documentation~/images/AppendNodeThumb.png differ
diff --git a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Channel/AppendVectorNode.cs b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Channel/AppendVectorNode.cs
new file mode 100644
index 00000000000..369b89cd8e9
--- /dev/null
+++ b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Channel/AppendVectorNode.cs
@@ -0,0 +1,184 @@
+using System;
+using System.Collections.Generic;
+using UnityEditor.Graphing;
+using UnityEngine;
+
+namespace UnityEditor.ShaderGraph
+{
+ [Title("Channel", "Append")]
+ class AppendVectorNode : AbstractMaterialNode, IGeneratesBodyCode
+ {
+ public AppendVectorNode()
+ {
+ name = "Append";
+ synonyms = new string[] { "join", "combine" };
+ UpdateNodeAfterDeserialization();
+ }
+
+ const int Input1SlotId = 0;
+ const int Input2SlotId = 1;
+ const int OutputSlotId = 2;
+ const string kInput1SlotName = "A";
+ const string kInput2SlotName = "B";
+ const string kOutputSlotName = "Out";
+
+ public override bool hasPreview => true;
+
+ public sealed override void UpdateNodeAfterDeserialization()
+ {
+ AddSlot(new DynamicVectorMaterialSlot(Input1SlotId, kInput1SlotName, kInput1SlotName, SlotType.Input, Vector4.zero));
+ AddSlot(new DynamicVectorMaterialSlot(Input2SlotId, kInput2SlotName, kInput2SlotName, SlotType.Input, Vector4.zero));
+ AddSlot(new DynamicVectorMaterialSlot(OutputSlotId, kOutputSlotName, kOutputSlotName, SlotType.Output, Vector4.zero));
+ RemoveSlotsNameNotMatching(new[] { Input1SlotId, Input2SlotId, OutputSlotId });
+ }
+
+ public void GenerateNodeCode(ShaderStringBuilder sb, GenerationMode generationMode)
+ {
+ var input1Value = GetSlotValue(Input1SlotId, generationMode);
+ var input2Value = GetSlotValue(Input2SlotId, generationMode);
+ var outputValue = GetSlotValue(OutputSlotId, generationMode);
+
+ var outputTypeString = FindOutputSlot(OutputSlotId).concreteValueType.ToShaderString();
+ var input1Type = FindInputSlot(Input1SlotId).concreteValueType;
+ var input2Type = FindInputSlot(Input2SlotId).concreteValueType;
+
+ var input1Swizzle = SwizzleFromVectorSlotType(input1Type, 3);
+ var input2Swizzle = SwizzleFromVectorSlotType(input2Type, 3);
+
+ sb.AppendLine("{0} {1} = {0}( {2}.{3}, {4}.{5} );",
+ outputTypeString,
+ GetVariableNameForSlot(OutputSlotId),
+ input1Value,
+ input1Swizzle,
+ input2Value,
+ input2Swizzle
+ );
+ }
+
+ string SwizzleFromVectorSlotType( ConcreteSlotValueType type , uint dimensionLimit = 3)
+ {
+ if (dimensionLimit == 0)
+ dimensionLimit = 4;
+
+ uint typeDimension = type switch {
+ ConcreteSlotValueType.Vector2 => 2,
+ ConcreteSlotValueType.Vector3 => 3,
+ ConcreteSlotValueType.Vector4 => 4,
+ _ => 1,
+ };
+
+ if (typeDimension > dimensionLimit)
+ typeDimension = dimensionLimit;
+
+ return typeDimension switch {
+ 1 => "x",
+ 2 => "xy",
+ 3 => "xyz",
+ _ => "xyzw",
+ };
+ }
+
+ uint ProcessInputSlot(MaterialSlot inputSlot, string referenceName, uint maxDimensions = 4)
+ {
+ uint dimensions = 0;
+
+ if (maxDimensions == 0)
+ maxDimensions = 4;
+
+ inputSlot.hasError = false;
+
+ // default input type
+ var outputConcreteType = ConcreteSlotValueType.Vector1;
+
+ // if there is a connection
+ var edges = owner.GetEdges(inputSlot.slotReference);
+ foreach(var edge in edges)
+ {
+ if (edge != null)
+ {
+ // get the output details
+ var outputSlotRef = edge.outputSlot;
+ var outputNode = outputSlotRef.node;
+ if (outputNode != null)
+ {
+ var outputSlot = outputNode.FindOutputSlot(outputSlotRef.slotId);
+ if (outputSlot != null)
+ {
+ if (!outputSlot.hasError)
+ {
+ outputConcreteType = outputSlot.concreteValueType;
+ }
+ }
+ }
+
+ break;
+ }
+ }
+
+ var dynVectorInputSlot = inputSlot as DynamicVectorMaterialSlot;
+
+ // get the connected output dimensions and limit it if needed
+ dimensions = outputConcreteType switch {
+ ConcreteSlotValueType.Vector2 => 2,
+ ConcreteSlotValueType.Vector3 => 3,
+ ConcreteSlotValueType.Vector4 => 4,
+ _ => 1,
+ };
+
+ if (dimensions > maxDimensions)
+ dimensions = maxDimensions;
+
+ outputConcreteType = dimensions switch {
+ 2 => ConcreteSlotValueType.Vector2,
+ 3 => ConcreteSlotValueType.Vector3,
+ 4 => ConcreteSlotValueType.Vector4,
+ _ => ConcreteSlotValueType.Vector1
+ };
+
+ dynVectorInputSlot.SetConcreteType(outputConcreteType);
+
+ return dimensions;
+ }
+
+ public override void EvaluateDynamicMaterialSlots(List<MaterialSlot> inputSlots, List<MaterialSlot> outputSlots)
+ {
+ uint slot1Dimensions = 1;
+ uint slot2Dimensions = 1;
+ uint availableDimensionsForInput2 = 4;
+ uint outputVectorDimensions = 0;
+
+ // iterate over the input slots
+ int i = 0;
+ foreach (var inputSlot in inputSlots)
+ {
+ if (i == 0)
+ {
+ slot1Dimensions = ProcessInputSlot(inputSlot, kInput1SlotName, 3);
+ availableDimensionsForInput2 -= slot1Dimensions;
+ }
+ else if (i == 1)
+ {
+ slot2Dimensions = ProcessInputSlot(inputSlot, kInput2SlotName, availableDimensionsForInput2);
+ }
+ else
+ break; // No other input slots should be present
+
+ i++;
+ }
+
+ // Set the output vector dimension to the sum of the input
+ outputVectorDimensions = slot1Dimensions + slot2Dimensions;
+ foreach (var outputSlot in outputSlots)
+ {
+ (outputSlot as DynamicVectorMaterialSlot).SetConcreteType( outputVectorDimensions switch {
+ 2 => ConcreteSlotValueType.Vector2,
+ 3 => ConcreteSlotValueType.Vector3,
+ 4 => ConcreteSlotValueType.Vector4,
+ _ => ConcreteSlotValueType.Vector1
+ });
+ }
+
+ CalculateNodeHasError();
+ }
+ }
+}
diff --git a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Channel/AppendVectorNode.cs.meta b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Channel/AppendVectorNode.cs.meta
new file mode 100644
index 00000000000..38a320d27e8
--- /dev/null
+++ b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Channel/AppendVectorNode.cs.meta
@@ -0,0 +1,2 @@
+fileFormatVersion: 2
+guid: 34bb760e026fa494c83d62660c2c9b4a
\ No newline at end of file
diff --git a/Packages/com.unity.shadergraph/Editor/Drawing/PreviewManager.cs b/Packages/com.unity.shadergraph/Editor/Drawing/PreviewManager.cs
index 9080a251547..98f2b0d9485 100644
--- a/Packages/com.unity.shadergraph/Editor/Drawing/PreviewManager.cs
+++ b/Packages/com.unity.shadergraph/Editor/Drawing/PreviewManager.cs
@@ -638,7 +638,7 @@ public void RenderPreviews(EditorWindow editorWindow, bool requestShaders = true
if (drawPreviewCount <= 0)
return;
- previewTime += Time.deltaTime;
+ previewTime += Time.fixedDeltaTime;
var timeParameters = new Vector4(previewTime, Mathf.Sin(previewTime), Mathf.Cos(previewTime), 0.0f);
m_SharedPreviewPropertyBlock.SetVector("_TimeParameters", timeParameters);
diff --git a/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs b/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs
index 926f78cab85..76787bd929e 100644
--- a/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs
+++ b/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs
@@ -395,9 +395,6 @@ void GenerateSubShader(int targetIndex, SubShaderDescriptor descriptor, Property
if (m_Mode == GenerationMode.Preview)
activeFields.baseInstance.Add(Fields.IsPreview);
- if (m_OutputNode == null && m_Mode == GenerationMode.Preview)
- activeFields.baseInstance.Add(Fields.IsMainPreview);
-
// Check masternode fields for valid passes
if (pass.TestActive(activeFields))
GenerateShaderPass(targetIndex, pass.descriptor, activeFields, activeBlockDescriptors.Select(x => x.descriptor).ToList(), subShaderProperties);
@@ -1070,6 +1067,9 @@ void ProcessStackForPass(ContextData contextData, BlockFieldDescriptor[] passBlo
{
graphDefines.AppendLine("#define SHADERPASS {0}", pass.referenceName);
+ if (m_OutputNode == null && m_Mode == GenerationMode.Preview)
+ graphDefines.AppendLine("#define SHADERGRAPH_PREVIEW_MAIN");
+
if (pass.defines != null)
{
foreach (DefineCollection.Item define in pass.defines)
diff --git a/Packages/com.unity.shadergraph/Editor/Generation/TargetResources/Fields.cs b/Packages/com.unity.shadergraph/Editor/Generation/TargetResources/Fields.cs
index ebc1b330e6b..153a9ddd457 100644
--- a/Packages/com.unity.shadergraph/Editor/Generation/TargetResources/Fields.cs
+++ b/Packages/com.unity.shadergraph/Editor/Generation/TargetResources/Fields.cs
@@ -19,7 +19,6 @@ internal static class Fields
public static FieldDescriptor BlendAlpha = new FieldDescriptor(kBlendMode, "Alpha", "_BLENDMODE_ALPHA 1"); // URP: only sprite targets, vfx: HDRP?
public static FieldDescriptor DoubleSided = new FieldDescriptor(string.Empty, "DoubleSided", "_DOUBLE_SIDED 1"); // URP: only sprite targets, duplicated in HD
public static FieldDescriptor IsPreview = new FieldDescriptor(string.Empty, "isPreview", "SHADERGRAPH_PREVIEW");
- public static FieldDescriptor IsMainPreview = new FieldDescriptor(string.Empty, "isMainPreview", "SHADERGRAPH_PREVIEW_MAIN");
public static FieldDescriptor LodCrossFade = new FieldDescriptor(string.Empty, "LodCrossFade", "_LODCROSSFADE 1"); // HD only
public static FieldDescriptor AlphaToMask = new FieldDescriptor(string.Empty, "AlphaToMask", "_ALPHATOMASK_ON 1"); // HD only
diff --git a/Packages/com.unity.shadergraph/Editor/Generation/Targets/BuiltIn/ShaderLibrary/Shadows.hlsl b/Packages/com.unity.shadergraph/Editor/Generation/Targets/BuiltIn/ShaderLibrary/Shadows.hlsl
index fd146904f3c..016c7e75d2c 100644
--- a/Packages/com.unity.shadergraph/Editor/Generation/Targets/BuiltIn/ShaderLibrary/Shadows.hlsl
+++ b/Packages/com.unity.shadergraph/Editor/Generation/Targets/BuiltIn/ShaderLibrary/Shadows.hlsl
@@ -326,8 +326,8 @@ half AdditionalLightRealtimeShadow(int lightIndex, float3 positionWS, half3 ligh
if (isPointLight)
{
// This is a point light, we have to find out which shadow slice to sample from
- float cubemapFaceId = CubeMapFaceID(-lightDirection);
- shadowSliceIndex += cubemapFaceId;
+ const int cubeFaceOffset = CubeMapFaceID(-lightDirection);
+ shadowSliceIndex += cubeFaceOffset;
}
#if USE_STRUCTURED_BUFFER_FOR_LIGHT_DATA
diff --git a/Packages/com.unity.shadergraph/Editor/Resources/DefaultHeatmapValues.asset b/Packages/com.unity.shadergraph/Editor/Resources/DefaultHeatmapValues.asset
index 57e36fbea71..4e0fb08b8e2 100644
--- a/Packages/com.unity.shadergraph/Editor/Resources/DefaultHeatmapValues.asset
+++ b/Packages/com.unity.shadergraph/Editor/Resources/DefaultHeatmapValues.asset
@@ -38,6 +38,8 @@ MonoBehaviour:
m_Category: 3
- m_NodeName: AnyNode
m_Category: 3
+ - m_NodeName: AppendVectorNode
+ m_Category: 1
- m_NodeName: ArccosineNode
m_Category: 6
- m_NodeName: ArcsineNode
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-SetPosition(Cone).md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-SetPosition(Cone).md
index 9d8c282d1e5..e04c383a8fe 100644
--- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-SetPosition(Cone).md
+++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-SetPosition(Cone).md
@@ -9,7 +9,7 @@ The ArcCone shape adds an arc property to the cone to determine its arc angle, i
This Block can calculate the position either from the ArcCone 's **Surface**, **Volume**, or **Thick Surface** where thickness can be relative to the size of the shape, or an absolute value.
-This Block also calculates a direction vector based on the calculated position on the shape, and stores it to the [direction attribute](Reference-Attributes.md), based on composition. This direction is equal to the normalized vector from the center of the cone to the calculated position.
+This Block also calculates a direction vector based on the calculated position on the shape, and stores it to the [direction attribute](Reference-Attributes.md), based on composition. This direction is equal to the normalized vector from the calculated position to the top of the cone.
Note: [Velocity from Direction and Speed](Block-VelocityFromDirectionAndSpeed.md) Blocks can then process the direction attribute.
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md
index 69da873c2b1..888adb88050 100644
--- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md
+++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md
@@ -9,7 +9,7 @@ The **Velocity from Direction And Speed (Random Direction)** Block calculates a
The Block then scales the final direction vector by a speed, and composes it with the velocity attribute.
-Example.gif)
+Example.gif)
## Block compatibility
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Blocks.md b/Packages/com.unity.visualeffectgraph/Documentation~/Blocks.md
index f59af1ad985..1484f6b0e0c 100644
--- a/Packages/com.unity.visualeffectgraph/Documentation~/Blocks.md
+++ b/Packages/com.unity.visualeffectgraph/Documentation~/Blocks.md
@@ -2,8 +2,6 @@
Blocks are Nodes that define the behavior of a [Context](Contexts.md). You can create and reorder Blocks within a Context and, when Unity plays a visual effect, Blocks execute from top to bottom.
-
-
You can use Blocks for many purposes, from simple value storage (for example, a random Color) to high-level complex operations such as Noise Turbulence, Forces, or Collisions.
## Adding Blocks
@@ -61,4 +59,4 @@ You can connect a graph logic to the activation port to accurately control under
Unity is able to determine if a Block is statically inactive. An inactive Block appears greyed out, and Unity removes it during compilation so it has zero runtime cost.
-**Note**: Subgraph Blocks don't have activation ports. To emulate an activation port, you can expose a boolean exposed property from the subgraph, and connect the property to the activation ports of the subgraph's internal Blocks.
\ No newline at end of file
+**Note**: Subgraph Blocks don't have activation ports. To emulate an activation port, you can expose a boolean property from the subgraph, and connect the property to the activation ports of the subgraph's internal Blocks.
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Block-Anatomy.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Block-Anatomy.png
deleted file mode 100644
index fc22a80fe54..00000000000
Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Block-Anatomy.png and /dev/null differ
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/VisualEffectProjectSettings.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/VisualEffectProjectSettings.png
deleted file mode 100644
index 8cc6a39dd11..00000000000
Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/VisualEffectProjectSettings.png and /dev/null differ
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-TriangleWave.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-TriangleWave.md
index 2f86d61f9a8..0c628988526 100644
--- a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-TriangleWave.md
+++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-TriangleWave.md
@@ -4,7 +4,7 @@ Menu Path : **Operator > Math > Wave > Triangle Wave**
The **Triangle Wave** Operator allows you to generate a value which linearly oscillates between a minimum and a maximum value based on a provided input and a set frequency.
-
+
If **Frequency** is set to 1, the blue dot goes in a straight line from **Min** to **Max** and back to **Min** over the span of **Input** going from 0 to 1.
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectProjectSettings.md b/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectProjectSettings.md
index 6e94b59e1bd..1882a68c6ec 100644
--- a/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectProjectSettings.md
+++ b/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectProjectSettings.md
@@ -2,8 +2,6 @@
Visual Effect Graph Project Settings is a section in Unity Project Settings Window. You can access these settings in **Edit > Project Settings > VFX**.
-
-
## Properties:
| Name | Description |
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/sample-learningTemplates.md b/Packages/com.unity.visualeffectgraph/Documentation~/sample-learningTemplates.md
index b4627b47629..e73d16f2978 100644
--- a/Packages/com.unity.visualeffectgraph/Documentation~/sample-learningTemplates.md
+++ b/Packages/com.unity.visualeffectgraph/Documentation~/sample-learningTemplates.md
@@ -95,6 +95,8 @@ To start exploring the content of this sample:
Each VFX covers a feature or aspect of VFX Graph, and illustrates what you can do with those features. Each VFX is accompanied by documentation and embedded explanations to help you in your learning journey.
+
+
### Sample Showcase window
To open the Sample Showcase window:
@@ -136,17 +138,15 @@ To open a VFX asset, you can do one of the following:
1. In the [Inspector](https://docs.unity3d.com/Manual/UsingTheInspector.html), find the Visual Effect component.
1. Select **Edit**.
-- From the **Sample Showcase window**:
+- From the [Sample Showcase window](#sample-showcase):
- 1. Open the [Sample Showcase window](#Inspector).
1. Use the drop-down menu or arrow button to select the desired VFX.
1. Select **Open VFX**.
-- From the **Template window**:
+- From the [Template window](Templates-window.md):
- 1. Open the [Sample Showcase window](#Inspector).
- 1. Use the drop-down menu or arrow button to select the desired VFX.
- 1. Select **Open VFX**.
+ 1. Select the desired VFX.
+ 1. Select **Create**.
diff --git a/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs b/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs
index c5148cbba60..0fcf1e7e8a6 100644
--- a/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs
+++ b/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs
@@ -35,7 +35,11 @@ public Variant(string name, string category, Type modelType, KeyValuePair();
}
- public virtual string GetDocumentationLink() => VFXHelpURLAttribute.GetHelpUrl(modelType);
+ public virtual string GetDocumentationLink()
+ {
+ DocumentationUtils.TryGetHelpURL(modelType, out var url);
+ return url;
+ }
public virtual VFXModel CreateInstance()
{
diff --git a/Packages/com.unity.visualeffectgraph/Editor/FilterPopup/VFXFilterWindow.cs b/Packages/com.unity.visualeffectgraph/Editor/FilterPopup/VFXFilterWindow.cs
index 51cff4fc496..bb1b84d4cf2 100644
--- a/Packages/com.unity.visualeffectgraph/Editor/FilterPopup/VFXFilterWindow.cs
+++ b/Packages/com.unity.visualeffectgraph/Editor/FilterPopup/VFXFilterWindow.cs
@@ -7,6 +7,7 @@
using Unity.UI.Builder;
using UnityEditor.UIElements;
using UnityEngine;
+using UnityEngine.Rendering;
using UnityEngine.UIElements;
namespace UnityEditor.VFX.UI
@@ -492,7 +493,7 @@ private void OnDocumentation()
var docLink = descriptor.GetDocumentationLink();
if (!string.IsNullOrEmpty(docLink))
{
- Help.BrowseURL(string.Format(docLink, VFXHelpURLAttribute.version));
+ Help.BrowseURL(string.Format(docLink, Documentation.version));
}
}
}
diff --git a/Packages/com.unity.visualeffectgraph/Editor/GraphView/Views/VFXHelpDropdownButton.cs b/Packages/com.unity.visualeffectgraph/Editor/GraphView/Views/VFXHelpDropdownButton.cs
index 9e373e8569e..ae66df92b3e 100644
--- a/Packages/com.unity.visualeffectgraph/Editor/GraphView/Views/VFXHelpDropdownButton.cs
+++ b/Packages/com.unity.visualeffectgraph/Editor/GraphView/Views/VFXHelpDropdownButton.cs
@@ -4,6 +4,7 @@
using UnityEditor.PackageManager.Requests;
using UnityEditor.PackageManager.UI;
using UnityEngine;
+using UnityEngine.Rendering;
using UnityEngine.UIElements;
namespace UnityEditor.VFX.UI
@@ -42,10 +43,8 @@ public VFXHelpDropdownButton(VFXView vfxView)
protected override void OnMainButton()
{
- if (string.IsNullOrEmpty(m_ManualUrlWithVersion))
- {
- m_ManualUrlWithVersion = string.Format(k_ManualUrl, VFXHelpURLAttribute.version);
- }
+ if (string.IsNullOrEmpty(m_ManualUrlWithVersion))
+ m_ManualUrlWithVersion = DocumentationInfo.GetDefaultPackageLink(Documentation.packageName);
GotoUrl(m_ManualUrlWithVersion);
}
diff --git a/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/Position/PositionCone.cs b/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/Position/PositionCone.cs
index e4995a8eeab..747d4a88ad5 100644
--- a/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/Position/PositionCone.cs
+++ b/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/Position/PositionCone.cs
@@ -55,10 +55,6 @@ public override IEnumerable GetParameters(PositionShape posi
var slope = new VFXExpressionATan(tanSlope);
yield return new VFXNamedExpression(CalculateVolumeFactor(positionBase.positionMode, baseRadius, thickness, 2.0f), "volumeFactor");
yield return new VFXNamedExpression(new VFXExpressionCombine(new VFXExpression[] { new VFXExpressionSin(slope), new VFXExpressionCos(slope) }), "sincosSlope");
-
-
- var invFinalTransform = VFXOperatorUtility.InverseTransposeTRS(transform);
- yield return new VFXNamedExpression(invFinalTransform, "arcCone_cone_inverseTranspose");
}
public override string GetSource(PositionShape positionBase)
@@ -114,8 +110,8 @@ public override string GetSource(PositionShape positionBase)
float3 currentAxisY = float3(sincosTheta, -sincosSlope.x);
finalPos = mul(arcCone_cone_transform, float4(finalPos.xzy, 1.0f)).xyz;
-currentAxisY = mul(arcCone_cone_inverseTranspose, float4(currentAxisY.xzy, 0.0f)).xyz;
-currentAxisZ = mul(arcCone_cone_inverseTranspose, float4(currentAxisZ.xzy, 0.0f)).xyz;
+currentAxisY = mul(arcCone_cone_transform, float4(currentAxisY.xzy, 0.0f)).xyz;
+currentAxisZ = mul(arcCone_cone_transform, float4(currentAxisZ.xzy, 0.0f)).xyz;
currentAxisY = normalize(currentAxisY);
currentAxisZ = normalize(currentAxisZ);
float3 currentAxisX = cross(currentAxisY, currentAxisZ);
diff --git a/Packages/com.unity.visualeffectgraph/Editor/Models/Contexts/Implementations/VFXStaticMeshOutput.cs b/Packages/com.unity.visualeffectgraph/Editor/Models/Contexts/Implementations/VFXStaticMeshOutput.cs
index 2c8ebcab598..29f23df7463 100644
--- a/Packages/com.unity.visualeffectgraph/Editor/Models/Contexts/Implementations/VFXStaticMeshOutput.cs
+++ b/Packages/com.unity.visualeffectgraph/Editor/Models/Contexts/Implementations/VFXStaticMeshOutput.cs
@@ -129,43 +129,44 @@ protected override IEnumerable inputProperties
{
var mat = ((VFXDataMesh)GetData()).GetOrCreateMaterial();
var propertyAttribs = new List