I have a floor node on which I need to cast a shadow from a directional light. The node needs to be transparent (it is used in an AR environment). This works fine when I use ARKit, but the same setup in plain SceneKit shows no shadow or reflection. How can I cast a shadow like this in SceneKit?
The problem with SceneKit is caused by the fact that I set sceneView.backgroundColor = .clear, but I need this behaviour in this app. Can it somehow be avoided?
Sample code demonstrating the issue (it works only on a device, not in the simulator):
@IBOutlet weak var sceneView: SCNView! {
    didSet {
        sceneView.scene = SCNScene()

        // Camera
        let cameraNode = SCNNode()
        cameraNode.camera = SCNCamera()
        sceneView.pointOfView = cameraNode

        // Rotating cube that should cast the shadow
        let testNode = SCNNode(geometry: SCNBox(width: 1, height: 1, length: 1, chamferRadius: 0))
        testNode.position = SCNVector3(x: 0, y: 0, z: -5)
        sceneView.scene!.rootNode.addChildNode(testNode)

        let animation = SCNAction.rotateBy(x: 0, y: .pi, z: 0, duration: 3.0)
        testNode.runAction(SCNAction.repeatForever(animation), completionHandler: nil)

        // Invisible floor that should only receive the shadow
        let floor = SCNFloor()
        floor.firstMaterial!.colorBufferWriteMask = []
        floor.firstMaterial!.readsFromDepthBuffer = true
        floor.firstMaterial!.writesToDepthBuffer = true
        floor.firstMaterial!.lightingModel = .constant

        let floorNode = SCNNode(geometry: floor)
        floorNode.position = SCNVector3(x: 0, y: -2, z: 0)
        sceneView.scene!.rootNode.addChildNode(floorNode)

        // Directional light that casts the shadow
        let light = SCNLight()
        light.type = .directional
        light.shadowColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.5)
        light.color = UIColor.white
        light.castsShadow = true
        light.automaticallyAdjustsShadowProjection = true
        light.shadowMode = .deferred

        let sunLightNode = SCNNode()
        sunLightNode.position = SCNVector3(x: 1_000, y: 1_000, z: 0)
        sunLightNode.rotation = SCNVector4(x: 1, y: 0, z: 0, w: .pi * 1.5)
        sunLightNode.light = light
        sceneView.scene!.rootNode.addChildNode(sunLightNode)

        // Omni light so the cube is not completely dark
        let omniLightNode: SCNNode = {
            let omniLightNode = SCNNode()
            let light: SCNLight = {
                let light = SCNLight()
                light.type = .omni
                return light
            }()
            omniLightNode.light = light
            return omniLightNode
        }()
        sceneView.scene!.rootNode.addChildNode(omniLightNode)
    }
}

override func viewDidLoad() {
    super.viewDidLoad()

    let tapGR = UITapGestureRecognizer(target: self, action: #selector(toggleTransparent))
    view.addGestureRecognizer(tapGR)
}

@objc func toggleTransparent() {
    transparent = !transparent
}

var transparent = false {
    didSet {
        sceneView.backgroundColor = transparent ? .clear : .white
    }
}
Here is the same example for macOS, built on top of the SceneKit game project template:
import SceneKit
import QuartzCore

class GameViewController: NSViewController {

    override func viewDidLoad() {
        super.viewDidLoad()

        // create a new scene
        let scene = SCNScene(named: "art.scnassets/ship.scn")!

        // create and add a camera to the scene
        let cameraNode = SCNNode()
        cameraNode.camera = SCNCamera()
        scene.rootNode.addChildNode(cameraNode)

        // place the camera
        cameraNode.position = SCNVector3(x: 0, y: 0, z: 15)

        let testNode = SCNNode(geometry: SCNBox(width: 1, height: 1, length: 1, chamferRadius: 0))
        testNode.position = SCNVector3(x: 0, y: 0, z: -5)
        scene.rootNode.addChildNode(testNode)

        let animation = SCNAction.rotateBy(x: 0, y: .pi, z: 0, duration: 3.0)
        testNode.runAction(SCNAction.repeatForever(animation), completionHandler: nil)

        let floor = SCNFloor()
        floor.firstMaterial!.colorBufferWriteMask = []
        floor.firstMaterial!.readsFromDepthBuffer = true
        floor.firstMaterial!.writesToDepthBuffer = true
        floor.firstMaterial!.lightingModel = .constant

        let floorNode = SCNNode(geometry: floor)
        floorNode.position = SCNVector3(x: 0, y: -2, z: 0)
        scene.rootNode.addChildNode(floorNode)

        let light = SCNLight()
        light.type = .directional
        light.shadowColor = NSColor(red: 0, green: 0, blue: 0, alpha: 0.5)
        light.color = NSColor.white
        light.castsShadow = true
        light.automaticallyAdjustsShadowProjection = true
        light.shadowMode = .deferred

        let sunLightNode = SCNNode()
        sunLightNode.position = SCNVector3(x: 1_000, y: 1_000, z: 0)
        sunLightNode.rotation = SCNVector4(x: 1, y: 0, z: 0, w: .pi * 1.5)
        sunLightNode.light = light
        scene.rootNode.addChildNode(sunLightNode)

        let omniLightNode: SCNNode = {
            let omniLightNode = SCNNode()
            let light: SCNLight = {
                let light = SCNLight()
                light.type = .omni
                return light
            }()
            omniLightNode.light = light
            return omniLightNode
        }()
        scene.rootNode.addChildNode(omniLightNode)

        // retrieve the SCNView
        let scnView = self.view as! SCNView

        // set the scene to the view
        scnView.scene = scene

        // allows the user to manipulate the camera
        scnView.allowsCameraControl = true

        // configure the view
        scnView.backgroundColor = .clear
        // scnView.backgroundColor = .white // shadow works in this mode, but I need it to be clear
    }
}
Sample projects:
macOS: https://www.dropbox.com/s/1o50mbgzg4gc0fg/Test_macOS.zip?dl=1
iOS: https://www.dropbox.com/s/fk71oay1sopc1vp/Test.zip?dl=1
In the macOS project you can change backgroundColor in the last line of the view controller. I need it to be clear so that I can show a camera preview under it.
The pictures below show what it looks like when sceneView.backgroundColor is white, and below that, when it is clear. In the clear version there is no shadow.
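To make the intent concrete, the kind of setup I have in mind is roughly the following (an illustrative sketch, not code from the sample projects): an AVCaptureVideoPreviewLayer sitting behind the transparent SCNView.

import AVFoundation

// Sketch: show the device camera underneath the (clear) SCNView.
let session = AVCaptureSession()
if let camera = AVCaptureDevice.default(for: .video),
   let input = try? AVCaptureDeviceInput(device: camera),
   session.canAddInput(input) {
    session.addInput(input)
}

let previewLayer = AVCaptureVideoPreviewLayer(session: session)
previewLayer.frame = view.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.insertSublayer(previewLayer, at: 0) // below the SCNView's layer
session.startRunning()

sceneView.backgroundColor = .clear // must stay clear, otherwise the preview is hidden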
First: you need to attach the floor to the scene as a node, not as a bare geometry object.
let floor = SCNNode()
floor.geometry = SCNFloor()
floor.geometry?.firstMaterial!.colorBufferWriteMask = []
floor.geometry?.firstMaterial!.readsFromDepthBuffer = true
floor.geometry?.firstMaterial!.writesToDepthBuffer = true
floor.geometry?.firstMaterial!.lightingModel = .constant
scene.rootNode.addChildNode(floor)
Shadow on an invisible SCNFloor():
Shadow on a visible SCNPlane(), with our camera under the SCNFloor():
To get a transparent shadow you need to set a shadow color, not the object's transparency itself.
Second: the shadow color must be set like this for macOS:
lightNode.light!.shadowColor = NSColor(calibratedRed: 0,
                                       green: 0,
                                       blue: 0,
                                       alpha: 0.5)
...and for iOS it looks like this:
lightNode.light!.shadowColor = UIColor(white: 0, alpha: 0.5)
The alpha component here (alpha: 0.5) is the opacity of the shadow, and the colour component (white: 0) makes the shadow black.
P.S. On switching sceneView.backgroundColor between the .clear and .white colours:
In this particular case I can't get a robust shadow when sceneView.backgroundColor = .clear, because I have to switch between RGBA = 1, 1, 1, 1 (white mode: white colour, alpha = 1) and RGBA = 0, 0, 0, 0 (clear mode: black colour, alpha = 0). To see a semi-transparent shadow on the background, the components should be RGB = 1, 1, 1 and A = 0.5, but those values whiten the image due to SceneKit's internal compositing mechanism. And when I set RGB = 1, 1, 1 and A = 0.02, the shadow is very feeble.
Here's a tolerable workaround for now (see the SOLUTION section below for a better approach):
@objc func toggleTransparent() {
    transparent = !transparent
}

var transparent = false {
    didSet {
        // this shadow is very FEEBLE and it's whitening the BG image a little bit
        sceneView.backgroundColor = transparent ? UIColor(white: 1, alpha: 0.02) : .white
    }
}

let light = SCNLight()
light.type = .directional

if transparent == false {
    light.shadowColor = UIColor(white: 0, alpha: 0.9)
}
If I set light.shadowColor = UIColor(white: 0, alpha: 1), I get a satisfactory shadow on the BG image, but a solid black shadow on white.
SOLUTION:
You should grab a render of the 3D objects as a premultiplied RGBA image with its useful alpha channel. After that, you can composite the RGBA image of the cube and its shadow over the background image (the photo of nature) in another view using the classical OVER compositing operation.
Here's the formula for the OVER operation:
(RGB1 * A1) + (RGB2 * (1 – A1))
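Expressed as code, the formula looks roughly like this (a minimal sketch; the RGBA type and over(_:_:) function are hypothetical names, assuming straight-alpha inputs and an opaque background):

struct RGBA {
    var r, g, b, a: Double   // straight (non-premultiplied) components in 0...1
}

// OVER: result = (RGB1 * A1) + (RGB2 * (1 - A1)), applied per colour channel
func over(_ fg: RGBA, _ bg: RGBA) -> RGBA {
    func channel(_ c1: Double, _ c2: Double) -> Double {
        c1 * fg.a + c2 * (1 - fg.a)
    }
    return RGBA(r: channel(fg.r, bg.r),
                g: channel(fg.g, bg.g),
                b: channel(fg.b, bg.b),
                a: 1)        // the background is assumed to be opaque
}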
It's been a while since this was posted, but maybe someone will find this alternative solution useful. I encountered a similar situation, and what I ended up doing was rendering in multiple passes via SCNTechnique. First I rendered the floor with a solid white diffuse, and then I rendered the rest of the scene without the floor. To do this I set the categoryBitMask of my SCNFloor to 3 and left the other nodes at the default value of 1.
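A sketch of that node setup (my reconstruction from the description above, not the project's exact code; the "floor-node" name matches the "node" key in the technique below):

// White floor that only the floor pass draws; the shadow darkens it,
// and the merge shader later turns that darkness into alpha.
let floorNode = SCNNode(geometry: SCNFloor())
floorNode.name = "floor-node"
floorNode.geometry?.firstMaterial?.diffuse.contents = UIColor.white
floorNode.categoryBitMask = 3   // bit 2 is set, so excludeCategoryMask: 2 skips it in the scene pass
scene.rootNode.addChildNode(floorNode)

// All other nodes keep the default categoryBitMask of 1.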
Next I created my SCNTechnique with this definition, which renders the floor and the rest of the scene into separate buffers and then combines them into the final image:
self.sceneView.technique = SCNTechnique(dictionary: [
    "passes" : [
        "store-floor" : [
            "draw"    : "DRAW_NODE",
            "node"    : "floor-node",
            "inputs"  : [],
            "outputs" : [ "color" : "color_floor" ]
        ],
        "store-scene" : [
            "draw"                : "DRAW_SCENE",
            "excludeCategoryMask" : 2,
            "inputs"              : [],
            "outputs"             : [ "color" : "color_scene" ]
        ],
        "recall-scene" : [
            "draw"                : "DRAW_QUAD",
            "metalVertexShader"   : "vertex_tecnique_basic",
            "metalFragmentShader" : "fragment_tecnique_merge",
            "inputs"  : [ "alphaTex" : "color_floor", "sceneTex" : "color_scene" ],
            "outputs" : [ "color" : "COLOR" ]
        ]
    ],
    "symbols" : [
        "vertexSymbol" : [ "semantic" : "vertex" ]
    ],
    "targets" : [
        "color_floor" : [ "type" : "color" ],
        "color_scene" : [ "type" : "color" ]
    ],
    "sequence" : [
        "store-floor",
        "store-scene",
        "recall-scene"
    ]
])
Next is the Metal shader code that takes those two buffers and combines them; the derived alpha value ranges from 0 for white to 1 for black.
#include <metal_stdlib>
#include <SceneKit/scn_metal>
using namespace metal;

struct TechniqueVertexIn
{
    float4 position [[attribute(SCNVertexSemanticPosition)]];
};

struct TechniqueVertexOut
{
    float4 framePos [[position]];
    float2 centeredLoc;
};

constexpr sampler s = sampler(coord::normalized, address::repeat, filter::linear);

vertex TechniqueVertexOut vertex_tecnique_basic(TechniqueVertexIn in [[stage_in]],
                                                constant SCNSceneBuffer& scnFrame [[buffer(0)]])
{
    TechniqueVertexOut vert;
    vert.framePos = float4(in.position.x, in.position.y, 0.0, 1.0);
    vert.centeredLoc = float2((in.position.x + 1.0) * 0.5, (in.position.y + 1.0) * -0.5);
    return vert;
}

fragment half4 fragment_tecnique_merge(TechniqueVertexOut vert [[stage_in]],
                                       texture2d<float> alphaTex [[texture(0)]],
                                       texture2d<float> sceneTex [[texture(1)]])
{
    float4 alphaColor = alphaTex.sample(s, vert.centeredLoc);
    float4 sceneColor = sceneTex.sample(s, vert.centeredLoc);

    float alpha = 1.0 - max(max(alphaColor.r, alphaColor.g), alphaColor.b); // since the floor should be white, could just pick a channel
    alpha *= alphaColor.a;
    alpha = max(sceneColor.a, alpha);

    return half4(half3(sceneColor.rgb * alpha), alpha);
}
Lastly here's an example of what that ends up looking like with all the pieces put together.