RealityKit & Reality Composer Pro

Mar 27 2024 · Swift 5.10, visionOS 1.1, Xcode 15.3

Lesson 04: Using Reality Composer Pro to Create AR Experiences

Demo

Start with the app you’ve been building or use the QHoops app in the Starter folder.
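The snippets in this demo all plug into the immersive view's RealityView. For reference, here is a minimal sketch of that scaffold, assuming the view is named ImmersiveView and that realityKitContentBundle is exposed by a RealityKitContent module (everything except the Immersive scene name and bundle is an assumption):

import SwiftUI
import RealityKit
import RealityKitContent

struct ImmersiveView: View {
  var body: some View {
    RealityView { content in
      // Load the Immersive scene from the Reality Composer Pro package and add it.
      if let scene = try? await Entity(named: "Immersive", in: realityKitContentBundle) {
        content.add(scene)
      }
    }
  }
}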

Add a tap gesture that switches the tapped entity's physics body to dynamic and gives it an upward, forward velocity:

var tapGesture: some Gesture {
  TapGesture()
    .targetedToAnyEntity()
    .onEnded { value in
      // Let physics take over the tapped entity so gravity and forces affect it.
      value.entity.components[PhysicsBodyComponent.self]?.mode = .dynamic
      // Give it an upward, forward push.
      value.entity.components[PhysicsMotionComponent.self]?.linearVelocity = [0, 7, -5]
    }
}
Attach the gesture to the RealityView:

.gesture(tapGesture)
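targetedToAnyEntity() only delivers taps to entities that carry both an InputTargetComponent and a CollisionComponent. If those aren't already set up in Reality Composer Pro, here is a sketch of adding them in the make closure, assuming the tappable ball entity is named "Ball" and is roughly sphere-shaped:

if let ball = content.entities.first?.findEntity(named: "Ball") {
  // Make the entity hit-testable so SwiftUI gestures can target it.
  ball.components.set(InputTargetComponent())
  ball.components.set(CollisionComponent(shapes: [.generateSphere(radius: 0.12)]))
}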
Add state for the goal entity, the collision-event subscription, and a flag that records whether a goal has been scored:

@State private var goalEntity: Entity?
@State private var goalScored: EventSubscription?
@State private var goalCelebration: Bool = false
In the RealityView's make closure, find the Goal entity and subscribe to collision events that begin on it:

goalEntity = content.entities.first?.findEntity(named: "Goal")
goalScored = content.subscribe(to: CollisionEvents.Began.self, on: goalEntity) { collisionEvent in
  // Log which two entities collided, then flag the goal for the update closure.
  print("Goal detected: \(collisionEvent.entityA.name) and \(collisionEvent.entityB.name)")
  goalCelebration = true
}
Make the goal entity invisible; it still participates in collision detection:

goalEntity?.components.set(OpacityComponent(opacity: 0.0))
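The subscription fires for any collision that begins on the goal entity. If other objects in the scene can touch the goal, you might filter the event so only the ball counts; a sketch, assuming the ball entity is named "Ball":

goalScored = content.subscribe(to: CollisionEvents.Began.self, on: goalEntity) { collisionEvent in
  // Ignore collisions that don't involve the ball.
  guard collisionEvent.entityA.name == "Ball" || collisionEvent.entityB.name == "Ball" else { return }
  goalCelebration = true
}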
Add state for the confetti emitter, find it in the scene, and hide it until a goal is scored:

@State private var confetti: Entity?
confetti = content.entities.first?.findEntity(named: "ConfettiEmitter")
confetti?.components.set(OpacityComponent(opacity: 0.0))
In the RealityView's update closure, reveal the confetti once a goal has been detected:

} update: { content in
  if content.entities.first != nil, goalCelebration {
    // Reveal the confetti emitter when a goal has been scored.
    confetti?.components.set(OpacityComponent(opacity: 1.0))
  }
}
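Revealing the emitter with an OpacityComponent is what the demo uses. If the ConfettiEmitter entity also carries a ParticleEmitterComponent, an alternative sketch, assuming that component is present, is to toggle emission directly in the update closure:

if goalCelebration, var emitter = confetti?.components[ParticleEmitterComponent.self] {
  // Start spawning particles once a goal has been scored.
  emitter.isEmitting = true
  confetti?.components.set(emitter)
}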
Add state for the cheering audio, find the ChannelAudio entity, load the audio resource from the Reality Composer Pro scene, and play it:

@State private var cheering: Entity?
@State private var audio: AudioFileResource?
cheering = content.entities.first?.findEntity(named: "ChannelAudio")
audio = try? await AudioFileResource(named: "/Root/cheering_m4a", from: "Immersive.usda", in: realityKitContentBundle)
// Prepare the audio on the spatial audio entity and start playback.
if let audio, let audioPlaybackControl = cheering?.prepareAudio(audio) {
  audioPlaybackControl.play()
}
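As shown above, playback starts as soon as the audio is prepared. To tie the sound to a goal instead, you could trigger playback from the collision subscription; a sketch combining the pieces above, using the same names as the demo:

goalScored = content.subscribe(to: CollisionEvents.Began.self, on: goalEntity) { collisionEvent in
  goalCelebration = true
  // Play the crowd cheer from the ChannelAudio entity when a goal is detected.
  if let audio, let controller = cheering?.prepareAudio(audio) {
    controller.play()
  }
}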