import UIKit
import RealityKit

class ViewController: UIViewController {

    @IBOutlet var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Load the "Box" scene from the "Experience" Reality File
        let boxAnchor = try! Experience.loadBox()

        // Add the box anchor to the scene
        arView.scene.anchors.append(boxAnchor)
    }
}
SceneKit
import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSCNViewDelegate {

    @IBOutlet var sceneView: ARSCNView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Set the view's delegate
        sceneView.delegate = self

        // Show statistics such as fps and timing information
        sceneView.showsStatistics = true

        // Create a new scene
        let scene = SCNScene(named: "art.scnassets/ship.scn")!

        // Set the scene to the view
        sceneView.scene = scene
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        // Create a session configuration
        let configuration = ARWorldTrackingConfiguration()

        // Run the view's session
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // Pause the view's session
        sceneView.session.pause()
    }

    // MARK: - ARSCNViewDelegate

    /*
    // Override to create and configure nodes for anchors added to the view's session.
    func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        let node = SCNNode()
        return node
    }
    */

    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user
    }

    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required
    }
}
“Type ‘MLModel’ has no member ‘__loadContents’” error in Xcode 12
Cause
The Core ML compiler in Xcode 12.0 GM generates code that uses symbols available only on macOS Big Sur, which causes this compilation error when you build a Catalyst or macOS-only app with Xcode 12.0.
Solution
In your target's build settings, set COREML_CODEGEN_LANGUAGE to “None” so that Xcode stops auto-generating the model class.
Open Terminal and go to the folder that contains your .mlmodel file.
Type “xcrun coremlcompiler generate <YourModel.mlmodel> --language Swift .”
This creates a <YourModel.swift> file in the same folder.
Open Xcode and add <YourModel.swift> to your project.
Open <YourModel.swift> and comment out the methods the compiler complains about.
In my case, I commented out the following two functions, recompiled the project, and it worked fine.
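For reference, the offenders are the availability-gated load helpers that call MLModel.__loadContents, a symbol that exists only on macOS 11 (Big Sur). A rough sketch of what one such generated method looks like, assuming a model class named YourModel (exact names and signatures vary by model and Xcode version; the second offender has a similar shape):

// Hypothetical sketch of an auto-generated helper that references
// MLModel.__loadContents. Comment out the generated equivalents of this.
class func load(contentsOf modelURL: URL,
                configuration: MLModelConfiguration = MLModelConfiguration(),
                completionHandler handler: @escaping (Swift.Result<YourModel, Error>) -> Void) {
    MLModel.__loadContents(of: modelURL, configuration: configuration) { model, error in
        if let error = error {
            handler(.failure(error))
        } else if let model = model {
            handler(.success(YourModel(model: model)))
        }
    }
}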
Chichibu is also famous for the anime “Anohana: The Flower We Saw That Day”. It aired in 2011, but many anime fans still visit Chichibu to see the real-life locations from its scenes.
When I run the TimeLapse script from the previous article, the following warnings appear in the terminal.
objc[3865]: Class QMacAutoReleasePoolTracker is implemented in both /usr/local/opt/qt/lib/QtCore.framework/Versions/5/QtCore (0x10a1e0a48) and /usr/local/lib/python3.8/site-packages/cv2/.dylibs/QtCore (0x10f5fe700). One of the two will be used. Which one is undefined.
objc[3865]: Class QT_ROOT_LEVEL_POOL__THESE_OBJECTS_WILL_BE_RELEASED_WHEN_QAPP_GOES_OUT_OF_SCOPE is implemented in both /usr/local/opt/qt/lib/QtCore.framework/Versions/5/QtCore (0x10a1e0ac0) and /usr/local/lib/python3.8/site-packages/cv2/.dylibs/QtCore (0x10f5fe778). One of the two will be used. Which one is undefined.
objc[3865]: Class KeyValueObserver is implemented in both /usr/local/opt/qt/lib/QtCore.framework/Versions/5/QtCore (0x10a1e0ae8) and /usr/local/lib/python3.8/site-packages/cv2/.dylibs/QtCore (0x10f5fe7a0). One of the two will be used. Which one is undefined.
objc[3865]: Class RunLoopModeTracker is implemented in both /usr/local/opt/qt/lib/QtCore.framework/Versions/5/QtCore (0x10a1e0b38) and /usr/local/lib/python3.8/site-packages/cv2/.dylibs/QtCore (0x10f5fe7f0). One of the two will be used. Which one is undefined.
These warnings mean that two copies of QtCore are loaded into the same process: Homebrew's Qt framework and the Qt libraries bundled inside the opencv-python (cv2) wheel, and it is undefined which copy each class comes from.
Capturing Live Photos requires an internal reconfiguration of the capture pipeline, which takes time and interrupts any capture already in progress. Before shooting your first Live Photo, enable Live Photo capture on the AVCapturePhotoOutput object to make sure the pipeline is configured appropriately.
let photoOutput = AVCapturePhotoOutput()

// Attempt to add the photo output to the session.
if captureSession.canAddOutput(photoOutput) {
    captureSession.sessionPreset = .photo
    captureSession.addOutput(photoOutput)
} else {
    throw CameraError.configurationFailed
}

// Configure the photo output's behavior.
photoOutput.isHighResolutionCaptureEnabled = true
photoOutput.isLivePhotoCaptureEnabled = photoOutput.isLivePhotoCaptureSupported

// Start the capture session.
captureSession.startRunning()

let photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
photoSettings.livePhotoMovieFileURL = // output url

// Shoot the Live Photo, using a custom class to handle capture delegate callbacks.
let captureProcessor = LivePhotoCaptureProcessor()
photoOutput.capturePhoto(with: photoSettings, delegate: captureProcessor)
import Photos

func saveLivePhotoToPhotosLibrary(stillImageData: Data, livePhotoMovieURL: URL) {
    PHPhotoLibrary.requestAuthorization { status in
        guard status == .authorized else { return }
        PHPhotoLibrary.shared().performChanges({
            // Add the captured photo's file data as the main resource for the Photos asset.
            let creationRequest = PHAssetCreationRequest.forAsset()
            creationRequest.addResource(with: .photo, data: stillImageData, options: nil)

            // Add the movie file URL as the Live Photo's paired video resource.
            let options = PHAssetResourceCreationOptions()
            options.shouldMoveFile = true
            creationRequest.addResource(with: .pairedVideo, fileURL: livePhotoMovieURL, options: options)
        }) { success, error in
            // Handle completion.
        }
    }
}
Capturing Live Photos adds two steps to the process shown in “Tracking Photo Capture Progress”. After delivery of the still photo result (step 4), the photo output reports the movie capture status (step 5) and then delivers the movie result (step 6). (Final cleanup becomes step 7.)
class LivePhotoCaptureProcessor: NSObject, AVCapturePhotoCaptureDelegate {

    // ... other PhotoCaptureDelegate methods and supporting properties ...

    // A handler to call when Live Photo capture begins and ends.
    var livePhotoStatusHandler: (Bool) -> () = { _ in }

    // A property for tracking in-progress captures and updating UI accordingly.
    var livePhotosInProgress = 0 {
        didSet {
            // Update the UI accordingly based on the value of this property
        }
    }

    // Call the handler when PhotoCaptureDelegate methods indicate Live Photo capture is in progress.
    func photoOutput(_ output: AVCapturePhotoOutput,
                     willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
        let capturingLivePhoto = (resolvedSettings.livePhotoMovieDimensions.width > 0 &&
                                  resolvedSettings.livePhotoMovieDimensions.height > 0)
        livePhotoStatusHandler(capturingLivePhoto)
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL,
                     resolvedSettings: AVCaptureResolvedPhotoSettings) {
        livePhotoStatusHandler(false)
    }
}
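The class above covers step 5 (status reporting). To complete the flow with step 6, you would also implement the delegate callbacks that deliver the still image data and the finished movie file, then hand both to saveLivePhotoToPhotosLibrary. A minimal sketch of methods you could add to LivePhotoCaptureProcessor, assuming a stillImageData property introduced here for illustration:

// Hypothetical additions to LivePhotoCaptureProcessor; the stored
// stillImageData property is an assumption, not from the original post.
var stillImageData: Data?

// Step 4: cache the still photo's data when it is delivered.
func photoOutput(_ output: AVCapturePhotoOutput,
                 didFinishProcessingPhoto photo: AVCapturePhoto,
                 error: Error?) {
    stillImageData = photo.fileDataRepresentation()
}

// Step 6: the movie result arrives; pair it with the cached still data and save.
func photoOutput(_ output: AVCapturePhotoOutput,
                 didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL,
                 duration: CMTime,
                 photoDisplayTime: CMTime,
                 resolvedSettings: AVCaptureResolvedPhotoSettings,
                 error: Error?) {
    guard error == nil, let stillImageData = stillImageData else { return }
    saveLivePhotoToPhotosLibrary(stillImageData: stillImageData, livePhotoMovieURL: outputFileURL)
}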