I previously built an AR project with a few small features: adding, deleting, moving and rotating models. I won't repeat the API explanations that are easy to find online.
1. Creating the ARSCNView
- (ARSCNView *)arSCNView {
    if (!_arSCNView) {
        _arSCNView = [[ARSCNView alloc] initWithFrame:self.view.bounds];
        _arSCNView.delegate = self;
        _arSCNView.session = self.arSession;
        // Show the raw feature points ARKit is tracking (useful while debugging).
        _arSCNView.debugOptions = ARSCNDebugOptionShowFeaturePoints;
        _arSCNView.antialiasingMode = SCNAntialiasingModeMultisampling4X;
        // Invisible plane node used as the target of the lookAt constraint below.
        SCNPlane *documentPlane = [SCNPlane planeWithWidth:30 height:30];
        documentPlane.firstMaterial.diffuse.contents = [UIColor clearColor];
        self.documentNode = [SCNNode nodeWithGeometry:documentPlane];
        self.documentNode.hidden = YES;
        [_arSCNView.scene.rootNode addChildNode:self.documentNode];
        // setup camera
        SCNLookAtConstraint *constraint = [SCNLookAtConstraint lookAtConstraintWithTarget:self.documentNode];
        constraint.gimbalLockEnabled = YES;
        _arSCNView.scene.rootNode.constraints = @[constraint];
    }
    return _arSCNView;
}
- (ARSession *)arSession {
    if (!_arSession) {
        _arSession = [[ARSession alloc] init];
        _arSession.delegate = self;
    }
    return _arSession;
}
- (ARConfiguration *)arConfiguration {
    if (!_arConfiguration) {
        /* ARWorldTrackingConfiguration
         * Provides a high-quality AR experience: the rear camera precisely tracks
         * the device's position and orientation, and plane detection is supported.
         */
        ARWorldTrackingConfiguration *arWTConfiguration = [[ARWorldTrackingConfiguration alloc] init];
        arWTConfiguration.planeDetection = ARPlaneDetectionHorizontal;
        arWTConfiguration.lightEstimationEnabled = true;
        _arConfiguration = arWTConfiguration;
    }
    return _arConfiguration;
}
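To complete the picture (this part is not in the original snippet): a minimal sketch of starting and pausing the session with the configuration above, assuming the getters live on the view controller. The ARKit calls themselves are standard.
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    // Run (or resume) world tracking with the lazily built configuration.
    [self.arSession runWithConfiguration:self.arConfiguration];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // Pause tracking when the AR screen goes away.
    [self.arSession pause];
}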
2. Adding a model (currently using .obj models)
Model scale is adjusted with node.transform = SCNMatrix4MakeScale(0.001, 0.001, 0.001); tune the factor to your model's actual size. The models here were not made for this project, so the scale difference is large.
The model's position is computed with SCNVector3Make from the hit-test result; there are plenty of explanations of this calculation online (a sketch follows below).
path: a simple on-disk store for the downloaded model files.
The idea here: before the model finishes downloading, show a black box of the same size as the model, as a small user-experience optimization; once the download completes, the real model appears in place of the box. The orientation the model appears at can be agreed with the 3D team when the model is authored, so the user needs fewer adjustments after placement. Textures are cached directly with SDWebImage.
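As a rough sketch of that position calculation (the gesture handler and the call into the placement method are my own assumptions; hitTest:types: is ARSCNView's standard hit-testing API):
- (void)handleTap:(UITapGestureRecognizer *)gesture {
    CGPoint point = [gesture locationInView:self.arSCNView];
    // Prefer detected horizontal planes, fall back to raw feature points.
    NSArray<ARHitTestResult *> *results =
        [self.arSCNView hitTest:point
                          types:ARHitTestResultTypeExistingPlaneUsingExtent | ARHitTestResultTypeFeaturePoint];
    ARHitTestResult *hit = results.firstObject;
    if (!hit) { return; }
    // The fourth column of worldTransform is the hit position in world space.
    SCNVector3 position = SCNVector3Make(hit.worldTransform.columns[3].x,
                                         hit.worldTransform.columns[3].y,
                                         hit.worldTransform.columns[3].z);
    NSLog(@"tap hit world position: (%f, %f, %f)", position.x, position.y, position.z);
    // [self nodeWithMDLObjectWithMaterial:hit contentFrom:self.currentShopModel]; // currentShopModel is a hypothetical property
}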
- (void)nodeWithMDLObjectWithMaterial:(ARHitTestResult *)hitResult contentFrom:(ShopModel *)sModel {
    // Placeholder box with the same footprint as the real model, shown while the .obj downloads.
    SCNBox *cube = [SCNBox boxWithWidth:[sModel.size[0] floatValue]
                                 height:[sModel.size[2] floatValue]
                                 length:[sModel.size[1] floatValue]
                          chamferRadius:0];
    SCNNode *node = [SCNProduct nodeWithGeometry:cube];
    for (SCNMaterial *mat in cube.materials) {
        mat.diffuse.contents = UIColorFromRGB(0x434C5D);
    }
    node.transform = SCNMatrix4MakeScale(0.001, 0.001, 0.001);
    node.physicsBody = [SCNPhysicsBody bodyWithType:SCNPhysicsBodyTypeDynamic shape:nil];
    node.physicsBody.mass = 2.0;
    node.physicsBody.damping = 0.0;
    node.physicsBody.categoryBitMask = CollisionCategoryCube;
    node.categoryBitMask = MastTypeCube;
    // Place the box in front of the camera, at the height of the hit-test result.
    ARFrame *frame = self.arSCNView.session.currentFrame;
    SCNMatrix4 mat = SCNMatrix4FromMat4(frame.camera.transform);
    SCNVector3 dir = SCNVector3Make(-1 * mat.m31, -1 * mat.m32, -1 * mat.m33);
    node.position = SCNVector3Make(mat.m41 + dir.x,
                                   hitResult.worldTransform.columns[3].y + 0.4, // + insertionYOffset,
                                   mat.m43 + dir.z);
    node.opacity = 0.8;
    [self.gestureControl nodeAddObjectFrom:(SCNProduct *)node andTag:1 andIsHide:NO];
    [self.arSCNView.scene.rootNode addChildNode:node];
    [self.nodeArr addObject:node];
    // self.activityIndicator.hidden = NO;
    // [self.activityIndicator startAnimating];
    NSString *strUrl = sModel.texture;
    SDWebImageManager *manager = [SDWebImageManager sharedManager];
    NSString *key = [manager cacheKeyForURL:[NSURL URLWithString:strUrl]];
    SDImageCache *cache = [SDImageCache sharedImageCache];
    NSString *path = [dataPathDic stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.obj", sModel.furID]];
    dispatch_async(dispatch_get_main_queue(), ^{
        if ([self isHaveObj:path] && [cache imageFromDiskCacheForKey:key]) {
            // Both the .obj file and its texture are already cached locally.
            [self nodeMaterilWithImage:[cache imageFromDiskCacheForKey:key] andHit:hitResult andPath:path andCube:node];
        } else {
            if (![self isHaveObj:path]) {
                [UtilityFunc DownloadFilesTo:sModel.obj andPath:path finished:^(NSError *error, NSDictionary *resultDict) {
                    if (!error && resultDict) {
                        // Set the correct content with the Sprite
                        [[SDWebImageDownloader sharedDownloader] downloadImageWithURL:[NSURL URLWithString:sModel.texture] options:SDWebImageDownloaderHighPriority progress:nil completed:^(UIImage *image, NSData *data, NSError *error, BOOL finished) {
                            [self nodeMaterilWithImage:image andHit:hitResult andPath:path andCube:node];
                        }];
                    } else {
                        // Download failed: stop and hide the spinner.
                        [self.activityIndicator stopAnimating];
                        self.activityIndicator.hidden = YES;
                    }
                }];
            } else {
                // The .obj is cached but the texture isn't: fetch the texture only.
                [[SDWebImageDownloader sharedDownloader] downloadImageWithURL:[NSURL URLWithString:sModel.texture] options:SDWebImageDownloaderHighPriority progress:nil completed:^(UIImage *image, NSData *data, NSError *error, BOOL finished) {
                    [self nodeMaterilWithImage:image andHit:hitResult andPath:path andCube:node];
                }];
            }
        }
    });
}
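CollisionCategoryCube and MastTypeCube used above are project-specific constants that aren't shown in the post; one plausible declaration (names kept, values are assumptions):
typedef NS_OPTIONS(NSUInteger, CollisionCategory) {
    CollisionCategoryPlane = 1 << 0,   // e.g. detected floor planes (assumed)
    CollisionCategoryCube  = 1 << 1    // placed product nodes
};

typedef NS_ENUM(NSUInteger, MastType) {
    MastTypeNone = 0,
    MastTypeCube = 2                   // used to pick out product nodes in hit tests
};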
- (void)nodeMaterilWithImage:(UIImage *)image andHit:(ARHitTestResult *)hitResult andPath:(NSString *)path andCube:(SCNNode *)Knode {
    // Load the downloaded .obj via Model I/O (nodeWithMDLObject: requires #import <SceneKit/ModelIO.h>).
    NSURL *url = [NSURL fileURLWithPath:path];
    MDLAsset *asset = [[MDLAsset alloc] initWithURL:url];
    SCNNode *node = [SCNProduct nodeWithMDLObject:[asset objectAtIndex:0]];
    for (SCNMaterial *material in node.geometry.materials) {
        material.diffuse.contents = image;
    }
    node.transform = SCNMatrix4MakeScale(0.001, 0.001, 0.001);
    node.physicsBody = [SCNPhysicsBody bodyWithType:SCNPhysicsBodyTypeDynamic shape:nil];
    node.physicsBody.mass = 2.0;
    node.physicsBody.damping = 0.0;
    node.physicsBody.categoryBitMask = CollisionCategoryCube;
    node.categoryBitMask = MastTypeCube;
    // Swap the placeholder box for the real model at the same position.
    node.position = Knode.position;
    [self.nodeArr removeObject:Knode];
    [Knode removeFromParentNode];
    [self.gestureControl.timer invalidate];
    self.gestureControl.timer = nil;
    [self.arSCNView.scene.rootNode addChildNode:node];
    [self.nodeArr addObject:node];
    [self.gestureControl nodeAddObjectFrom:(SCNProduct *)node andTag:2 andIsHide:YES];
}
- (void)deleteFile {
    // Clear out everything in the Documents directory.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSString *folder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    NSArray *fileList = [fileManager contentsOfDirectoryAtPath:folder error:NULL];
    for (NSString *file in fileList) {
        NSLog(@"file = %@", file);
        NSString *path = [folder stringByAppendingPathComponent:file];
        NSLog(@"resolved path = %@", path);
        [fileManager removeItemAtPath:path error:nil];
    }
}
- (BOOL)isHaveObj:(NSString *)testPath {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    BOOL result = [fileManager fileExistsAtPath:testPath];
    return result;
}
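dataPathDic (the "simple storage" mentioned in section 2) isn't declared in the snippet either; a minimal sketch of the kind of cache folder it could be, assuming a file-level static:
static NSString *dataPathDic = nil;

+ (void)initialize {
    // One-time setup of a folder for downloaded .obj files (location is an assumption).
    NSString *caches = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) lastObject];
    dataPathDic = [caches stringByAppendingPathComponent:@"ARModels"];
    [[NSFileManager defaultManager] createDirectoryAtPath:dataPathDic
                               withIntermediateDirectories:YES
                                                attributes:nil
                                                     error:nil];
}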
3. Configuring material properties
+ (SCNMaterial *)materialWithImageName:(NSString *)imageName {
    // The material is cached so the PBR setup only happens once.
    SCNMaterial *mat = materials[@"mat"];
    if (mat) {
        return mat;
    }
    mat = [SCNMaterial new];
    mat.lightingModelName = SCNLightingModelPhysicallyBased;
    UIImage *img = [UIImage imageNamed:@"keji"];
    mat.diffuse.contents = img;
    // mat.diffuse.contents = [UIImage imageNamed:@"1"];
    mat.diffuse.wrapS = SCNWrapModeRepeat;
    mat.diffuse.wrapT = SCNWrapModeRepeat;
    mat.roughness.wrapS = SCNWrapModeRepeat;
    mat.roughness.wrapT = SCNWrapModeRepeat;
    mat.metalness.wrapS = SCNWrapModeRepeat;
    mat.metalness.wrapT = SCNWrapModeRepeat;
    mat.normal.wrapS = SCNWrapModeRepeat;
    mat.normal.wrapT = SCNWrapModeRepeat;
    materials[@"mat"] = mat;
    return mat;
}
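The materials dictionary used as a cache above isn't declared in the snippet; a minimal sketch, assuming a file-level static in the same class (MaterialHelper is a placeholder class name):
// In-memory cache so the PBR material is only built once per key.
static NSMutableDictionary<NSString *, SCNMaterial *> *materials = nil;

+ (void)initialize {
    if (self == [MaterialHelper class]) {
        materials = [NSMutableDictionary dictionary];
    }
}
Applying it to a placed node is then just node.geometry.materials = @[[MaterialHelper materialWithImageName:@"keji"]];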
4. Rotating and moving the model (you can look up DHVector2D online; an atan2-based alternative is sketched after the code)
- (void)translate:(SCNVector3)delta andDis:(CGFloat)dis and:(SCNVector3)vecCurrent and:(SCNVector3)vecLast {
    MovementMode mode = Translate;
    if (self.activeComponent) {
        mode = self.activeComponent.movementMode;
    }
    if (mode == Translate) {
        // Move the selected product in the XZ plane, keeping its current height.
        self.selectedProduct.position = SCNVector3Make(self.selectedProduct.position.x + delta.x,
                                                       self.selectedProduct.worldPosition.y,
                                                       self.selectedProduct.position.z + delta.z);
        for (GizmoComponent *component in _gizmoComponents) {
            component.position = SCNVector3Make(self.selectedProduct.position.x, component.position.y, self.selectedProduct.position.z);
        }
    } else {
        // Rotate: angle between the current and previous drag vectors around the active component.
        float angle = 0;
        for (GizmoComponent *component in _gizmoComponents) {
            _vector1 = [[DHVector2D alloc] initWithCoordinateExpression:CGPointMake(vecCurrent.x - self.activeComponent.worldPosition.x, vecCurrent.z - self.activeComponent.worldPosition.z)];
            _vector2 = [[DHVector2D alloc] initWithCoordinateExpression:CGPointMake(vecLast.x - self.activeComponent.worldPosition.x, vecLast.z - self.activeComponent.worldPosition.z)];
            angle = [_vector1 antiClockwiseAngleToVector:_vector2];
            NSLog(@"===========angle:%f=============", angle);
            [component runAction:[SCNAction rotateByAngle:-angle aroundAxis:SCNVector3Make(0, 1, 0) duration:0]];
        }
        [self.selectedProduct runAction:[SCNAction rotateByAngle:-angle aroundAxis:SCNVector3Make(0, 1, 0) duration:0]];
    }
}
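If you'd rather not depend on DHVector2D, the signed angle between the two drag vectors in the XZ plane can be computed directly with atan2; a sketch (the sign convention may need flipping to match antiClockwiseAngleToVector:):
static float SignedAngleXZ(SCNVector3 current, SCNVector3 last, SCNVector3 center) {
    // Project both drag points onto the XZ plane, relative to the rotation center.
    float ax = current.x - center.x, az = current.z - center.z;
    float bx = last.x - center.x,    bz = last.z - center.z;
    // atan2(cross, dot) gives the signed angle between the two 2D vectors.
    float cross = ax * bz - az * bx;
    float dot   = ax * bx + az * bz;
    return atan2f(cross, dot);
}
Called as SignedAngleXZ(vecCurrent, vecLast, self.activeComponent.worldPosition), it plays the same role as the angle computed in the loop above.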
P.S. This is my first write-up; please forgive the rough spots.