#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "LJPreView.h"
@interface ViewController ()<AVCaptureMetadataOutputObjectsDelegate>
@property(nonatomic,strong)AVCaptureSession *session;
@property(nonatomic,strong)AVCaptureDeviceInput *input;
@property(nonatomic,strong)AVCaptureMetadataOutput *output;
@property(nonatomic,strong)LJPreView *preView;
@end
@implementation ViewController
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    // 1. Input: the default video capture device (back camera).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    self.input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];

    // 2. Output: metadata objects (QR codes).
    self.output = [[AVCaptureMetadataOutput alloc] init];

    // 3. Session: connects the input to the output.
    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetHigh];
    if ([self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }
    if ([self.session canAddOutput:self.output]) {
        [self.session addOutput:self.output];
    }

    // The metadata object types must be set after the output has been added to the session.
    [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [self.output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];

    // 4. Preview view that displays the camera feed.
    self.preView = [[LJPreView alloc] initWithFrame:self.view.bounds];
    self.preView.session = self.session;
    [self.view addSubview:self.preView];

    // 5. Start capturing.
    [self.session startRunning];
}
#pragma mark - Delegate method implementation
- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    // Stop scanning as soon as something is recognized and tear down the preview.
    [self.session stopRunning];
    [self.preView removeFromSuperview];

    // Each recognized code arrives as an AVMetadataMachineReadableCodeObject;
    // its stringValue holds the decoded payload.
    NSString *msg = nil;
    for (AVMetadataMachineReadableCodeObject *object in metadataObjects) {
        NSLog(@"%@", object);
        msg = object.stringValue;
        NSLog(@"%@", object.stringValue);
    }

    // Show the decoded string in an alert.
    UIAlertController *alertVC = [UIAlertController alertControllerWithTitle:@"Scan Result" message:msg preferredStyle:UIAlertControllerStyleAlert];
    UIAlertAction *action = [UIAlertAction actionWithTitle:@"OK" style:UIAlertActionStyleDefault handler:nil];
    [alertVC addAction:action];
    [self presentViewController:alertVC animated:YES completion:nil];
}
@end
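
One thing the sample above glosses over is camera authorization: on a real device the session produces no frames unless the app has camera access (and an NSCameraUsageDescription entry in Info.plist). Below is a minimal sketch of how the scan could be gated on permission; -startScanning is a hypothetical helper assumed to wrap the setup code from touchesBegan, not part of the original code.

// Sketch only: gate scanning on camera permission before starting the session.
- (void)checkCameraPermissionThenScan {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        [self startScanning]; // hypothetical helper wrapping the session setup above
    } else if (status == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) { [self startScanning]; }
            });
        }];
    } else {
        NSLog(@"Camera access denied or restricted");
    }
}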
// LJPreView.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface LJPreView : UIView
@property (nonatomic, strong) AVCaptureSession *session;
@end

NS_ASSUME_NONNULL_END
#import "LJPreView.h"
@interface LJPreView()
@property(nonatomic,strong)UIImageView *imageView;
@property(nonatomic,strong)UIImageView *lineImageView;
@property(nonatomic,strong)NSTimer *timer;
@end
@implementation LJPreView
- (void)setupUI {
    // Scan frame, centered in the view (280 x 280 pt).
    _imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"scan@2x.png"]];
    _imageView.frame = CGRectMake(self.bounds.size.width * 0.5 - 140, self.bounds.size.height * 0.5 - 140, 280, 280);
    [self addSubview:_imageView];

    // Scan line that sweeps down inside the frame.
    _lineImageView = [[UIImageView alloc] initWithFrame:CGRectMake(30, 10, 220, 2)];
    _lineImageView.image = [UIImage imageNamed:@"scanning@2x.png"];
    [_imageView addSubview:_lineImageView];

    // Restart the sweep every 3 seconds.
    _timer = [NSTimer scheduledTimerWithTimeInterval:3.0f target:self selector:@selector(animation) userInfo:nil repeats:YES];
}
- (void)animation {
    // Move the line to the bottom of the frame, then jump back to the top.
    [UIView animateWithDuration:2.8 delay:0 options:UIViewAnimationOptionCurveLinear animations:^{
        self->_lineImageView.frame = CGRectMake(30, 260, 220, 2);
    } completion:^(BOOL finished) {
        self->_lineImageView.frame = CGRectMake(30, 10, 220, 2);
    }];
}
- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        [self setupUI];
    }
    return self;
}
- (void)setSession:(AVCaptureSession *)session {
    _session = session;
    // Because layerClass is AVCaptureVideoPreviewLayer, self.layer can be cast directly.
    AVCaptureVideoPreviewLayer *layer = (AVCaptureVideoPreviewLayer *)self.layer;
    layer.session = session;
}

// Back this view with an AVCaptureVideoPreviewLayer instead of a plain CALayer.
+ (Class)layerClass {
    return [AVCaptureVideoPreviewLayer class];
}
@end
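
Note that the repeating NSTimer retains its target, so the LJPreView above is kept alive even after removeFromSuperview until the timer is invalidated. A minimal sketch of one way to break that cycle, invalidating the timer when the view leaves the window; this override is an addition for illustration, not part of the original code.

// Added to LJPreView.m (sketch): stop the timer when the view is detached from a window,
// so the timer no longer retains the view and it can be deallocated.
- (void)willMoveToWindow:(UIWindow *)newWindow {
    [super willMoveToWindow:newWindow];
    if (newWindow == nil) {
        [self.timer invalidate];
        self.timer = nil;
    }
}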