diff --git a/SemSegiPhone/AnnotationViewController.swift b/SemSegiPhone/AnnotationViewController.swift new file mode 100644 index 0000000..09c6a95 --- /dev/null +++ b/SemSegiPhone/AnnotationViewController.swift @@ -0,0 +1,99 @@ +// +// AnnotationViewController.swift +// SemSegiPhone +// +// ViewController for AnnotationView +// +// Created by ESP-NET on 5/29/20. +// Copyright © 2020 Sachin Mehta. All rights reserved. +// + +import UIKit + +class AnnotationViewController: UIViewController { + + @IBOutlet weak var capturedImage: UIImageView! + @IBOutlet weak var capturedSeg: UIImageView! + @IBOutlet weak var buttons: UIStackView! + @IBOutlet weak var colorswatch: UIImageView! + @IBOutlet weak var classLabel: UILabel! + + @IBOutlet weak var progressBar: UIProgressView! + + private var index: Int! + + // handled by segue + var capImage: UIImage! + var capSeg: UIImage! + var classes = [String](); + var selection = [Int](); + var responses = [Int](); + + override func viewDidLoad() { + super.viewDidLoad() + + responses = selection + + capturedImage.image = capImage + capturedSeg.image = capSeg + + index = 0 + + colorswatch.backgroundColor = UIColor(red: 1.0, green: CGFloat(12 * selection[index]) / 255.0, blue: CGFloat(12 * selection[index]) / 255.0, alpha: 1.0); + + classLabel.text = classes[selection[index]] + progressBar.progress = Float(index) + + for subview in buttons.arrangedSubviews { + let btn = subview as! UIButton + btn.addTarget(self, action: #selector(selectionClicked), for: .touchUpInside) + } + } + + // TODO: clean up + @objc func selectionClicked(_ sender: AnyObject?) { + let btn = sender as! UIButton + if sender === buttons.arrangedSubviews[0] { + responses[index] = 0 + btn.setTitleColor(UIColor.red, for: .normal) + let btn2 = buttons.arrangedSubviews[1] as! UIButton + let btn3 = buttons.arrangedSubviews[2] as! 
UIButton + btn2.setTitleColor(UIColor.blue, for: .normal) + btn3.setTitleColor(UIColor.blue, for: .normal) + } else if sender === buttons.arrangedSubviews[1] { + responses[index] = 1 + btn.setTitleColor(UIColor.red, for: .normal) + let btn2 = buttons.arrangedSubviews[2] as! UIButton + let btn3 = buttons.arrangedSubviews[0] as! UIButton + btn2.setTitleColor(UIColor.blue, for: .normal) + btn3.setTitleColor(UIColor.blue, for: .normal) + } else if sender === buttons.arrangedSubviews[2] { + responses[index] = 2 + btn.setTitleColor(UIColor.red, for: .normal) + let btn2 = buttons.arrangedSubviews[1] as! UIButton + let btn3 = buttons.arrangedSubviews[0] as! UIButton + btn2.setTitleColor(UIColor.blue, for: .normal) + btn3.setTitleColor(UIColor.blue, for: .normal) + } + print("response", responses[index]) + } + + // handle next btn + @IBAction func onClickNext(_ sender: Any) { + index += 1 + progressBar.progress += 1.0 / Float(selection.count) + + if (index == selection.count) { + _ = navigationController?.popViewController(animated: true) + } else { + classLabel.text = classes[selection[index]] + // TODO: swap to new color mapping + colorswatch.backgroundColor = UIColor(red: 1.0, green: CGFloat(12 * selection[index]) / 255.0, blue: CGFloat(12 * selection[index]) / 255.0, alpha: 1.0); + } + + for subview in buttons.arrangedSubviews { + let btn = subview as! UIButton + btn.setTitleColor(UIColor.blue, for: .normal) + } + } +} diff --git a/SemSegiPhone/AppDelegate.swift b/SemSegiPhone/AppDelegate.swift index 9460697..8d159b4 100644 --- a/SemSegiPhone/AppDelegate.swift +++ b/SemSegiPhone/AppDelegate.swift @@ -9,7 +9,7 @@ import UIKit @UIApplicationMain -class AppDelegate: UIResponder, UIApplicationDelegate { +class AppDelegate: UIResponder, UIApplicationDelegate { var window: UIWindow?
diff --git a/SemSegiPhone/Assets.xcassets/AsphaltSwatch.imageset/Contents.json b/SemSegiPhone/Assets.xcassets/AsphaltSwatch.imageset/Contents.json new file mode 100644 index 0000000..e140881 --- /dev/null +++ b/SemSegiPhone/Assets.xcassets/AsphaltSwatch.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "idiom" : "universal", + "filename" : "asphalt_swatch.jpg", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/SemSegiPhone/Assets.xcassets/AsphaltSwatch.imageset/asphalt_swatch.jpg b/SemSegiPhone/Assets.xcassets/AsphaltSwatch.imageset/asphalt_swatch.jpg new file mode 100644 index 0000000..fff0162 Binary files /dev/null and b/SemSegiPhone/Assets.xcassets/AsphaltSwatch.imageset/asphalt_swatch.jpg differ diff --git a/SemSegiPhone/Assets.xcassets/Image.imageset/Contents.json b/SemSegiPhone/Assets.xcassets/Image.imageset/Contents.json new file mode 100644 index 0000000..039aa45 --- /dev/null +++ b/SemSegiPhone/Assets.xcassets/Image.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "idiom" : "universal", + "filename" : "Image@2x.png", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/SemSegiPhone/Assets.xcassets/Image.imageset/Image@2x.png b/SemSegiPhone/Assets.xcassets/Image.imageset/Image@2x.png new file mode 100644 index 0000000..5070db2 Binary files /dev/null and b/SemSegiPhone/Assets.xcassets/Image.imageset/Image@2x.png differ diff --git a/SemSegiPhone/Base.lproj/Main.storyboard b/SemSegiPhone/Base.lproj/Main.storyboard index b82454e..a454ad8 100644 --- a/SemSegiPhone/Base.lproj/Main.storyboard +++ b/SemSegiPhone/Base.lproj/Main.storyboard @@ -1,48 +1,339 @@ - - - - + + - + + - + - + - + - - - 
+ + + + + + + + + + + + + + + + + - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + - + + + + - + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -50,12 +341,12 @@ - + - + @@ -63,4 +354,8 @@ + + + + diff --git a/SemSegiPhone/CollectionViewCell.swift b/SemSegiPhone/CollectionViewCell.swift new file mode 100644 index 0000000..519cdc9 --- /dev/null +++ b/SemSegiPhone/CollectionViewCell.swift @@ -0,0 +1,14 @@ +// +// CollectionViewCell.swift +// SemSegiPhone +// +// Created by ESP-NET on 5/22/20. +// Copyright © 2020 Sachin Mehta. All rights reserved. +// + +import UIKit + +class CollectionViewCell: UICollectionViewCell { + + @IBOutlet weak var CellLabel: UILabel! +} diff --git a/SemSegiPhone/Info.plist b/SemSegiPhone/Info.plist index 8ad8cb5..541c976 100644 --- a/SemSegiPhone/Info.plist +++ b/SemSegiPhone/Info.plist @@ -20,6 +20,8 @@ 1 LSRequiresIPhoneOS + NSCameraUsageDescription + Camera UILaunchStoryboardName LaunchScreen UIMainStoryboardFile @@ -41,7 +43,5 @@ UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight - NSCameraUsageDescription - Camera diff --git a/SemSegiPhone/SetupViewController.swift b/SemSegiPhone/SetupViewController.swift new file mode 100644 index 0000000..7ddbff5 --- /dev/null +++ b/SemSegiPhone/SetupViewController.swift @@ -0,0 +1,66 @@ +// +// SetupViewController.swift +// SemSegiPhone +// +// ViewController for SetupView +// +// Created by ESP-NET on 5/22/20. +// Copyright © 2020 Sachin Mehta. All rights reserved. 
+// + +import UIKit + +class SetupViewController: UIViewController, UICollectionViewDataSource, UICollectionViewDelegate { + // TODO: import list of classes from file + let classes = ["Background", "Aeroplane", "Bicycle", "Bird", "Boat", "Bottle", "Bus", "Car", "Cat", "Chair", "Cow", "Diningtable", "Dog", "Horse", "Motorbike", "Person", "Pottedplant", "Sheep", "Sofa", "Train", "TV"] + + var selection = [Int](); + + @IBOutlet weak var Grid: UICollectionView! + + override func viewDidLoad() { + super.viewDidLoad() + } + + func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int { + return classes.count + } + + // populate collection view with cells + func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell { + print("in cell") + let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "Cell", for: indexPath) as! CollectionViewCell + + cell.CellLabel.text = classes[indexPath.item] + print("creating cell for", classes[indexPath.item]) + + // legacy red monochrome color equation +// cell.backgroundColor = UIColor(red: 1.0, green: CGFloat(12 * indexPath.item) / 255.0, blue: CGFloat(12 * indexPath.item) / 255.0, alpha: 1.0); + + cell.backgroundColor = UIColor(red: CGFloat((indexPath.item * indexPath.item * 7) % 255) / 255.0, green: CGFloat(12 * indexPath.item) / 255.0, blue: CGFloat((((indexPath.item * indexPath.item) % 21) * 39) % 255) / 255.0, alpha: 1.0); + if (cell.isSelected) { + cell.backgroundColor = UIColor(red: CGFloat((indexPath.item * indexPath.item * 7) % 255) / 255.0, green: CGFloat(12 * indexPath.item) / 255.0, blue: CGFloat((((indexPath.item * indexPath.item) % 21) * 39) % 255) / 255.0, alpha: 0.5); + } + + return cell + } + + func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) { + selection.append(indexPath.item) + print("selected index ", indexPath.item) + print("selection ", selection) + + let 
selectedCell:UICollectionViewCell = collectionView.cellForItem(at: indexPath)! + selectedCell.backgroundColor = UIColor(red: CGFloat((indexPath.item * indexPath.item * 7) % 255) / 255.0, green: CGFloat(12 * indexPath.item) / 255.0, blue: CGFloat((((indexPath.item * indexPath.item) % 21) * 39) % 255) / 255.0, alpha: 0.5); + } + + override func prepare(for segue: UIStoryboardSegue, sender: Any?) + { + if segue.destination is ViewController + { + let vc = segue.destination as? ViewController + vc?.selection = selection + vc?.classes = classes + } + } +} diff --git a/SemSegiPhone/ViewController.swift b/SemSegiPhone/ViewController.swift index 95e16e1..dc154a3 100644 --- a/SemSegiPhone/ViewController.swift +++ b/SemSegiPhone/ViewController.swift @@ -2,6 +2,8 @@ // ViewController.swift // EdgeNetsCV // +// ViewController for CameraView +// // Created by Sachin on 7/2/19. // Copyright © 2019 Sachin Mehta. All rights reserved. // @@ -15,12 +17,14 @@ import Accelerate import VideoToolbox -class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { +class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate, UICollectionViewDataSource, UICollectionViewDelegate { + @IBOutlet weak var AnnotView: UIView! @IBOutlet weak var cameraView: UIView! - @IBOutlet weak var segView: UIImageView! + @IBOutlet weak var SmallGrid: UICollectionView! 
+ private var imageTaken = false private var requests = [VNRequest]() //espnet model @@ -33,34 +37,76 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele private let session = AVCaptureSession() private let videoDataOutput = AVCaptureVideoDataOutput() private let videoDataOutputQueue = DispatchQueue(label: "videoQueue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem) + + private let photoOutput = AVCapturePhotoOutput() + private let reuseIdentifier = "ChooserCell" + private let sectionInsets = UIEdgeInsets(top: 50.0, left: 20.0, bottom: 50.0, right: 20.0) + private var previewLayer: AVCaptureVideoPreviewLayer! = nil + private var output: AVCapturePhotoOutput? - //define the filter that will convert the grayscale prediction to color image + private var videoDevice: AVCaptureDevice? + private var deviceInput: AVCaptureDeviceInput! + + // define the filter that will convert the grayscale prediction to color image let masker = ColorMasker() + // handled by segue + var classes = [String](); + var selection = [Int](); + private var capImage: UIImage! + private var capSeg: UIImage! + override func viewDidLoad() { super.viewDidLoad() + // Do any additional setup after loading the view, typically from a nib. - setupAVCapture() + setupAVCapture(position: .back) //setup vision parts setupVisionModel() //start the capture startCaptureSession() + + print("camera view classes", classes) + print("camera view selection", selection) } func startCaptureSession(){ session.startRunning() } - func setupAVCapture(){ - var deviceInput: AVCaptureDeviceInput! + func stopCaptureSession(){ + session.stopRunning() + session.removeInput(_: deviceInput) + session.removeOutput(_: videoDataOutput) + } + + func getDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice? 
{ + return AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: position).devices.first + } + + func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int { + return selection.count + } + + // populate collection view with cells + func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell { + let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "SmallCell", for: indexPath) as! CollectionViewCell + cell.CellLabel.text = classes[selection[indexPath.item]] + + cell.backgroundColor = UIColor( + red: CGFloat((selection[indexPath.item] * selection[indexPath.item] * 7) % 255) / 255.0, + green: CGFloat(12 * selection[indexPath.item]) / 255.0, + blue: CGFloat((((selection[indexPath.item] * selection[indexPath.item]) % 21) * 39) % 255) / 255.0, alpha: 1.0); + return cell + } + + func setupAVCapture(position: AVCaptureDevice.Position){ //select a video device - let videoDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], - mediaType: .video, - position: .back).devices.first + videoDevice = getDevice(position: position) do { deviceInput = try AVCaptureDeviceInput(device: videoDevice!) 
@@ -76,7 +122,7 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele //add video input guard session.canAddInput(deviceInput) else{ - print("Could not add video device input to the session") + print("Could not add video device input to the session") session.commitConfiguration() return } @@ -84,10 +130,11 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele session.addInput(deviceInput) if session.canAddOutput(videoDataOutput) { session.addOutput(videoDataOutput) - + print("Added video data output to the session") + //add video data output videoDataOutput.alwaysDiscardsLateVideoFrames = true - + videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue) } else{ print("Could not add video data output to the session") @@ -95,6 +142,15 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele return } + if session.canAddOutput(photoOutput) { + session.addOutput(photoOutput) + print("Added still image output to the session") + } else{ + print("Could not add still image output to the session") + session.commitConfiguration() + return + } + let captureConnection = videoDataOutput.connection(with: .video) //always process the frames captureConnection?.isEnabled = true @@ -117,6 +173,7 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele previewLayer.frame = rootLayer.bounds rootLayer.addSublayer(previewLayer) + cameraView.bringSubview(toFront: segView) } func setupVisionModel() { @@ -143,7 +200,6 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele } let outPixelBuffer = (obs.first)!
- let segMaskGray = CIImage(cvPixelBuffer: outPixelBuffer.pixelBuffer) //pass through the filter that converts grayscale image to different shades of red @@ -154,11 +210,19 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele } - // this function notifies AVCatpreuDelegate everytime a new frame is received + // this function notifies AVCaptureDelegate every time a new frame is received func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {return} - let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .right, options: [:]) + var imageRequestHandler: VNImageRequestHandler + + if (videoDevice?.position == AVCaptureDevice.Position.back) { + // video device back + imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .right, options: [:]) + } else { + // video device front + imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .leftMirrored, options: [:]) + } do { try imageRequestHandler.perform(self.requests) @@ -171,11 +235,74 @@ class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDele super.didReceiveMemoryWarning() // Dispose of any resources that can be recreated. } + + // handle change camera btn + @IBAction func changeCamera(_ sender: UIButton) { + stopCaptureSession() + setupAVCapture(position: videoDevice?.position == AVCaptureDevice.Position.back ? .front : .back) + setupVisionModel() + startCaptureSession() + } + + // prepare segue to AnnotationViewController + override func prepare(for segue: UIStoryboardSegue, sender: Any?) + { + if (segue.identifier == "photoSegue") { + let vc = segue.destination as?
AnnotationViewController + vc?.capImage = capImage + vc?.capSeg = capSeg + vc?.classes = classes + vc?.selection = selection + } + } + + // handle camera btn + @IBAction func takePhoto(_ sender: UIButton) { + let photoSettings = AVCapturePhotoSettings() + let previewPixelType = photoSettings.availablePreviewPhotoPixelFormatTypes.first! + let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType, + kCVPixelBufferWidthKey as String: 160, + kCVPixelBufferHeightKey as String: 160] + photoSettings.previewPhotoFormat = previewFormat + + photoOutput.capturePhoto(with: photoSettings, delegate: self) + imageTaken = true + } + + // handle photo capture and trigger segue + func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) { + + if let error = error { + print("error occurred : \(error.localizedDescription)") + } + + if let sampleBuffer = photoSampleBuffer, + let previewBuffer = previewPhotoSampleBuffer, + let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) { + + let dataProvider = CGDataProvider(data: dataImage as CFData) + let cgImageRef: CGImage! 
= CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent) + let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right) + + self.capImage = image + + self.capSeg = UIImage(ciImage: self.masker.outputImage!, scale: 1.0, orientation: .right) + + print("image", image) + print("capturedImage", capImage) + + self.performSegue(withIdentifier: "photoSegue", sender: self) + } else { + print("error") + } + } } //converts the Grayscale image to RGB -// provides different shades of red based on pixel values +// r = class * class * 7 mod 255 +// g = class * 12 +// b = class * class mod 21 * 39 mod 255 class ColorMasker: CIFilter { var inputGrayImage : CIImage? @@ -183,9 +310,19 @@ class ColorMasker: CIFilter let colormapKernel = CIColorKernel(source: "kernel vec4 colorMasker(__sample gray)" + "{" + - " if (gray.r == 0.0f) {return vec4(0.0, 0.0, 0.0, 1.0);}" + - " return vec4(1.0, gray.r, gray.r, 1.0);" + - "}" + " if (gray.r == 0.0f) {return vec4(0.0, 0.0, 0.0, 0.0);} else {" + + "vec4 result;" + +// "int class;" + +// "class = (int)(1 / gray.r);" + +// "result.r = (float)((((gray.r * 255 / 12) * (gray.r * 255 / 12) * 7) % 255) / 255);" + +// "result.g = gray.r;" + +// "result.b = (float)((((((gray.r * 255 / 12) * (gray.r * 255 / 12)) % 21) * 39) % 255) / 255);" + + "result.r = 1;" + + "result.g = gray.r;" + + "result.b = gray.r;" + + "result.a = 0.9;" + + "return result;" + + "}}" ) override var attributes: [String : Any]