/// Hosts a drawable canvas for video editing, bound to a `VideoEditorViewModel`.
class VideoEditorViewController: UIViewController {
    // Long-lived collaborators, created once for the controller's lifetime.
    private let picker = UIColorPickerViewController()
    private let canvas = CanvasView()
    private let viewModel = VideoEditorViewModel()

    override func viewDidLoad() {
        super.viewDidLoad()
        // One-time UI construction and view-model binding.
        setCanvasUI()
    }
}
// MARK: - Private interface
private extension VideoEditorViewController {
    /// One-time construction of the canvas UI: adds the canvas, binds it to the
    /// view model in both directions, and installs the tool buttons.
    func setCanvasUI() {
        view.addSubview(canvas)
        canvas.backgroundColor = .clear
        bindViewModelToUpdateCanvas()
        bindCanvasToUpdateViewModel()
        addButtons()
    }

    /// View model → canvas: re-render whenever the model's drawing changes.
    func bindViewModelToUpdateCanvas() {
        viewModel.onUpdateDrawing = { [weak self] lines in
            self?.canvas.lines = lines
        }
    }

    /// Canvas → view model: forward gesture events so the model owns the line data.
    func bindCanvasToUpdateViewModel() {
        canvas.onStartGesture = { [weak self] point in
            self?.viewModel.startNewLine(at: point)
        }
        canvas.onContinueGesture = { [weak self] points in
            self?.viewModel.appendToLastLine(points)
        }
    }

    /// Builds the vertical tool bar (undo, clear, color picker, upload, draw toggle)
    /// and sizes the canvas to fill the view.
    func addButtons() {
        let size = CGSize(width: 50, height: 50)
        let undoButton = CanvasButton(size: size, image: UIImage(systemName: "arrowshape.turn.up.left.fill")) { [weak self] _ in
            self?.viewModel.undo()
        }
        let colorPicker = CanvasButton(size: size, image: UIImage(systemName: "paintpalette.fill")) { [weak self] _ in
            guard let self else { return }
            picker.delegate = self
            present(picker, animated: true)
        }
        let trashCanButton = CanvasButton(size: size, image: UIImage(systemName: "trash")) { [weak self] _ in
            self?.viewModel.undoAll()
        }
        // `self` is deliberately not captured until upload is implemented.
        let uploadViewButton = CanvasButton(size: size, image: UIImage(systemName: "envelope")) { _ in
            print("upload not implemented")
        }
        let toggleDrawing = CanvasButton(size: size, image: UIImage(systemName: "pencil.circle")) { [weak self] _ in
            self?.viewModel.isDrawable.toggle()
        }
        let stackView = UIStackView(arrangedSubviews: [
            undoButton,
            trashCanButton,
            colorPicker,
            uploadViewButton,
            toggleDrawing
        ])
        view.addSubview(stackView)
        stackView.axis = .vertical
        stackView.spacing = 30
        // Fix: the original called `stackView.bringSubviewToFront(view)`, which has the
        // wrong receiver and is a no-op; the intent is to keep the buttons above the canvas.
        view.bringSubviewToFront(stackView)
        stackView.snp.makeConstraints { make in
            // Fix: use the current SnapKit DSL (`snp.rightMargin`) instead of the
            // deprecated `snp_rightMargin` / `snp_topMargin` accessors.
            make.right.equalTo(view.snp.rightMargin).offset(-20)
            make.top.equalTo(view.snp.topMargin)
        }
        // Fix: `view.framebounds` does not exist. Use `view.bounds`, which is in
        // `view`'s own coordinate system (`view.frame` is in the superview's).
        canvas.frame = view.bounds
    }
}
// MARK: - UIColorPickerViewControllerDelegate
extension VideoEditorViewController: UIColorPickerViewControllerDelegate {
    /// Forwards the picked color to the view model.
    ///
    /// Fix: use the `color` parameter supplied by the delegate callback rather than
    /// re-reading `picker.selectedColor` — the parameter is authoritative for this
    /// event (including continuous changes) and decouples the delegate method from
    /// the stored `picker` instance.
    func colorPickerViewController(_ viewController: UIColorPickerViewController, didSelect color: UIColor, continuously: Bool) {
        viewModel.strokeColor = color
    }
}
You are constructing the stack view in `viewDidAppear`. Each time the view appears, you are going to create a new stack view. This should be done only once, in `viewDidLoad`.

The color picker button is presenting the picker and, as soon as that presentation is complete, you are setting the canvas color. This should be moved into the
`UIColorPickerViewControllerDelegate` method `colorPickerViewController(_:didSelect:continuously:)`.

You are not using `UIImagePickerControllerDelegate`, so that conformance declaration (especially where you do not implement any of its methods) is unnecessary.

In
`touchesBegan`, you really should capture where the gesture started. Right now, you are discarding the first point of the gesture.

Where you are extracting the position from `Set<UITouch>`, you really should specify the view as `self`; otherwise you will be getting coordinates in the window's coordinate system, not your current view's coordinate system. (You might not notice if the view is located at `(0, 0)`, but if it was not, you would not get the coordinates you expected.)

Likewise, when setting the `frame` of `canvas`, you should use the coordinate system of `view`. Specifically, you should reference `view.bounds`, not `view.frame`; the latter is in the coordinate system of the `superview` of `view`. In this case, there is not an observable difference, but if this view were not at `(0, 0)`, you would start to manifest problems.

Where extracting updates in gestures, you should probably consider coalesced touches, for more fine-grained detail on touch information (on capable hardware). It is not relevant to the broader MVVM questions, but a consideration when processing touches.
Personally, I would also capture predictive touches to reduce lag in the UI (but you need a slightly more refined model to capture this dimension). See https://stackoverflow.com/a/64709364/1271826 or https://stackoverflow.com/a/58456179/1271826.
When rendering gestures, I would also be inclined to smooth them via Catmull Rom or Hermite splines. See https://stackoverflow.com/a/34583708/1271826. Again, unrelated to the broader MVVM question, so I will leave that to the reader.
I must confess that it offends my MVVM-purist tendencies that a model object (like `Line`) is still entangled with UIKit. This is because of the `UIColor` property. I might be inclined to abstract this to some non-platform-specific structure, but, again, that is beyond the scope of this question. As a general design principle, many of us avoid entangling model objects with platform-specific types like this. FWIW.
I did not grok the purpose of the `drawable` property of `Line`. I attempted to preserve that in my example, but I am not sure of the utility of capturing a `Line` that was not `drawable`. I interpreted this as a path that was not to be rendered, but that doesn't make sense IMHO, so I apologize if I misunderstood your intent. Nonetheless, I attempted to preserve the idea. It is not quite relevant to the broader question, but FWIW.