Connie Leung

Take photos with web camera using RxJS and Angular

Introduction

This is day 19 of Wes Bos's JavaScript 30 challenge, where I use RxJS operators and Angular to take photos, add them to a photo section, and download them to my local hard drive.

In this blog post, I inject the native Navigator into a component so that I can load the web camera into a video element. Every 16 milliseconds, a callback function draws the video frame to a canvas with special effects. Whenever I click the "Take Photo" button, the canvas converts its content to a base64 string and adds it to the photo section, ordered from most recent to earliest.

Create a new Angular project in workspace

ng generate application day19-webcam-fun

Define Native Navigator

First, I create a core module and define a native navigator token to inject into the web camera component.

// core.module.ts

import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { NAVIGATOR_PROVIDERS } from './navigator.service';

@NgModule({
  declarations: [],
  imports: [
    CommonModule
  ],
  providers: [NAVIGATOR_PROVIDERS]
})
export class CoreModule { }

Next, I create the NAVIGATOR injection token and NAVIGATOR_PROVIDERS in the navigator service.

// navigator.service.ts
import { isPlatformBrowser } from '@angular/common';
import { ClassProvider, FactoryProvider, InjectionToken, PLATFORM_ID } from '@angular/core';

/* Create a new injection token for injecting the navigator into a component. */
export const NAVIGATOR = new InjectionToken('NavigatorToken');

export abstract class NavigatorRef {
  get nativeNavigator(): Navigator | Object {
    throw new Error('Not implemented.');
  }
}

/* Define class that implements the abstract class and returns the native navigator object. */
export class BrowserNavigatorRef extends NavigatorRef {

  constructor() {
    super();
  }

  override get nativeNavigator(): Object | Navigator {
    return navigator;    
  }
}

/* Create an injectable provider for the NavigatorRef token that uses the BrowserNavigatorRef class. */
const browserNavigatorProvider: ClassProvider = {
  provide: NavigatorRef,
  useClass: BrowserNavigatorRef
};

/* Create an injectable provider whose factory function returns the native navigator object. */
const navigatorProvider: FactoryProvider = {
  provide: NAVIGATOR,
  useFactory: (browserNavigatorRef: BrowserNavigatorRef, platformId: Object) => 
    isPlatformBrowser(platformId) ? browserNavigatorRef.nativeNavigator : new Object(),
  deps: [ NavigatorRef, PLATFORM_ID ]
};

/* Create an array of providers. */
export const NAVIGATOR_PROVIDERS = [
  browserNavigatorProvider,
  navigatorProvider
];

After defining the providers, I register NAVIGATOR_PROVIDERS in the providers array of CoreModule, as shown in the core module code above.

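Any class in the application can now inject the token. Here is a minimal sketch (not one of the project files; the component name and import path are hypothetical) of how a component could inject NAVIGATOR:

// navigator-demo.component.ts (illustrative only)

import { Component, Inject } from '@angular/core';
import { NAVIGATOR } from './core/navigator.service';

@Component({
  selector: 'app-navigator-demo',
  template: '',
})
export class NavigatorDemoComponent {
  constructor(@Inject(NAVIGATOR) private navigator: Navigator) {}

  logUserAgent(): void {
    // On the browser platform the token resolves to the real navigator object.
    console.log(this.navigator.userAgent);
  }
}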

The definition of the core module is now complete, and I import CoreModule into AppModule.

// app.module.ts

import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';

import { AppComponent } from './app.component';
import { CoreModule } from './core';

@NgModule({
  declarations: [
    AppComponent
  ],
  imports: [
    BrowserModule,
    CoreModule,
  ],
  providers: [],
  bootstrap: [AppComponent]
})
export class AppModule { }

Create Web Camera feature module

The next module to create is the Web Camera feature module, which is also imported into AppModule. I declare two components in this feature module: WebCameraComponent and PhotoStripeComponent. WebCameraComponent takes photos with a web camera and lists them in PhotoStripeComponent from most recent to earliest.

Then, I import WebCamModule into AppModule.

// webcam.module.ts

import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { WebCameraComponent } from './web-camera/web-camera.component';
import { PhotoStripeComponent } from './photo-stripe/photo-stripe.component';

@NgModule({
  declarations: [
    WebCameraComponent,
    PhotoStripeComponent
  ],
  imports: [
    CommonModule
  ],
  exports: [
    WebCameraComponent
  ]
})
export class WebCamModule { }

// app.module.ts

import { APP_BASE_HREF, PlatformLocation } from '@angular/common';
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';

import { AppComponent } from './app.component';
import { CoreModule } from './core';
import { WebCamModule } from './webcam';

@NgModule({
  declarations: [
    AppComponent
  ],
  imports: [
    BrowserModule,
    WebCamModule,
    CoreModule,
  ],
  providers: [
    {
      provide: APP_BASE_HREF,
      useFactory: (platformLocation: PlatformLocation) => platformLocation.getBaseHrefFromDOM(),
      deps: [PlatformLocation]
    }
  ],
  bootstrap: [AppComponent]
})
export class AppModule { }

Declare components in web camera feature module

In the web camera module, I declare WebCameraComponent, which loads my laptop's web camera into a video element in order to take photos. PhotoStripeComponent is a presentation component that iterates over an array of base64 strings and displays them from latest to earliest.

src/assets
└── audio
    └── snap.mp3

src/app
├── app.component.ts
├── app.module.ts
├── core
│   ├── core.module.ts
│   ├── index.ts
│   └── navigator.service.ts
└── webcam
    ├── index.ts
    ├── interfaces
    │   └── webcam.interface.ts
    ├── photo-stripe
    │   └── photo-stripe.component.ts
    ├── web-camera
    │   └── web-camera.component.ts
    └── webcam.module.ts

I define the component selector, inline template, and inline CSS styles in WebCameraComponent. Later sections of the blog post will implement the RxJS code that adds the functionality. For your information, <app-web-camera> is the tag of the component.

// webcam.interface.ts

export interface Photo {
    data: string;
    description: string;
    download: string;
}

// web-camera.component.ts

import { APP_BASE_HREF } from '@angular/common';
import { ChangeDetectionStrategy, Component, ElementRef, Inject, OnDestroy, OnInit, ViewChild } from '@angular/core';
import { Observable, of, Subject } from 'rxjs';
import { NAVIGATOR } from '../../core/navigator.service';
import { Photo } from '../interfaces/webcam.interface';

@Component({
  selector: 'app-web-camera',
  template: `
  <ng-container>
    <div class="photobooth">
      <div class="controls">
        <button #btnPhoto>Take Photo</button>
      </div>
      <canvas class="photo" #photo></canvas>
      <video class="player" #video></video>
      <ng-container *ngIf="photoStripe$ | async as photoStripe">
        <app-photo-stripe [photoStripe]="photoStripe"></app-photo-stripe>
      </ng-container>
    </div>
    <audio class="snap" [src]="soundUrl" hidden #snap></audio>
  </ng-container>
  `,
  styles: [`
    :host {
      display: block;
    }
    ...omitted for brevity...
  `],
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class WebCameraComponent implements OnInit, OnDestroy {

  @ViewChild('btnPhoto', { static: true, read: ElementRef })
  btnPhoto!: ElementRef<HTMLButtonElement>;

  @ViewChild('snap', { static: true, read: ElementRef })
  snap!: ElementRef<HTMLAudioElement>;

  @ViewChild('video', { static: true, read: ElementRef })
  video!: ElementRef<HTMLVideoElement>;

  @ViewChild('photo', { static: true, read: ElementRef })
  canvas!: ElementRef<HTMLCanvasElement>;

  destroy$ = new Subject<void>();

  photoStripe$!: Observable<Photo[]>;

  constructor(@Inject(APP_BASE_HREF) private baseHref: string, @Inject(NAVIGATOR) private navigator: Navigator) { }

  ngOnInit(): void {
    const videoNative = this.video.nativeElement;
    const canvasNative = this.canvas.nativeElement;
    const ctx = canvasNative.getContext('2d', { willReadFrequently: true });
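    // videoNative, canvasNative and ctx feed the RxJS stream that is added in a later section.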

    this.getVideo();

    this.photoStripe$ = of([]);
  }

  get soundUrl() {
    const isEndWithSlash = this.baseHref.endsWith('/');
    return `${this.baseHref}${ isEndWithSlash ? '' : '/' }assets/audio/snap.mp3`; 
  }

  private getVideo() {
    console.log('navigator', this.navigator);

    this.navigator.mediaDevices.getUserMedia({ video: true, audio: false })
      .then(localMediaStream => {
        console.log(localMediaStream);

        const nativeElement = this.video.nativeElement;         
        nativeElement.srcObject = localMediaStream;
        nativeElement.play();
      })
      .catch(err => {
        console.error(`OH NO!!!`, err);
      });
  }

  private rgbSplit(pixels: ImageData) {
    for (let i = 0; i < pixels.data.length; i += 4) {
      pixels.data[i - 150] = pixels.data[i + 0]; // RED
      pixels.data[i + 500] = pixels.data[i + 1]; // GREEN
      pixels.data[i - 550] = pixels.data[i + 2]; // Blue
    }
    return pixels;
  }

  ngOnDestroy(): void {
    this.destroy$.next();
    this.destroy$.complete();
  }
}

photoStripe$ is an Observable, and the async pipe resolves it in the inline template to render the array elements.

<ng-container *ngIf="photoStripe$ | async as photoStripe">
    <app-photo-stripe [photoStripe]="photoStripe"></app-photo-stripe>
</ng-container>

The async pipe resolves photoStripe$ into the photoStripe variable, which is passed to the photoStripe input of PhotoStripeComponent.
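
For comparison, here is a minimal sketch (not used in this post) of what the component would need without the async pipe: a manual subscription plus clean-up through the same destroy$ subject, with takeUntil imported from rxjs.

// Hypothetical alternative without the async pipe
photoStripe: Photo[] = [];

ngOnInit(): void {
  this.photoStripe$
    .pipe(takeUntil(this.destroy$))
    .subscribe((photos) => (this.photoStripe = photos));
}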

getVideo is a method that uses the native Navigator to load the web camera stream and assign it to the video element.

// photo-stripe.component.ts

import { Component, ChangeDetectionStrategy, Input } from '@angular/core';
import { Photo } from '../interfaces/webcam.interface';

@Component({
  selector: 'app-photo-stripe',
  template: `<div class="strip">
    <a *ngFor="let photo of photoStripe; index as i;" [href]="photo.data" download="{{photo.download}}{{i + 1}}">
      <img [src]="photo.data" [alt]="photo.description" />
    </a>
  </div>`,
  styles: [`
    :host {
      display: block;
    }
    ... omitted for brevity ...
  `],
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class PhotoStripeComponent {

  @Input()
  photoStripe!: Photo[];
}

PhotoStripeComponent is a simple presentation component that renders base64 strings into <a> and <img> elements; clicking a hyperlink downloads the corresponding photo.
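
As a framework-free sketch of the same idea (not part of the component), a base64 data URL assigned to an <a> element with a download attribute saves the canvas content as a JPEG file when the link is clicked:

const canvas = document.createElement('canvas');
const anchor = document.createElement('a');
anchor.href = canvas.toDataURL('image/jpeg'); // base64-encoded image data
anchor.download = 'photo1.jpg';               // suggested file name
anchor.click();                               // triggers the download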

Next, I delete the boilerplate code in AppComponent and render WebCameraComponent in the inline template.

import { Component } from '@angular/core';
import { Title } from '@angular/platform-browser';

@Component({
  selector: 'app-root',
  template: '<app-web-camera></app-web-camera>', 
  styles: [`
    :host {
      display: block;
    }
  `],
})
export class AppComponent {
  title = 'Day 19 Web Cam Fun';

  constructor(titleService: Title) {
    titleService.setTitle(this.title);
  }
}

Apply RxJS operators to render the video capture in a canvas

In ngOnInit, I use RxJS to render the video capture in a 2D canvas. The event stream starts when the video is ready to play.

// web-camera.component.ts

import { concatMap, filter, fromEvent, map, Observable, scan, startWith, Subject, takeUntil, tap, timer } from 'rxjs';

const videoNative = this.video.nativeElement;
const canvasNative = this.canvas.nativeElement;
const ctx = canvasNative.getContext('2d', { willReadFrequently: true });

fromEvent(videoNative, 'canplay')
  .pipe(
      filter(() => !!ctx),
      map(() => ctx as CanvasRenderingContext2D),
      concatMap((canvasContext) => {
        const width = videoNative.videoWidth;
        const height = videoNative.videoHeight;    
        canvasNative.width = width;
        canvasNative.height = height;
        const interval = 16;   

        return timer(0, interval).pipe(
           tap(() => {
              canvasContext.drawImage(this.video.nativeElement, 0, 0, width, height);
              // take the pixels out
              const pixels = canvasContext.getImageData(0, 0, width, height);

              this.rgbSplit(pixels);
              canvasContext.globalAlpha = 0.8;  
              canvasContext.putImageData(pixels, 0, 0);
           })
         )
       }),
       takeUntil(this.destroy$)
  )
  .subscribe();

Explanations:

  • fromEvent(videoNative, 'canplay') listens to the canplay event of the video
  • filter(() => !!ctx) validates that the 2D canvas context is defined
  • map(() => ctx as CanvasRenderingContext2D) casts the context to CanvasRenderingContext2D
  • concatMap((canvasContext) => {....}) creates a timer observable that draws to the canvas every 16 milliseconds (roughly 60 frames per second)
  • takeUntil(this.destroy$) unsubscribes from the observable when the component is destroyed

timer returns an Observable; therefore, I use concatMap instead of map to write the pixels to the canvas.

concatMap((canvasContext) => {
    const width = videoNative.videoWidth;
    const height = videoNative.videoHeight;    
    canvasNative.width = width;
    canvasNative.height = height;
    const interval = 16; // milliseconds between frames (~60 fps)

    return timer(0, interval).pipe(
       tap(() => {
          canvasContext.drawImage(this.video.nativeElement, 0, 0, width, height);
          // take the pixels out
          const pixels = canvasContext.getImageData(0, 0, width, height);

          this.rgbSplit(pixels);
          canvasContext.globalAlpha = 0.8;  
          canvasContext.putImageData(pixels, 0, 0);
        })
     )
})
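
To illustrate the difference, here is a small standalone sketch (not part of the component) that contrasts map and concatMap when the projection returns an Observable such as timer:

import { concatMap, fromEvent, map, timer } from 'rxjs';

const clicks$ = fromEvent(document, 'click');

// map keeps the inner timer Observable as a value, so subscribers would
// receive Observable<number> instead of the ticks themselves.
const nested$ = clicks$.pipe(map(() => timer(0, 16)));

// concatMap subscribes to the inner timer and flattens its ticks into the
// outer stream, which is what the drawing loop needs.
const ticks$ = clicks$.pipe(concatMap(() => timer(0, 16)));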

Build photo list with RxJS operators

// web-camera.component.ts

this.photoStripe$ = fromEvent(this.btnPhoto.nativeElement, 'click')
  .pipe(
      tap(() => {
         const snapElement = this.snap.nativeElement;
         snapElement.currentTime = 0;
         snapElement.play();
      }),
      map(() => ({ 
         data: this.canvas.nativeElement.toDataURL('image/jpeg'),
         description: 'My photo',
         download: 'photo',
      })),
      scan((photos, photo) => [photo, ...photos], [] as Photo[]),
      startWith([] as Photo[]),
  );

Explanations:

  • tap(() => { ...play sound... }) plays an audio file when I click the "Take Photo" button
  • map(() => ({ ...create base64 string, description and file download name... })) constructs the base64 string, description, and file name of a photo
  • scan((photos, photo) => [photo, ...photos], [] as Photo[]) prepends each new photo so the list is ordered from most recent to earliest (see the standalone sketch below)
  • startWith([] as Photo[]) initializes an empty photo list
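
As a standalone illustration (not part of the component), scan and startWith behave like this on a simple number stream:

import { of, scan, startWith } from 'rxjs';

// Each new value is prepended to the accumulator, so the latest value comes
// first; startWith emits the empty list before any value arrives.
of(1, 2, 3)
  .pipe(
    scan((acc: number[], value: number) => [value, ...acc], [] as number[]),
    startWith([] as number[]),
  )
  .subscribe((list) => console.log(list));
// Logs: [], [1], [2, 1], [3, 2, 1]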

This is the end of the example. I built an Angular and RxJS example that takes photos and prepends each new photo to the photo stripe component for download.

Final Thoughts

In this post, I showed how to use RxJS and Angular to take fun photos with a web camera and make them available for download.

This is the end of the blog post. I hope you like the content and continue to follow my learning experience in Angular and other technologies.
