How to use document.querySelector in Aurelia

Hi,
I am writing a simple aurelia app that records the screen. Basically example from here:


For this I need to do document.querySelector('video'). How do I do that in Aurelia?
var video = document.querySelector('video');
if(!navigator.getDisplayMedia && !navigator.mediaDevices.getDisplayMedia) {
// do stuff.
 }

Any suggestions on how to achieve the equivalent of document.querySelector?

Best
Suvir

1 Like

You most likely won't need the querySelector if you go with a ref binding: http://aurelia.io/docs/binding/basics#referencing-elements

2 Likes

I added a ref. But now when I press the button, nothing happens. Any hints? Here are my JS and HTML.

import * as RecordRTC from 'recordrtc';


export class MyApp {
  //public clock: HTMLCanvasElement;
  attached() {
    var video = this.video;
    // do stuffs


    if (!navigator.getDisplayMedia && !navigator.mediaDevices.getDisplayMedia) {
      var error = 'Your browser does NOT support the getDisplayMedia API.';
      //document.querySelector('h1').innerHTML = error;
      this.headerRef.innerHTML = error;
      this.video.style.display = 'none';
      //document.getElementById('btn-start-recording').style.display = 'none';
      this.btnStartRecording.style.display = 'none';
      //document.getElementById('btn-stop-recording').style.display = 'none';
      this.btnStopRecording.style.display = 'none';
      throw new Error(error);
    }

    function invokeGetDisplayMedia(success, error) {
      var displaymediastreamconstraints = {
        video: {
          displaySurface: 'monitor', // monitor, window, application, browser
          logicalSurface: true,
          cursor: 'always' // never, always, motion
        }
      };

      // above constraints are NOT supported YET
      // that's why overriding them
      displaymediastreamconstraints = {
        video: true
      };

      if (navigator.mediaDevices.getDisplayMedia) {
        navigator.mediaDevices.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
      } else {
        navigator.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
      }
    }

    function captureScreen(callback) {
      invokeGetDisplayMedia(function (screen) {
        addStreamStopListener(screen, function () {
          //document.getElementById('btn-stop-recording').click();
          this.btnStartRecording.click();
        });
        callback(screen);
      }, function (error) {
        console.error(error);
        alert('Unable to capture your screen. Please check console logs.\n' + error);
      });
    }

    function stopRecordingCallback() {
      video.src = video.srcObject = null;
      video.src = URL.createObjectURL(recorder.getBlob());

      recorder.screen.stop();
      recorder.destroy();
      recorder = null;

      //document.getElementById('btn-start-recording').disabled = false;
      this.btnStartRecording.disabled = false;
    }

    var recorder; // globally accessible

    //this.btnStartRecording.getContext( '2d' );

    //document.getElementById('btn-start-recording')
    this.btnStartRecording.onclick = function () {
      console.log("click me");
      this.disabled = true;
      captureScreen(function (screen) {
        video.srcObject = screen;

        recorder = RecordRTC(screen, {
          type: 'video'
        });

        recorder.startRecording();

        // release screen on stopRecording
        recorder.screen = screen;

        this.btnStopRecording.disabled = false;
      });
    };

    //document.getElementById('btn-stop-recording')
    this.btnStopRecording.onclick = function () {
      this.disabled = true;
      recorder.stopRecording(stopRecordingCallback);
    };

    function addStreamStopListener(stream, callback) {
      stream.addEventListener('ended', function () {
        callback();
        callback = function () {};
      }, false);
      stream.addEventListener('inactive', function () {
        callback();
        callback = function () {};
      }, false);
      stream.getTracks().forEach(function (track) {
        track.addEventListener('ended', function () {
          callback();
          callback = function () {};
        }, false);
        track.addEventListener('inactive', function () {
          callback();
          callback = function () {};
        }, false);
      });
    }

  }


}



and HTML

<div class="message">

    <style>

        html,

        body {

            margin: 0 !important;

            padding: 0 !important;

            text-align: center;

            font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen, Ubuntu, Cantarell, "Fira Sans", "Droid Sans", "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";

            font-size: 1em;

        }

        video {

            width: 30%;

            border-radius: 5px;

            border: 1px solid black;

        }

    </style>

    <title>Screen Recording | RecordRTC</title>

    <h1 ref="headerRef">Screen Recording using RecordRTC</h1>

    <br>

    <button ref="btnStartRecording" id="btn-start-recording">Start Recording</button>

    <button ref="btnStopRecording" id="btn-stop-recording" disabled>Stop Recording</button>

    <hr>

    <video ref="video" controls autoplay playsinline></video>
    <!-- <script src="https://www.WebRTC-Experiment.com/RecordRTC.js"></script> -->

</div>
1 Like

Can you clarify a few bits:

  • are you using v2 (vnext) or v1?
  • how does your project look like?

For v1, if you wanna get a hold of an element, you can do it in attached lifecycle:

import { inject } from 'aurelia-framework';

@inject(Element)
export class App {

  constructor(element) {
    this.element = element;
  }

  attached() {
    const video = this.element.querySelector('video');
    // ...
  }
}

Can you try that?

1 Like

It's Aurelia 2 based for now. I would like to do it for Aurelia 1 later. I just started with the default app. It works halfway. Basically I can click and start recording the screen… but then it cannot find srcObject on the video. Here is the complete project.


npm install
npm run and it shows on http://localhost:8080/

Actually, querySelector is not recommended, so I used a ref instead.

1 Like

Ah, I think it's an easy fix. Our lifecycle name has been changed to afterAttach, instead of attached. Can you try that? Thanks for the info. You can consult the docs here https://docs.aurelia.io/getting-started/components#the-component-lifecycle for the rest of the lifecycle hooks :+1:

1 Like

So like this:
afterAttach() {
const theVideo = this.element.querySelector('video');
}
and then I use theVideo in the rest of the code, or only inside afterAttach? Thanks for the link, yes I should read that now. Also, inject is from aurelia-framework… shall I add that to my package.json? Is that also a dev package, like this: "@aurelia/framework": "dev"? Never mind about framework, I just added it like: "@aurelia/testing": "dev", "aurelia-framework": "1.3.1",

1 Like

If you are using v2, the packages that you work with should be scoped to @aurelia. aurelia-framework is our v1 package, so it should not be in your app.

For this part:

afterAttach() {
  const theVideo = this.element.querySelector('video');
}

I’d check it again, and do it like this:

afterAttach() {
  const theVideo = this.theVideo; // same with the ref name
}

Ref should work, I’m thinking maybe you didn’t have the right lifecycle method name, or a typo somewhere and things didn’t work as expected.

2 Likes

The app works now, but strangely I cannot enable the button from its disabled state.

It says,

my-app.js:128 TypeError: Cannot set property 'stopButtonStatus' of undefined
    at my-app.js:161
    at my-app.js:125

Here is complete html with just two buttons,

<div class="message">
  <style>
    html,
    body {
      margin: 0 !important;
      padding: 0 !important;
      text-align: center;
      font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen,
        Ubuntu, Cantarell, "Fira Sans", "Droid Sans", "Helvetica Neue", Arial,
        sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
      font-size: 1em;
    }

    video {
      width: 30%;
      border-radius: 5px;
      border: 1px solid black;
    }
  </style>

  <title>Screen Recording | RecordRTC</title>
  <h1 ref="headerRef">Screen Recording using RecordRTC</h1>

  <br />

  <!-- <button ref="btnStartRecording" id="btn-start-recording">Start Recording</button> -->
  <button
    id="btnStartRecording"
    ref="btnStartRecording"
    click.delegate="startRecording()"
  >
    Start Recording
  </button>
  <button
    ref="btnStopRecording"
    disabled.bind="stopButtonStatus"
    click.trigger="stopRecording()"
  >
    Stop Recording
  </button>

  <hr />
  <video ref="theVideo" controls autoplay playsinline></video>

  <!-- <script src="https://www.WebRTC-Experiment.com/RecordRTC.js"></script> -->
</div>

And here is complete js,

//import { inject } from "aurelia-framework";

import * as RecordRTC from "recordrtc";

var recorder;

var theVideo;

var stopButtonStatus = true;

//@inject(Element)

export class MyApp {

  //public clock: HTMLCanvasElement;

  //sourceObject = HTMLMediaElement.srcObject;

  //var stopButtonStatus = new Boolean(true);

  //HTMLVideoElement theVideo;

  constructor(element) {

    this.element = element;

    this.stopButtonStatus = true;

  }

  /*   constructor() {

      //var recorder; // globally accessible

      this.handleBodyClick = e => {

        console.log("clicked");

        console.log(e.target);

      };

    } */

  afterAttach() {

    //const theVideo = this.element.querySelector("video");

    theVideo = this.theVideo; // same with the ref name

    //document.addEventListener('click', this.handleBodyClick);

    if (!navigator.getDisplayMedia && !navigator.mediaDevices.getDisplayMedia) {

      var error = "Your browser does NOT support the getDisplayMedia API.";

      //document.querySelector('h1').innerHTML = error;

      this.headerRef.innerHTML = error;

      this.video.style.display = "none";

      //document.getElementById('btn-start-recording').style.display = 'none';

      this.btnStartRecording.style.display = "none";

      //document.getElementById('btn-stop-recording').style.display = 'none';

      this.btnStopRecording.style.display = "none";

      throw new Error(error);

    }

  }

  addStreamStopListener(stream, callback) {

    stream.addEventListener(

      "ended",

      function () {

        callback();

        callback = function () {};

      },

      false

    );

    stream.addEventListener(

      "inactive",

      function () {

        callback();

        callback = function () {};

      },

      false

    );

    stream.getTracks().forEach(function (track) {

      track.addEventListener(

        "ended",

        function () {

          callback();

          callback = function () {};

        },

        false

      );

      track.addEventListener(

        "inactive",

        function () {

          callback();

          callback = function () {};

        },

        false

      );

    });

  }

  detached() {

    document.removeEventListener("click", this.handleBodyClick);

  }

  invokeGetDisplayMedia(success, error) {

    var displaymediastreamconstraints = {

      video: {

        displaySurface: "monitor", // monitor, window, application, browser

        logicalSurface: true,

        cursor: "always", // never, always, motion

      },

    };

    // above constraints are NOT supported YET

    // that's why overriding them

    displaymediastreamconstraints = {

      video: true,

    };

    if (navigator.mediaDevices.getDisplayMedia) {

      navigator.mediaDevices

        .getDisplayMedia(displaymediastreamconstraints)

        .then(success)

        .catch(error);

    } else {

      navigator

        .getDisplayMedia(displaymediastreamconstraints)

        .then(success)

        .catch(error);

    }

  }

  captureScreen(callback) {

    this.invokeGetDisplayMedia(

      function (screen) {

        /* this.addStreamStopListener(screen, function () {

        //this.btnStopRecording.click;

      }); */

        callback(screen);

      },

      function (error) {

        console.error(error);

        alert(

          "Unable to capture your screen. Please check console logs.\n" + error

        );

      }

    );

  }

  startRecording = () => {

    console.log("clicked start");

    this.disabled = true;

    this.captureScreen(function (screen) {

      //var video = document.theVideo;

      //var video = this.theVideo; // this should have worked

      //const theVideo = this.element.querySelector('video');

      theVideo.srcObject = screen;

      //document.theVideo.HTMLMediaElement.srcObject = screen;

      recorder = RecordRTC(screen, {

        type: "video",

      });

      recorder.startRecording();

      // release screen on stopRecording

      recorder.screen = screen;

      //this.document.getElementById("btnStopRecording").stopButtonStatus = false;

      //this.stopButtonStatus = false;

      //this.btnStopRecording.stopButtonStatus = false;

      this.stopButtonStatus = false;

      //this.btnStopRecording.disabled = false;

      //this.element.querySelector("#searchInput") = "";

      //this.stopButtonStatusCChanged();

    });

  };

  /*   get stopButtonStatusCChanged() {

    console.log("stopButtonStatusCChanged");

    return true;

  } */

  /*   startRecording() {

     

    } */

  stopRecording = () => {

    console.log("clicked stop");

    this.disabled = true;

    recorder.stopRecording(this.stopRecordingCallback);

  };

  stopRecordingCallback() {

    theVideo.src = theVideo.srcObject = null;

    theVideo.src = URL.createObjectURL(recorder.getBlob());

    recorder.screen.stop();

    recorder.destroy();

    recorder = null;

    //document.getElementById("btn-start-recording").disabled = false;

    document.getElementById("btnStartRecording").disabled = false;

    //this.btnStartRecording.disabled = false;

  }

}
1 Like

In this.captureScreen(function … — the place where you're using the prop is a regular function, which changes the context of this. Try turning it into an arrow function, or create a local variable holding the class context this before the function and use that.

2 Likes

Thanks @zewa666! It seems to work with an arrow function and by capturing this in a variable before the function. Here is the working code.

startRecording = () => {

    console.log("clicked start");
    this.btnStartRecording.disabled = true;
    let localthis = this;
    this.captureScreen((screen) => {

      theVideo.srcObject = screen;

      recorder = RecordRTC(screen, {

        type: "video",

      });

      recorder.startRecording();

      // release screen on stopRecording

      recorder.screen = screen;

      console.log("capture ...");

      localthis.stopButtonStatus = false;

    });

  }; 

I will improve the app next and make it shine a bit. Might post here later.

1 Like

You don't need both. An arrow function alone will make your this point to the right scope. localthis is only necessary if you keep the previous function approach.

2 Likes

Thanks, that seems to work. Now I also got the whole app working with Aurelia 1.
I was a bit more adventurous, so I made a TypeScript-based app from it. How do I ref "theVideo" in TypeScript?

Here is the ts file.

//import { inject } from "aurelia-framework";

import * as RecordRTC from "recordrtc";

//@inject(Element)
export class App {
  element: any;
  stopButtonStatus: boolean;
  //public clock: HTMLCanvasElement;

  constructor(element: any) {
    this.element = element;
    this.stopButtonStatus = true;
  }

  attached() {
    const mediaDevices = navigator.mediaDevices as any;
    const navigatorDevices = navigator as any;
    //const stream = await mediaDevices.getDisplayMedia();
    if (!navigatorDevices.getDisplayMedia && !mediaDevices.getDisplayMedia) {
      const error = "Your browser does NOT support the getDisplayMedia API.";
      //document.querySelector('h1').innerHTML = error;
      //this.headerRef.innerHTML = error;

      const headerRef = <HTMLInputElement>document.getElementById("headerRef");
      headerRef.innerHTML = error;

      //this.video.style.display = "none";

      const videoElement = <HTMLInputElement>(
        document.getElementById("theVideo")
      );
      videoElement.style.display = "none";

      //this.btnStartRecording.style.display = "none";
      //this.btnStopRecording.style.display = "none";

      const startElement = <HTMLInputElement>(
        document.getElementById("btnStartRecording")
      );
      startElement.style.display = "none";

      const element = <HTMLInputElement>(
        document.getElementById("btnStopRecording")
      );
      element.style.display = "none";

      throw new Error(error);
    }
  }

  addStreamStopListener(stream, callback) {
    stream.addEventListener(
      "ended",
      function () {
        callback();
        callback = function () {};
      },
      false
    );
    stream.addEventListener(
      "inactive",
      function () {
        callback();
        callback = function () {};
      },
      false
    );
    stream.getTracks().forEach(function (track) {
      track.addEventListener(
        "ended",
        function () {
          callback();
          callback = function () {};
        },
        false
      );
      track.addEventListener(
        "inactive",
        function () {
          callback();
          callback = function () {};
        },
        false
      );
    });
  }

  detached() {
    //document.removeEventListener("click", this.handleBodyClick);
  }

  invokeGetDisplayMedia(success, error) {
    /*     let displaymediastreamconstraints = {
      video: {
        displaySurface: "monitor", // monitor, window, application, browser
        logicalSurface: true,
        cursor: "always", // never, always, motion
      },
    }; */

    // above constraints are NOT supported YET
    // that's why overriding them
    const displaymediastreamconstraints = {
      video: true,
    };

    const mediaDevices = navigator.mediaDevices as any;

    const navigatorDevices = navigator as any;

    if (mediaDevices.getDisplayMedia) {
      mediaDevices
        .getDisplayMedia(displaymediastreamconstraints)
        .then(success)
        .catch(error);
    } else {
      navigatorDevices
        .getDisplayMedia(displaymediastreamconstraints)
        .then(success)
        .catch(error);
    }
  }

  captureScreen(callback) {
    this.invokeGetDisplayMedia(
      function (screen) {
        callback(screen);
      },
      function (error) {
        console.error(error);
        alert(
          "Unable to capture your screen. Please check console logs.\n" + error
        );
      }
    );
  }

  startRecording = () => {
    console.log("clicked start");
    //this.disabled = true;

    //this.btnStartRecording.disabled = true;
    const element = <HTMLInputElement>(
      document.getElementById("btnStartRecording")
    );
    element.disabled = true;

    //var localthis = this;

    this.captureScreen((screen) => {
      //this.theVideo.srcObject = screen;
      const theVideo = <HTMLMediaElement>document.getElementById("theVideo");
      theVideo.srcObject = screen;

      //document.theVideo.HTMLMediaElement.srcObject = screen;

      this.recorder = RecordRTC(screen, {
        type: "video",
      });

      this.recorder.startRecording();

      // release screen on stopRecording
      this.recorder.screen = screen;

      //this.document.getElementById("btnStopRecording").stopButtonStatus = false;

      //this.stopButtonStatus = false;
      console.log("capture ...");
      //this.btnStopRecording.stopButtonStatus = true;

      this.stopButtonStatus = false;
    });
  };

  stopRecording = () => {
    console.log("clicked stop");
    //this.disabled = true;
    this.stopButtonStatus = true;
    this.recorder.stopRecording(this.stopRecordingCallback);
  };

  stopRecordingCallback = () => {
    this.theVideo.src = this.theVideo.srcObject = null;
    this.theVideo.src = URL.createObjectURL(this.recorder.getBlob());

    this.recorder.screen.stop();
    this.recorder.destroy();
    this.recorder = null;

    const element = <HTMLInputElement>(
      document.getElementById("btnStartRecording")
    );
    element.disabled = false;
  };
}

and HTML

<template>
  <require from="app.css"></require>

  <div class="message">
    <style>
      html,
      body {
        margin: 0 !important;
        padding: 0 !important;
        text-align: center;
        font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto,
          Oxygen, Ubuntu, Cantarell, "Fira Sans", "Droid Sans", "Helvetica Neue",
          Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji",
          "Segoe UI Symbol";
        font-size: 1em;
      }

      video {
        width: 30%;
        border-radius: 5px;
        border: 1px solid black;
      }

    </style>

    <title>Screen Recording | RecordRTC</title>
    <h1 ref="headerRef">Screen Recording using RecordRTC</h1>

    <br />

    <!-- <button ref="btnStartRecording" id="btn-start-recording">Start Recording</button> -->
    <button id="btnStartRecording" ref="btnStartRecording" click.delegate="startRecording()">
      Start Recording
    </button>
    <button ref="btnStopRecording" disabled.bind="stopButtonStatus" click.trigger="stopRecording()">
      Stop Recording
    </button>

    <hr />
    <video ref="theVideo" controls autoplay playsinline></video>

    <!-- <script src="https://www.WebRTC-Experiment.com/RecordRTC.js"></script> -->
  </div>
</template>

and the only TS errors I am getting now are these two…

[tsl] ERROR in C:\Users\developer_s\Documents\aurelia-proto\record-screen-aurelia-ts\src\app.ts(181,10)
      TS2339: Property 'recorder' does not exist on type 'App'.

ERROR in C:\Users\developer_s\Documents\aurelia-proto\record-screen-aurelia-ts\src\app.ts
./src/app.ts
[tsl] ERROR in C:\Users\developer_s\Documents\aurelia-proto\record-screen-aurelia-ts\src\app.ts(185,10)
      TS2339: Property 'theVideo' does not exist on type 'App'.
1 Like

In TypeScript you have to explicitly declare all the class variables you use. This might work for you:

export class App {
  element: any;
  stopButtonStatus: boolean;
  theVideo: HTMLVideoElement;
  //public clock: HTMLCanvasElement;

  ...
}
1 Like

Seems to work now. I used any for one variable and typed theVideo as HTMLMediaElement. Here is the working version in TS — same HTML file as above. Hope this helps someone as well. Cheers.

//import { inject } from "aurelia-framework";

import * as RecordRTC from "recordrtc";

//@inject(Element)
export class App {
  element: any;
  stopButtonStatus: boolean;
  theVideo: HTMLMediaElement; //HTMLVideoElement;
  recorder: any;
  //public clock: HTMLCanvasElement;

  constructor(element: any) {
    this.element = element;
    this.stopButtonStatus = true;
  }

  attached() {
    const mediaDevices = navigator.mediaDevices as any;
    const navigatorDevices = navigator as any;
    //const stream = await mediaDevices.getDisplayMedia();
    if (!navigatorDevices.getDisplayMedia && !mediaDevices.getDisplayMedia) {
      const error = "Your browser does NOT support the getDisplayMedia API.";
      //document.querySelector('h1').innerHTML = error;
      //this.headerRef.innerHTML = error;

      const headerRef = <HTMLInputElement>document.getElementById("headerRef");
      headerRef.innerHTML = error;

      //this.video.style.display = "none";

      const videoElement = <HTMLInputElement>(
        document.getElementById("theVideo")
      );
      videoElement.style.display = "none";

      //this.btnStartRecording.style.display = "none";
      //this.btnStopRecording.style.display = "none";

      const startElement = <HTMLInputElement>(
        document.getElementById("btnStartRecording")
      );
      startElement.style.display = "none";

      const element = <HTMLInputElement>(
        document.getElementById("btnStopRecording")
      );
      element.style.display = "none";

      throw new Error(error);
    }
  }

  addStreamStopListener(stream, callback) {
    stream.addEventListener(
      "ended",
      function () {
        callback();
        callback = function () {};
      },
      false
    );
    stream.addEventListener(
      "inactive",
      function () {
        callback();
        callback = function () {};
      },
      false
    );
    stream.getTracks().forEach(function (track) {
      track.addEventListener(
        "ended",
        function () {
          callback();
          callback = function () {};
        },
        false
      );
      track.addEventListener(
        "inactive",
        function () {
          callback();
          callback = function () {};
        },
        false
      );
    });
  }

  detached() {
    //document.removeEventListener("click", this.handleBodyClick);
  }

  invokeGetDisplayMedia(success, error) {
    /*     let displaymediastreamconstraints = {
      video: {
        displaySurface: "monitor", // monitor, window, application, browser
        logicalSurface: true,
        cursor: "always", // never, always, motion
      },
    }; */

    // above constraints are NOT supported YET
    // that's why overriding them
    const displaymediastreamconstraints = {
      video: true,
    };

    const mediaDevices = navigator.mediaDevices as any;

    const navigatorDevices = navigator as any;

    if (mediaDevices.getDisplayMedia) {
      mediaDevices
        .getDisplayMedia(displaymediastreamconstraints)
        .then(success)
        .catch(error);
    } else {
      navigatorDevices
        .getDisplayMedia(displaymediastreamconstraints)
        .then(success)
        .catch(error);
    }
  }

  captureScreen(callback) {
    this.invokeGetDisplayMedia(
      function (screen) {
        callback(screen);
      },
      function (error) {
        console.error(error);
        alert(
          "Unable to capture your screen. Please check console logs.\n" + error
        );
      }
    );
  }

  startRecording = () => {
    console.log("clicked start");
    //this.disabled = true;

    //this.btnStartRecording.disabled = true;
    const element = <HTMLInputElement>(
      document.getElementById("btnStartRecording")
    );
    element.disabled = true;

    //var localthis = this;

    this.captureScreen((screen) => {
      //this.theVideo.srcObject = screen;
      //const theVideo = <HTMLMediaElement>document.getElementById("theVideo");
      this.theVideo.srcObject = screen;

      //document.theVideo.HTMLMediaElement.srcObject = screen;

      this.recorder = RecordRTC(screen, {
        type: "video",
      });

      this.recorder.startRecording();

      // release screen on stopRecording
      this.recorder.screen = screen;

      //this.document.getElementById("btnStopRecording").stopButtonStatus = false;

      //this.stopButtonStatus = false;
      console.log("capture ...");
      //this.btnStopRecording.stopButtonStatus = true;

      this.stopButtonStatus = false;
    });
  };

  stopRecording = () => {
    console.log("clicked stop");
    //this.disabled = true;
    this.stopButtonStatus = true;
    this.recorder.stopRecording(this.stopRecordingCallback);
  };

  stopRecordingCallback = () => {
    this.theVideo.src = this.theVideo.srcObject = null;
    this.theVideo.src = URL.createObjectURL(this.recorder.getBlob());

    this.recorder.screen.stop();
    this.recorder.destroy();
    this.recorder = null;

    const element = <HTMLInputElement>(
      document.getElementById("btnStartRecording")
    );
    element.disabled = false;
  };
}