diff --git a/README.md b/README.md
index 54270cde..4aebd5b0 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,7 @@
-# @nativescript/\* plugins
-
-```
-npm run setup
-npm start
-```
-
 - [@nativescript/animated-circle](packages/animated-circle/README.md)
 - [@nativescript/appavailability](packages/appavailability/README.md)
 - [@nativescript/apple-sign-in](packages/apple-sign-in/README.md)
+- [@nativescript/audio](packages/audio/README.md)
 - [@nativescript/auto-fit-text](packages/auto-fit-text/README.md)
 - [@nativescript/background-http](packages/background-http/README.md)
 - [@nativescript/biometrics](packages/biometrics/README.md)
diff --git a/apps/demo-angular/package.json b/apps/demo-angular/package.json
index e52d57d8..92d05ee0 100644
--- a/apps/demo-angular/package.json
+++ b/apps/demo-angular/package.json
@@ -2,7 +2,7 @@
"main": "./src/main.ts",
"dependencies": {
"@nativescript/core": "file:../../node_modules/@nativescript/core",
- "@nativescript/pdf": "file:../../dist/packages/pdf",
+ "@nativescript/audio": "file:../../dist/packages/audio",
"@nativescript/animated-circle": "file:../../dist/packages/animated-circle",
"@nativescript/appavailability": "file:../../dist/packages/appavailability",
"@nativescript/apple-sign-in": "file:../../dist/packages/apple-sign-in",
@@ -30,6 +30,7 @@
"@nativescript/iqkeyboardmanager": "file:../../dist/packages/iqkeyboardmanager",
"@nativescript/local-notifications": "file:../../dist/packages/local-notifications",
"@nativescript/localize": "file:../../dist/packages/localize",
+ "@nativescript/pdf": "file:../../dist/packages/pdf",
"@nativescript/picker": "file:../../dist/packages/picker",
"@nativescript/shared-notification-delegate": "file:../../dist/packages/shared-notification-delegate",
"@nativescript/social-share": "file:../../dist/packages/social-share",
diff --git a/apps/demo-angular/src/app-routing.module.ts b/apps/demo-angular/src/app-routing.module.ts
index 9056cbdd..faf5054e 100644
--- a/apps/demo-angular/src/app-routing.module.ts
+++ b/apps/demo-angular/src/app-routing.module.ts
@@ -10,6 +10,7 @@ const routes: Routes = [
{ path: 'animated-circle', loadChildren: () => import('./plugin-demos/animated-circle.module').then((m) => m.AnimatedCircleModule) },
{ path: 'appavailability', loadChildren: () => import('./plugin-demos/appavailability.module').then((m) => m.AppavailabilityModule) },
{ path: 'apple-sign-in', loadChildren: () => import('./plugin-demos/apple-sign-in.module').then((m) => m.AppleSignInModule) },
+ { path: 'audio', loadChildren: () => import('./plugin-demos/audio.module').then((m) => m.AudioModule) },
{ path: 'auto-fit-text', loadChildren: () => import('./plugin-demos/auto-fit-text.module').then((m) => m.AutoFitTextModule) },
{ path: 'background-http', loadChildren: () => import('./plugin-demos/background-http.module').then((m) => m.BackgroundHttpModule) },
{ path: 'biometrics', loadChildren: () => import('./plugin-demos/biometrics.module').then((m) => m.BiometricsModule) },
diff --git a/apps/demo-angular/src/home.component.ts b/apps/demo-angular/src/home.component.ts
index a8088504..9ebd4745 100644
--- a/apps/demo-angular/src/home.component.ts
+++ b/apps/demo-angular/src/home.component.ts
@@ -15,6 +15,9 @@ export class HomeComponent {
{
name: 'apple-sign-in',
},
+ {
+ name: 'audio',
+ },
{
name: 'auto-fit-text',
},
diff --git a/apps/demo-angular/src/plugin-demos/audio.component.html b/apps/demo-angular/src/plugin-demos/audio.component.html
new file mode 100644
index 00000000..baf2a3c1
--- /dev/null
+++ b/apps/demo-angular/src/plugin-demos/audio.component.html
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/apps/demo-angular/src/plugin-demos/audio.component.ts b/apps/demo-angular/src/plugin-demos/audio.component.ts
new file mode 100644
index 00000000..a77b4d46
--- /dev/null
+++ b/apps/demo-angular/src/plugin-demos/audio.component.ts
@@ -0,0 +1,16 @@
+import { Component, NgZone } from '@angular/core';
+import { DemoSharedAudio } from '@demo/shared';
+
+@Component({
+ selector: 'demo-audio',
+ templateUrl: 'audio.component.html',
+})
+export class AudioComponent {
+ demoShared: DemoSharedAudio;
+
+ constructor(private _ngZone: NgZone) {}
+
+ ngOnInit() {
+ this.demoShared = new DemoSharedAudio();
+ }
+}
diff --git a/apps/demo-angular/src/plugin-demos/audio.module.ts b/apps/demo-angular/src/plugin-demos/audio.module.ts
new file mode 100644
index 00000000..69799827
--- /dev/null
+++ b/apps/demo-angular/src/plugin-demos/audio.module.ts
@@ -0,0 +1,10 @@
+import { NgModule, NO_ERRORS_SCHEMA } from '@angular/core';
+import { NativeScriptCommonModule, NativeScriptRouterModule } from '@nativescript/angular';
+import { AudioComponent } from './audio.component';
+
+@NgModule({
+ imports: [NativeScriptCommonModule, NativeScriptRouterModule.forChild([{ path: '', component: AudioComponent }])],
+ declarations: [AudioComponent],
+ schemas: [NO_ERRORS_SCHEMA],
+})
+export class AudioModule {}
diff --git a/apps/demo/package.json b/apps/demo/package.json
index 7692462b..6f023de1 100644
--- a/apps/demo/package.json
+++ b/apps/demo/package.json
@@ -5,7 +5,7 @@
"repository": "",
"dependencies": {
"@nativescript/core": "file:../../node_modules/@nativescript/core",
- "@nativescript/pdf": "file:../../packages/pdf",
+ "@nativescript/audio": "file:../../packages/audio",
"@nativescript/animated-circle": "file:../../packages/animated-circle",
"@nativescript/appavailability": "file:../../packages/appavailability",
"@nativescript/apple-sign-in": "file:../../packages/apple-sign-in",
@@ -33,6 +33,7 @@
"@nativescript/iqkeyboardmanager": "file:../../packages/iqkeyboardmanager",
"@nativescript/local-notifications": "file:../../packages/local-notifications",
"@nativescript/localize": "file:../../packages/localize",
+ "@nativescript/pdf": "file:../../packages/pdf",
"@nativescript/picker": "file:../../packages/picker",
"@nativescript/shared-notification-delegate": "file:../../packages/shared-notification-delegate",
"@nativescript/social-share": "file:../../packages/social-share",
diff --git a/apps/demo/src/main-page.xml b/apps/demo/src/main-page.xml
index d3dadfe5..80d39511 100644
--- a/apps/demo/src/main-page.xml
+++ b/apps/demo/src/main-page.xml
@@ -8,6 +8,7 @@
+
diff --git a/apps/demo/src/plugin-demos/audio.ts b/apps/demo/src/plugin-demos/audio.ts
new file mode 100644
index 00000000..8549ba5f
--- /dev/null
+++ b/apps/demo/src/plugin-demos/audio.ts
@@ -0,0 +1,10 @@
+import { Observable, EventData, Page } from '@nativescript/core';
+import { DemoSharedAudio } from '@demo/shared';
+import {} from '@nativescript/audio';
+
+export function navigatingTo(args: EventData) {
+ const page = <Page>args.object;
+ page.bindingContext = new DemoModel();
+}
+
+export class DemoModel extends DemoSharedAudio {}
diff --git a/apps/demo/src/plugin-demos/audio.xml b/apps/demo/src/plugin-demos/audio.xml
new file mode 100644
index 00000000..2bbdae76
--- /dev/null
+++ b/apps/demo/src/plugin-demos/audio.xml
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/packages/audio/.eslintrc.json b/packages/audio/.eslintrc.json
new file mode 100644
index 00000000..be41074b
--- /dev/null
+++ b/packages/audio/.eslintrc.json
@@ -0,0 +1,18 @@
+{
+ "extends": ["../../.eslintrc.json"],
+ "ignorePatterns": ["!**/*", "node_modules/**/*"],
+ "overrides": [
+ {
+ "files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
+ "rules": {}
+ },
+ {
+ "files": ["*.ts", "*.tsx"],
+ "rules": {}
+ },
+ {
+ "files": ["*.js", "*.jsx"],
+ "rules": {}
+ }
+ ]
+}
diff --git a/packages/audio/README.md b/packages/audio/README.md
new file mode 100644
index 00000000..7f20cda8
--- /dev/null
+++ b/packages/audio/README.md
@@ -0,0 +1,13 @@
+# @nativescript/audio
+
+```cli
+ns plugin add @nativescript/audio
+```
+
+## Usage
+
+// TODO
+
+## License
+
+Apache License Version 2.0
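
The `Usage` section in this new README is still a TODO; below is a minimal sketch of what it could show, based only on the `TNSPlayer` API and `AudioPlayerOptions` declared later in this diff (the `~/audio/song.mp3` path is illustrative):

```typescript
import { TNSPlayer } from '@nativescript/audio';

const player = new TNSPlayer();

player
  .playFromFile({
    audioFile: '~/audio/song.mp3', // illustrative path, resolved relative to the app folder
    loop: false,
    completeCallback: () => console.log('finished playing'),
    errorCallback: (err) => console.error('playback error', err),
  })
  .then(() => console.log('playback started'))
  .catch((err) => console.error(err));
```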
diff --git a/packages/audio/android/player.ts b/packages/audio/android/player.ts
new file mode 100644
index 00000000..35c643dd
--- /dev/null
+++ b/packages/audio/android/player.ts
@@ -0,0 +1,564 @@
+import { Application, EventData, Observable, Utils } from '@nativescript/core';
+import { resolveAudioFilePath, TNSPlayerI } from '../common';
+import { AudioPlayerEvents, AudioPlayerOptions } from '../options';
+
+export enum AudioFocusDurationHint {
+ AUDIOFOCUS_GAIN = android.media.AudioManager.AUDIOFOCUS_GAIN,
+ AUDIOFOCUS_GAIN_TRANSIENT = android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT,
+ AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK = android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK,
+ AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE = android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE,
+}
+
+const defaultAudioFocusManagerOptions: AudioFocusManagerOptions = {
+ durationHint: AudioFocusDurationHint.AUDIOFOCUS_GAIN,
+ usage: android.media.AudioAttributes.USAGE_MEDIA,
+ contentType: android.media.AudioAttributes.CONTENT_TYPE_MUSIC,
+};
+
+export interface AudioFocusManagerOptions {
+ durationHint?: AudioFocusDurationHint;
+ usage?: number; // android.media.AudioAttributes.USAGE_MEDIA
+ contentType?: number; // android.media.AudioAttributes.CONTENT_TYPE_MUSIC
+}
+export interface AudioFocusChangeEventData extends EventData {
+ focusChange: number;
+}
+
+export class AudioFocusManager extends Observable {
+ private _audioFocusRequest: android.media.AudioFocusRequest;
+ private _mAudioFocusGranted: boolean = false;
+ private _durationHint: AudioFocusDurationHint;
+ private _audioPlayerSet = new Set<TNSPlayer>();
+
+ constructor(options?: AudioFocusManagerOptions) {
+ super();
+ options = { ...defaultAudioFocusManagerOptions, ...(options || {}) };
+ this._durationHint = options.durationHint;
+ if (android.os.Build.VERSION.SDK_INT < 26) {
+ return;
+ }
+ // Request audio focus for playback
+
+ const playbackAttributes = new android.media.AudioAttributes.Builder().setUsage(options.usage).setContentType(options.contentType).build();
+ this._audioFocusRequest = new android.media.AudioFocusRequest.Builder(options.durationHint).setAudioAttributes(playbackAttributes).setAcceptsDelayedFocusGain(true).setOnAudioFocusChangeListener(this._mOnAudioFocusChangeListener).build();
+ }
+
+ private _mOnAudioFocusChangeListener = new android.media.AudioManager.OnAudioFocusChangeListener({
+ onAudioFocusChange: (focusChange: number) => {
+ this.notify({
+ eventName: 'audioFocusChange',
+ object: this,
+ focusChange,
+ });
+ },
+ });
+
+ private needsFocus(): boolean {
+ return this._audioPlayerSet.size > 0;
+ }
+ /**
+ *
+ * @param owner player requesting focus
+ * @returns if we have focus or not
+ */
+ requestAudioFocus(owner: TNSPlayer): boolean {
+ // If it does not enter the condition block, means that we already
+ // have focus. Therefore we have to start with `true`.
+ let result = true;
+ let focusResult = null;
+ if (!this._mAudioFocusGranted) {
+ const ctx = this._getAndroidContext();
+ const am = ctx.getSystemService(android.content.Context.AUDIO_SERVICE) as android.media.AudioManager;
+
+ // Request audio focus for playback
+ if (android.os.Build.VERSION.SDK_INT >= 26) {
+ focusResult = am.requestAudioFocus(this._audioFocusRequest);
+ } else {
+ focusResult = am.requestAudioFocus(this._mOnAudioFocusChangeListener, android.media.AudioManager.STREAM_MUSIC, this._durationHint);
+ }
+
+ if (focusResult === android.media.AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
+ result = true;
+ } else {
+ result = false;
+ }
+ }
+
+ this._audioPlayerSet.add(owner);
+ this._mAudioFocusGranted = result;
+
+ return result;
+ }
+ /**
+ * Abandons the audio focus for this player
+ * Audio focus request will not be made unless owner has previously requested focus or is null
+ * @param owner either a player or null if you want to manually release the audio focus
+ * @returns if we still have audio focus or not
+ */
+ abandonAudioFocus(owner: TNSPlayer | null): boolean {
+ if (owner) {
+ if (!this._audioPlayerSet.has(owner)) {
+ return this._mAudioFocusGranted;
+ }
+ this._audioPlayerSet.delete(owner);
+ }
+ if (this.needsFocus() || !this._mAudioFocusGranted) {
+ return this._mAudioFocusGranted;
+ }
+ const ctx = this._getAndroidContext();
+ const am = ctx.getSystemService(android.content.Context.AUDIO_SERVICE);
+ let result = null;
+
+ if (android.os.Build.VERSION.SDK_INT >= 26) {
+ console.log('abandonAudioFocusRequest...', this._audioFocusRequest);
+ result = am.abandonAudioFocusRequest(this._audioFocusRequest);
+ console.log('abandonAudioFocusRequest...result...', result);
+
+ // this._audioFocusRequest = null;
+ } else {
+ console.log('abandonAudioFocus...', this._mOnAudioFocusChangeListener);
+ result = am.abandonAudioFocus(this._mOnAudioFocusChangeListener);
+ }
+ if (result === android.media.AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
+ this._mAudioFocusGranted = false;
+ } else {
+ console.log('Failed to abandon audio focus.');
+ }
+ return this._mAudioFocusGranted;
+ }
+ private _getAndroidContext() {
+ let ctx = Application.android.context;
+ if (!ctx) {
+ ctx = Application.getNativeApplication().getApplicationContext();
+ }
+
+ if (ctx === null) {
+ setTimeout(() => {
+ this._getAndroidContext();
+ }, 200);
+
+ return;
+ }
+
+ return ctx;
+ }
+}
+
+let globalMixingManager: AudioFocusManager | null;
+
+function getGlobalMixingManager(): AudioFocusManager {
+ if (!globalMixingManager) {
+ globalMixingManager = new AudioFocusManager();
+ }
+ return globalMixingManager;
+}
+
+export class TNSPlayer implements TNSPlayerI {
+ private _mediaPlayer: android.media.MediaPlayer;
+ private _lastPlayerVolume; // ref to the last volume setting so we can reset after ducking
+ private _wasPlaying = false;
+ private _events: Observable;
+ private _options: AudioPlayerOptions;
+ private _audioFocusManager: AudioFocusManager | null;
+
+ constructor(durationHint: AudioFocusDurationHint | AudioFocusManager = AudioFocusDurationHint.AUDIOFOCUS_GAIN) {
+ if (!(durationHint instanceof AudioFocusManager)) {
+ this.setAudioFocusManager(
+ new AudioFocusManager({
+ durationHint: durationHint,
+ })
+ );
+ } else {
+ this.setAudioFocusManager(durationHint);
+ }
+ }
+
+ public get events() {
+ if (!this._events) {
+ this._events = new Observable();
+ }
+ return this._events;
+ }
+
+ get android(): any {
+ return this._player;
+ }
+
+ get volume(): number {
+ // TODO: find better way to get individual player volume
+ const ctx = this._getAndroidContext();
+ const mgr = ctx.getSystemService(android.content.Context.AUDIO_SERVICE);
+ return mgr.getStreamVolume(android.media.AudioManager.STREAM_MUSIC);
+ }
+
+ set volume(value: number) {
+ if (this._player && value >= 0) {
+ this._player.setVolume(value, value);
+ }
+ }
+
+ public get duration(): number {
+ if (this._player) {
+ return this._player.getDuration();
+ } else {
+ return 0;
+ }
+ }
+
+ get currentTime(): number {
+ return this._player ? this._player.getCurrentPosition() : 0;
+ }
+
+ public setAudioFocusManager(manager: AudioFocusManager) {
+ if (manager === this._audioFocusManager) {
+ return;
+ }
+ this._audioFocusManager?.off('audioFocusChange', this._onAudioFocusChange, this);
+ this._audioFocusManager?.abandonAudioFocus(this);
+ this._audioFocusManager = manager;
+ this._audioFocusManager?.on('audioFocusChange', this._onAudioFocusChange, this);
+ }
+
+ /**
+ * Initializes the player with options, will not start playing audio.
+ * @param options [AudioPlayerOptions]
+ */
+ public initFromFile(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ options.autoPlay = false;
+ this.playFromFile(options).then(resolve, reject);
+ });
+ }
+
+ public playFromFile(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ this._options = options;
+ if (options.autoPlay !== false) {
+ options.autoPlay = true;
+ }
+
+ const audioPath = resolveAudioFilePath(options.audioFile);
+ this._player.setAudioStreamType(android.media.AudioManager.STREAM_MUSIC);
+ this._player.reset();
+ this._player.setDataSource(audioPath);
+
+ // check if local file or remote - local then `prepare` is okay https://developer.android.com/reference/android/media/MediaPlayer.html#prepare()
+ if (Utils.isFileOrResourcePath(audioPath)) {
+ this._player.prepare();
+ } else {
+ this._player.prepareAsync();
+ }
+
+ // On Info
+ if (options.infoCallback) {
+ this._player.setOnInfoListener(
+ new android.media.MediaPlayer.OnInfoListener({
+ onInfo: (player: any, info: number, extra: number) => {
+ options.infoCallback({ player, info, extra });
+ return true;
+ },
+ })
+ );
+ }
+
+ // On Prepared
+ this._player.setOnPreparedListener(
+ new android.media.MediaPlayer.OnPreparedListener({
+ onPrepared: (mp) => {
+ if (options.autoPlay) {
+ this.play();
+ }
+ resolve(null);
+ },
+ })
+ );
+ } catch (ex) {
+ this._abandonAudioFocus();
+ reject(ex);
+ }
+ });
+ }
+
+ /**
+ * Initializes the player with options, will not start playing audio.
+ * @param options
+ */
+ public initFromUrl(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ options.autoPlay = false;
+ this.playFromUrl(options).then(resolve, reject);
+ });
+ }
+
+ public playFromUrl(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ resolve(this.playFromFile(options));
+ });
+ }
+
+ public pause(): Promise<boolean> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._player && this._player.isPlaying()) {
+ this._player.pause();
+ // We abandon the audio focus but we still preserve
+ // the MediaPlayer so we can resume it in the future
+ this._abandonAudioFocus(true);
+ this._sendEvent(AudioPlayerEvents.paused);
+ }
+
+ resolve(true);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public play(): Promise<boolean> {
+ return new Promise((resolve, reject) => {
+ try {
+ console.log('player play()');
+ if (this._player && !this._player.isPlaying()) {
+ // request audio focus, this will setup the onAudioFocusChangeListener
+ if (this._options.audioMixing) {
+ // we're mixing audio, so we use a global mixing manager
+ // all players need to set this to true if they're supporting mixing
+ this.setAudioFocusManager(getGlobalMixingManager());
+ // TODO: maybe reset to a default audio manager?
+ }
+ const audioFocusGranted = this._requestAudioFocus();
+ if (!audioFocusGranted) {
+ throw new Error('Could not request audio focus');
+ }
+
+ this._sendEvent(AudioPlayerEvents.started);
+ // set volume controls
+ // https://developer.android.com/reference/android/app/Activity.html#setVolumeControlStream(int)
+ Application.android.foregroundActivity.setVolumeControlStream(android.media.AudioManager.STREAM_MUSIC);
+
+ // register the receiver so when calls or another app takes main audio focus the player pauses
+ Application.android.registerBroadcastReceiver(android.media.AudioManager.ACTION_AUDIO_BECOMING_NOISY, (context: android.content.Context, intent: android.content.Intent) => {
+ this.pause();
+ });
+
+ if (this._options?.pitch) {
+ const playBackParams = new android.media.PlaybackParams();
+ playBackParams.setPitch(this._options!.pitch);
+ this._player.setPlaybackParams(playBackParams);
+ }
+
+ this._player.start();
+ }
+ resolve(true);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public resume(): void {
+ if (this._player) {
+ // We call play so it can request audio focus
+ this.play();
+ this._sendEvent(AudioPlayerEvents.started);
+ }
+ }
+
+ public seekTo(time: number): Promise<boolean> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._player) {
+ time = time * 1000;
+ this._player.seekTo(time);
+ this._sendEvent(AudioPlayerEvents.seek);
+ }
+ resolve(true);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public changePlayerSpeed(speed) {
+ // this checks on API 23 and up
+ if (android.os.Build.VERSION.SDK_INT >= 23 && this._player) {
+ if (this._player?.isPlaying()) {
+ (this._player as any).setPlaybackParams((this._player as any).getPlaybackParams().setSpeed(speed));
+ } else {
+ (this._player as any).setPlaybackParams((this._player as any).getPlaybackParams().setSpeed(speed));
+ this._player?.pause();
+ }
+ } else {
+ console.warn('Android device API is not 23+. Cannot set the playbackRate on lower Android APIs.');
+ }
+ }
+
+ public dispose(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._player) {
+ this._player.stop();
+ this._player.reset();
+ // Remove _options since we are back to the Idle state
+ // (Refer to: https://developer.android.com/reference/android/media/MediaPlayer#state-diagram)
+ this._options = undefined;
+ // unregister broadcast receiver
+ Application.android.unregisterBroadcastReceiver(android.media.AudioManager.ACTION_AUDIO_BECOMING_NOISY);
+
+ this._abandonAudioFocus();
+ this.setAudioFocusManager(null);
+ }
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public isAudioPlaying(): boolean {
+ if (this._player) {
+ return this._player.isPlaying();
+ } else {
+ return false;
+ }
+ }
+
+ public getAudioTrackDuration(): Promise<string> {
+ return new Promise((resolve, reject) => {
+ try {
+ const duration = this._player ? this._player.getDuration() : 0;
+ resolve(duration.toString());
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ /**
+ * Notify events by name and optionally pass data
+ */
+ private _sendEvent(eventName: string, data?: any) {
+ if (this.events) {
+ this.events.notify({
+ eventName,
+ object: this,
+ data: data,
+ });
+ }
+ }
+
+ /**
+ * Helper method to ensure audio focus.
+ */
+ private _requestAudioFocus(): boolean {
+ return this._audioFocusManager?.requestAudioFocus(this);
+ }
+
+ private _abandonAudioFocus(preserveMP: boolean = false): void {
+ this._audioFocusManager?.abandonAudioFocus(this);
+
+ // Normally we will preserve the MediaPlayer only when pausing
+ if (this._mediaPlayer && !preserveMP) {
+ this._mediaPlayer.release();
+ this._mediaPlayer = undefined;
+ }
+ }
+
+ private _getAndroidContext() {
+ let ctx = Application.android.context;
+ if (!ctx) {
+ ctx = Application.getNativeApplication().getApplicationContext();
+ }
+
+ if (ctx === null) {
+ setTimeout(() => {
+ this._getAndroidContext();
+ }, 200);
+
+ return;
+ }
+
+ return ctx;
+ }
+ /**
+ * This getter will instantiate the MediaPlayer if needed
+ * and register the listeners. This is done here to avoid
+ * code duplication. This is also the reason why we have
+ * a `_options`
+ */
+ private get _player() {
+ if (!this._mediaPlayer && this._options) {
+ this._mediaPlayer = new android.media.MediaPlayer();
+
+ this._mediaPlayer.setOnCompletionListener(
+ new android.media.MediaPlayer.OnCompletionListener({
+ onCompletion: (mp) => {
+ if (this._options && this._options.completeCallback) {
+ if (this._options.loop === true) {
+ mp.seekTo(5);
+ mp.start();
+ }
+ this._options.completeCallback({ player: mp });
+ }
+
+ if (this._options && !this._options.loop) {
+ // Make sure that we abandon audio focus when playback stops
+ this._abandonAudioFocus(true);
+ }
+ },
+ })
+ );
+
+ this._mediaPlayer.setOnErrorListener(
+ new android.media.MediaPlayer.OnErrorListener({
+ onError: (player: any, error: number, extra: number) => {
+ if (this._options && this._options.errorCallback) {
+ this._options.errorCallback({ player, error, extra });
+ }
+ this.dispose();
+ return true;
+ },
+ })
+ );
+ }
+
+ return this._mediaPlayer;
+ }
+
+ private _onAudioFocusChange(data: AudioFocusChangeEventData) {
+ const focusChange = data.focusChange;
+ switch (focusChange) {
+ case android.media.AudioManager.AUDIOFOCUS_GAIN:
+ // Set volume level to desired levels
+ // if last volume more than 10 just set to 1.0 float
+ if (this._lastPlayerVolume && this._lastPlayerVolume >= 10) {
+ this.volume = 1.0;
+ } else if (this._lastPlayerVolume) {
+ this.volume = parseFloat('0.' + this._lastPlayerVolume.toString());
+ }
+
+ if (this._wasPlaying) {
+ this.resume();
+ }
+ break;
+ case android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
+ // You have audio focus for a short time
+ break;
+ case android.media.AudioManager.AUDIOFOCUS_LOSS:
+ this._wasPlaying = this._player?.isPlaying() ?? false;
+ this.pause();
+ break;
+ case android.media.AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
+ // Temporary loss of audio focus - expect to get it back - you can keep your resources around
+ this._wasPlaying = this._player?.isPlaying() ?? false;
+ this.pause();
+ break;
+ case android.media.AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
+ // Lower the volume, keep playing
+ this._lastPlayerVolume = this.volume;
+ this.volume = 0.2;
+ break;
+ }
+ }
+}
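
The doc comments on `requestAudioFocus`/`abandonAudioFocus` above describe how players share focus through a manager. A sketch (Android only, since `AudioFocusDurationHint` and the `audioFocusChange` event are defined in this file) of two players sharing one ducking-friendly manager:

```typescript
import { AudioFocusDurationHint, AudioFocusManager, TNSPlayer } from '@nativescript/audio';

// One shared manager; other apps may keep playing at reduced volume ("ducking").
const sharedFocus = new AudioFocusManager({
  durationHint: AudioFocusDurationHint.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK,
});

sharedFocus.on('audioFocusChange', (data: any) => {
  // data.focusChange is one of the android.media.AudioManager.AUDIOFOCUS_* constants
  console.log('audio focus changed:', data.focusChange);
});

// Both players request focus through the same manager when play() is called;
// focus is only abandoned once the last player in the manager's set lets go.
const music = new TNSPlayer(sharedFocus);
const voiceOver = new TNSPlayer(sharedFocus);
```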
diff --git a/packages/audio/android/recorder.ts b/packages/audio/android/recorder.ts
new file mode 100644
index 00000000..1551fce9
--- /dev/null
+++ b/packages/audio/android/recorder.ts
@@ -0,0 +1,167 @@
+import { Application } from '@nativescript/core';
+import { hasPermission, requestPermission } from 'nativescript-permissions';
+import { TNSRecordI } from '../common';
+import { AudioRecorderOptions } from '../options';
+
+export class TNSRecorder implements TNSRecordI {
+ private _recorder: any;
+
+ get android() {
+ return this._recorder;
+ }
+
+ public static CAN_RECORD(): boolean {
+ const pManager = Application.android.context.getPackageManager();
+ const canRecord = pManager.hasSystemFeature(android.content.pm.PackageManager.FEATURE_MICROPHONE);
+ if (canRecord) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ public requestRecordPermission(explanation = '') {
+ return new Promise(async (resolve, reject) => {
+ try {
+ await requestPermission((android as any).Manifest.permission.RECORD_AUDIO).catch((err) => {
+ reject(err);
+ });
+ resolve(null);
+ } catch (error) {
+ reject(error);
+ }
+ });
+ }
+
+ public hasRecordPermission() {
+ const permission = hasPermission((android as any).Manifest.permission.RECORD_AUDIO);
+ return permission === true;
+ }
+
+ public start(options: AudioRecorderOptions): Promise<any> {
+ return new Promise(async (resolve, reject) => {
+ try {
+ // bake the permission into this so the dev doesn't have to call it
+ await this.requestRecordPermission().catch((err) => {
+ console.log(err);
+ reject('Permission to record audio is not granted.');
+ });
+
+ if (this._recorder) {
+ // reset for reuse
+ this._recorder.reset();
+ } else {
+ this._recorder = new android.media.MediaRecorder();
+ }
+
+ const audioSource = options.source ? options.source : android.media.MediaRecorder.AudioSource.DEFAULT; // https://developer.android.com/reference/android/media/MediaRecorder.AudioSource
+ this._recorder.setAudioSource(audioSource);
+
+ const outFormat = options.format ? options.format : android.media.AudioFormat.ENCODING_PCM_16BIT; // https://developer.android.com/reference/android/media/AudioFormat#ENCODING_PCM_16BIT
+ this._recorder.setOutputFormat(outFormat);
+
+ const encoder = options.encoder ? options.encoder : android.media.MediaRecorder.AudioEncoder.AAC; // https://developer.android.com/reference/android/media/MediaRecorder.AudioEncoder#AAC
+ this._recorder.setAudioEncoder(encoder);
+
+ if (options.channels) {
+ this._recorder.setAudioChannels(options.channels);
+ }
+
+ let sampleRate = options.sampleRate ? options.sampleRate : 44100;
+ this._recorder.setAudioSamplingRate(sampleRate);
+
+ let bitRate = options.bitRate ? options.bitRate : 128000;
+ this._recorder.setAudioEncodingBitRate(bitRate);
+
+ if (options.maxDuration) {
+ this._recorder.setMaxDuration(options.maxDuration);
+ }
+
+ this._recorder.setOutputFile(options.filename);
+
+ // On Error
+ this._recorder.setOnErrorListener(
+ new android.media.MediaRecorder.OnErrorListener({
+ onError: (recorder: any, error: number, extra: number) => {
+ options.errorCallback({ recorder, error, extra });
+ },
+ })
+ );
+
+ // On Info
+ this._recorder.setOnInfoListener(
+ new android.media.MediaRecorder.OnInfoListener({
+ onInfo: (recorder: any, info: number, extra: number) => {
+ options.infoCallback({ recorder, info, extra });
+ },
+ })
+ );
+
+ this._recorder.prepare();
+ this._recorder.start();
+
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public getMeters(): number {
+ if (this._recorder != null) return this._recorder.getMaxAmplitude();
+ else return 0;
+ }
+
+ public pause(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.pause();
+ }
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public resume(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.resume();
+ }
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public stop(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.stop();
+ }
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public dispose(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.release();
+ }
+ this._recorder = undefined;
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+}
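
A sketch of driving `TNSRecorder` with the `AudioRecorderOptions` from `options.ts`; the output file name and the five-second cap are arbitrary choices for the example:

```typescript
import { knownFolders, path } from '@nativescript/core';
import { TNSRecorder } from '@nativescript/audio';

async function recordClip() {
  if (!TNSRecorder.CAN_RECORD()) {
    console.log('this device cannot record audio');
    return;
  }

  const recorder = new TNSRecorder();

  await recorder.start({
    filename: path.join(knownFolders.documents().path, 'clip.m4a'), // arbitrary output file
    metering: true,
    maxDuration: 5000, // milliseconds, per AudioRecorderOptions
    errorCallback: (err) => console.error('recorder error', err),
    infoCallback: (info) => console.log('recorder info', info),
  });

  // ...sometime later
  await recorder.stop();
  await recorder.dispose();
}
```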
diff --git a/packages/audio/common.ts b/packages/audio/common.ts
new file mode 100644
index 00000000..445ef65b
--- /dev/null
+++ b/packages/audio/common.ts
@@ -0,0 +1,131 @@
+import { knownFolders, path as nsFilePath, Utils } from '@nativescript/core';
+import { AudioPlayerOptions, AudioRecorderOptions } from './options';
+
+export interface TNSPlayerI {
+ /**
+ * native instance getters
+ */
+ readonly ios?: any;
+ readonly android?: any;
+
+ /**
+ * Volume getter/setter
+ */
+ volume: any;
+
+ /**
+ * Starts playing audio file from local app files.
+ */
+ playFromFile(options: AudioPlayerOptions): Promise<any>;
+
+ /**
+ * Starts playing audio file from url
+ */
+ playFromUrl(options: AudioPlayerOptions): Promise<any>;
+
+ /**
+ * Play audio file.
+ */
+ play(): Promise<boolean>;
+
+ /**
+ * Pauses playing audio file.
+ */
+ pause(): Promise<boolean>;
+
+ /**
+ * Seeks to specific time.
+ */
+ seekTo(time: number): Promise<boolean>;
+
+ /**
+ * Releases resources from the audio player.
+ */
+ dispose(): Promise<any>;
+
+ /**
+ * Check if the audio is actively playing.
+ */
+ isAudioPlaying(): boolean;
+
+ /**
+ * Get the duration of the audio file playing.
+ */
+ getAudioTrackDuration(): Promise<string>;
+
+ /**
+ * current time
+ */
+ readonly currentTime: number;
+}
+
+export interface TNSRecordI {
+ /**
+ * Starts the native audio recording control.
+ */
+ start(options: AudioRecorderOptions): Promise<any>;
+
+ /**
+ * Pauses the native audio recording control.
+ */
+ pause(): Promise<any>;
+
+ /**
+ * Resumes the native audio recording control.
+ */
+ resume(): Promise<any>;
+
+ /**
+ * Stops the native audio recording control.
+ */
+ stop(): Promise<any>;
+
+ /**
+ * Releases resources from the recorder.
+ */
+ dispose(): Promise<any>;
+}
+
+/**
+ * Helper function to determine if string is a url.
+ * @param value [string]
+ */
+export function isStringUrl(value: string): boolean {
+ // check if artURL is a url or local file
+ let isURL = false;
+ if (value.indexOf('://') !== -1) {
+ if (value.indexOf('res://') === -1) {
+ isURL = true;
+ }
+ }
+ if (isURL === true) {
+ return true;
+ } else {
+ return false;
+ }
+}
+
+/**
+ * Will determine if a string is a url or a local path. If the string is a url it will return the url.
+ * If it is a local path, then the file-system module will return the file system path.
+ * @param path [string]
+ */
+export function resolveAudioFilePath(path: string) {
+ if (path) {
+ const isUrl = isStringUrl(path);
+ // if it's a url just return the audio file url
+ if (isUrl === true) {
+ return path;
+ } else {
+ let audioPath;
+ let fileName = Utils.isString(path) ? path.trim() : '';
+ if (fileName.indexOf('~/') === 0) {
+ fileName = nsFilePath.join(knownFolders.currentApp().path, fileName.replace('~/', ''));
+ audioPath = fileName;
+ } else {
+ audioPath = fileName;
+ }
+ return audioPath;
+ }
+ }
+}
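
For reference, how `resolveAudioFilePath` handles its two common inputs; the relative import mirrors how the platform players consume it inside the package:

```typescript
import { resolveAudioFilePath } from './common';

// A '~/'-prefixed path is expanded against knownFolders.currentApp(),
// e.g. '~/audio/song.mp3' -> '<app folder>/audio/song.mp3'
const localPath = resolveAudioFilePath('~/audio/song.mp3');

// Anything containing '://' (other than 'res://') is treated as a URL and returned unchanged.
const remoteUrl = resolveAudioFilePath('https://example.com/song.mp3');
```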
diff --git a/packages/audio/index.android.ts b/packages/audio/index.android.ts
new file mode 100644
index 00000000..d062bb0f
--- /dev/null
+++ b/packages/audio/index.android.ts
@@ -0,0 +1,3 @@
+export * from './android/player';
+export * from './android/recorder';
+export * from './options';
diff --git a/packages/audio/index.d.ts b/packages/audio/index.d.ts
new file mode 100644
index 00000000..4792e442
--- /dev/null
+++ b/packages/audio/index.d.ts
@@ -0,0 +1,396 @@
+import { EventData, Observable } from '@nativescript/core';
+
+export interface AudioPlayerOptions {
+ /**
+ * The audio file to play.
+ */
+ audioFile: string;
+
+ /**
+ * Set true to loop audio playback.
+ */
+ loop: boolean;
+
+ /**
+ * Prevent autoplay if desired, as the player autoplays by default.
+ */
+ autoPlay?: boolean;
+
+ /**
+ * Set true to enable audio metering.
+ */
+ metering?: boolean;
+ audioMixing?: boolean;
+
+ pitch?: number;
+
+ /**
+ * Callback to execute when playback has completed.
+ * @returns {Object} An object containing the native values for the callback.
+ */
+ completeCallback?: Function;
+
+ /**
+ * Callback to execute when playback has an error.
+ * @returns {Object} An object containing the native values for the error callback.
+ */
+ errorCallback?: Function;
+
+ /**
+ * Callback to execute when info is emitted from the player.
+ * @returns {Object} An object containing the native values for the info callback.
+ */
+ infoCallback?: Function;
+}
+
+export interface AudioRecorderOptions {
+ /**
+ * The name of the file recorded.
+ */
+ filename: string;
+
+ /**
+ * The audio source to record *** ANDROID ONLY for now ***
+ * https://developer.android.com/reference/android/media/MediaRecorder.AudioSource.html
+ */
+ source?: any;
+
+ /**
+ * The max duration of the audio recording.
+ */
+ maxDuration?: number;
+
+ /**
+ * Set true to enable audio metering.
+ */
+ metering?: boolean;
+
+ /**
+ * The format of the audio recording.
+ */
+ format?: any;
+ channels?: any;
+ sampleRate?: any;
+ bitRate?: any;
+ encoder?: any;
+
+ /**
+ * Callback to execute when playback has an error.
+ * @returns {Object} An object containing the native values for the error callback.
+ */
+ errorCallback?: Function;
+
+ /**
+ * Callback to execute when info is emitted from the player.
+ * @returns {Object} An object containing the native values for the info callback.
+ */
+ infoCallback?: Function;
+}
+export interface TNSPlayerI {
+ readonly ios?: any;
+ readonly android?: any;
+
+ /**
+ * Set to true to enable console log output for debugging.
+ */
+ debug: boolean;
+
+ /**
+ * Volume getter/setter
+ */
+ volume: any;
+
+ /**
+ * Duration getter
+ */
+ duration: number;
+
+ initFromFile(options: AudioPlayerOptions): Promise<any>;
+ /**
+ * Starts playing audio file from local app files.
+ */
+ playFromFile(options: AudioPlayerOptions): Promise<any>;
+
+ initFromUrl(options: AudioPlayerOptions): Promise<any>;
+
+ /**
+ * Starts playing audio file from url
+ */
+ playFromUrl(options: AudioPlayerOptions): Promise<any>;
+
+ /**
+ * Play audio file.
+ */
+ play(): Promise<boolean>;
+
+ /**
+ * Pauses playing audio file.
+ */
+ pause(): Promise<boolean>;
+
+ /**
+ * Resume audio player.
+ */
+ resume(): void;
+
+ /**
+ * Seeks to specific time.
+ */
+ seekTo(time: number): Promise<boolean>;
+
+ /**
+ * Releases resources from the audio player.
+ */
+ dispose(): Promise<any>;
+
+ /**
+ * Check if the audio is actively playing.
+ */
+ isAudioPlaying(): boolean;
+
+ /**
+ * Get the duration of the audio file playing.
+ */
+ getAudioTrackDuration(): Promise<string>;
+
+ /**
+ * Sets the player playback speed rate. On Android this works on API 23+.
+ * @param speed [number] - The speed of the playback.
+ */
+ changePlayerSpeed(speed: number): void;
+
+ /**
+ * ** iOS ONLY ** - Begins playback at a certain delay, relative to the current playback time.
+ * @param time [number] - The time to start playing the audio track at.
+ */
+ playAtTime(time: number);
+}
+export interface TNSRecordI {
+ start(options: AudioRecorderOptions): Promise<any>;
+ pause(): Promise<any>;
+ resume(): Promise<any>;
+ stop(): Promise<any>;
+ dispose(): Promise<any>;
+}
+export declare class TNSPlayer {
+ static ObjCProtocols: any[];
+ readonly ios: any;
+ readonly android: any;
+ readonly events: Observable;
+
+ /**
+ * Set to true to enable console log output for debugging.
+ */
+ debug: boolean;
+
+ /**
+ * Volume getter/setter
+ */
+ volume: any;
+
+ /**
+ * duration
+ */
+ duration: number;
+
+ /**
+ * current time
+ */
+ readonly currentTime: number;
+
+ /**
+ * @param {AudioFocusDurationHint} durationHint - Determines different behaviors by
+ * the system and the other application that previously held audio focus.
+ * See the {@link https://developer.android.com/reference/android/media/AudioFocusRequest#the-different-types-of-focus-requests different types of focus requests}
+ */
+ constructor(durationHint?: AudioFocusDurationHint | AudioFocusManager);
+
+ /**
+ * Sets the audio focus manager for this player
+ * @param manager new Audio Focus Manager
+ */
+ setAudioFocusManager(manager: AudioFocusManager);
+
+ initFromFile(options: AudioPlayerOptions): Promise<any>;
+
+ /**
+ * Starts playing audio file from local app files.
+ */
+ playFromFile(options: AudioPlayerOptions): Promise<any>;
+ initFromUrl(options: AudioPlayerOptions): Promise<any>;
+
+ /**
+ * Starts playing audio file from url
+ */
+ playFromUrl(options: AudioPlayerOptions): Promise<any>;
+
+ /**
+ * Play audio file.
+ */
+ play(): Promise<boolean>;
+
+ /**
+ * Pauses playing audio file.
+ */
+ pause(): Promise<boolean>;
+
+ /**
+ * Resume audio player.
+ */
+ resume(): void;
+
+ /**
+ * Seeks to specific time in seconds.
+ * @param time [number] - The position of the track duration to seek to.
+ */
+ seekTo(time: number): Promise<boolean>;
+
+ /**
+ * Releases resources from the audio player.
+ */
+ dispose(): Promise<any>;
+
+ /**
+ * Check if the audio is actively playing.
+ */
+ isAudioPlaying(): boolean;
+
+ /**
+ * Get the duration of the audio file playing.
+ */
+ getAudioTrackDuration(): Promise<string>;
+
+ /**
+ * Android Only
+ * Will set the playback speed for Android 23+, this is not available on lower Android APIs.
+ * @param speed [number] - The speed of the playback.
+ */
+ changePlayerSpeed(speed: number): void;
+
+ audioPlayerDidFinishPlayingSuccessfully(player?: any, flag?: boolean): void;
+}
+
+export declare class TNSRecorder {
+ static ObjCProtocols: any[];
+ private _recorder;
+ private _recordingSession;
+ readonly ios: any;
+ readonly android: any;
+
+ /**
+ * Set to true to enable console log output for debugging.
+ */
+ debug: boolean;
+
+ /**
+ * Returns true if the device is capable of recording, false otherwise.
+ */
+ static CAN_RECORD(): boolean;
+
+ /**
+ * Android Only
+ * Returns true if the RECORD_AUDIO permission has been granted.
+ */
+ hasRecordPermission(): boolean;
+
+ /**
+ * Android Only
+ * Promise will resolve if the user grants the permission or if the permission has already been granted.
+ */
+ requestRecordPermission(): Promise<any>;
+
+ /**
+ * Starts a recording session with the provided options.
+ * @param options [AudioRecorderOptions]
+ */
+ start(options: AudioRecorderOptions): Promise<any>;
+
+ /**
+ * Pauses the recorder.
+ */
+ pause(): Promise<any>;
+
+ /**
+ * Resumes the recorder.
+ */
+ resume(): Promise<any>;
+
+ /**
+ * Stops the recording.
+ */
+ stop(): Promise<any>;
+
+ /**
+ * Disposes of the recorder session.
+ */
+ dispose(): Promise<any>;
+
+ /**
+ * Returns the maximum absolute amplitude that was sampled since the last call to this method.
+ * @param channel [number]
+ */
+ getMeters(channel?: number): any;
+
+ /**
+ * iOS Only
+ * Returns value indicating the recorder is currently recording.
+ */
+ isRecording(): any;
+ audioRecorderDidFinishRecording(recorder: any, success: boolean): void;
+}
+
+export interface IAudioPlayerEvents {
+ seek: 'seek';
+ paused: 'paused';
+ started: 'started';
+}
+
+export const AudioPlayerEvents: IAudioPlayerEvents;
+
+export enum AudioFocusDurationHint {
+ /**
+ * Expresses the fact that your application is now the sole source
+ * of audio that the user is listening to. The duration of the
+ * audio playback is unknown, and is possibly very long: after the
+ * user finishes interacting with your application, (s)he doesn’t
+ * expect another audio stream to resume.
+ */
+ AUDIOFOCUS_GAIN = android.media.AudioManager.AUDIOFOCUS_GAIN,
+ /**
+ * For a situation when you know your application is temporarily
+ * grabbing focus from the current owner, but the user expects
+ * playback to go back to where it was once your application no
+ * longer requires audio focus.
+ */
+ AUDIOFOCUS_GAIN_TRANSIENT = android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT,
+ /**
+ * This focus request type is similar to AUDIOFOCUS_GAIN_TRANSIENT
+ * for the temporary aspect of the focus request, but it also
+ * expresses the fact that, during the time you own focus, you allow
+ * another application to keep playing at a reduced volume,
+ * “ducked”.
+ */
+ AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK = android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK,
+ /**
+ * Also for a temporary request, but also expresses that your
+ * application expects the device to not play anything else. This
+ * is typically used if you are doing audio recording or speech
+ * recognition, and don’t want, for example, notifications to be
+ * played by the system during that time.
+ */
+ AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE = android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE,
+}
+
+export interface AudioFocusManagerOptions {
+ durationHint?: AudioFocusDurationHint;
+ usage?: number; // android.media.AudioAttributes.USAGE_MEDIA
+ contentType?: number; // android.media.AudioAttributes.CONTENT_TYPE_MUSIC
+}
+export interface AudioFocusChangeEventData extends EventData {
+ focusChange: number;
+}
+
+export class AudioFocusManager extends Observable {
+ constructor(options?: AudioFocusManagerOptions);
+ on(event: 'audioFocusChange', callback: (data: AudioFocusChangeEventData) => void, thisArg?: any);
+}
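
The declarations above expose an `events` observable, `AudioPlayerEvents` and `changePlayerSpeed`. A sketch of wiring them up on Android, where the player in this diff notifies `started`, `paused` and `seek` (the iOS implementation relies on the option callbacks instead); the file path is illustrative:

```typescript
import { AudioPlayerEvents, TNSPlayer } from '@nativescript/audio';

const player = new TNSPlayer();

player.events.on(AudioPlayerEvents.started, () => console.log('playback started'));
player.events.on(AudioPlayerEvents.paused, () => console.log('playback paused'));

player.playFromFile({ audioFile: '~/audio/song.mp3', loop: false }).then(() => {
  // Android 23+ only; older API levels just log a warning.
  player.changePlayerSpeed(1.5);
});
```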
diff --git a/packages/audio/index.ios.ts b/packages/audio/index.ios.ts
new file mode 100644
index 00000000..a23428ac
--- /dev/null
+++ b/packages/audio/index.ios.ts
@@ -0,0 +1,3 @@
+export * from './ios/player';
+export * from './ios/recorder';
+export * from './options';
diff --git a/packages/audio/ios/player.ts b/packages/audio/ios/player.ts
new file mode 100644
index 00000000..2ec64c77
--- /dev/null
+++ b/packages/audio/ios/player.ts
@@ -0,0 +1,360 @@
+import { knownFolders, Observable, path as nsFilePath, Utils } from '@nativescript/core';
+import { TNSPlayerI } from '../common';
+import { AudioPlayerOptions } from '../options';
+
+declare var AVAudioPlayer;
+
+@NativeClass()
+class TNSPlayerDelegate extends NSObject implements AVAudioPlayerDelegate {
+ static ObjCProtocols = [AVAudioPlayerDelegate];
+ private _owner: WeakRef<TNSPlayer>;
+
+ static initWithOwner(owner: TNSPlayer) {
+ const delegate = TNSPlayerDelegate.new();
+ delegate._owner = new WeakRef(owner);
+ return delegate;
+ }
+
+ audioPlayerDidFinishPlayingSuccessfully(player?: any, flag?: boolean) {
+ const owner = this._owner.get();
+ if (owner) {
+ if (flag && owner.completeCallback) {
+ owner.completeCallback({ player, flag });
+ } else if (!flag && owner.errorCallback) {
+ owner.errorCallback({ player, flag });
+ }
+ }
+ }
+
+ audioPlayerDecodeErrorDidOccurError(player: any, error: NSError) {
+ const owner = this._owner.get();
+ if (owner) {
+ if (owner.errorCallback) {
+ owner.errorCallback({ player, error });
+ }
+ }
+ }
+}
+
+export { TNSPlayerDelegate };
+
+export class AudioFocusManager extends Observable {}
+
+export class TNSPlayer extends Observable implements TNSPlayerI {
+ completeCallback: any;
+ errorCallback: any;
+ infoCallback: any;
+
+ private _player: AVAudioPlayer;
+ private _task: NSURLSessionDataTask;
+ private delegate: TNSPlayerDelegate;
+
+ get ios(): any {
+ return this._player;
+ }
+
+ get volume(): number {
+ return this._player ? this._player.volume : 0;
+ }
+
+ set volume(value: number) {
+ if (this._player && value >= 0) {
+ this._player.volume = value;
+ }
+ }
+
+ public get duration() {
+ if (this._player) {
+ return this._player.duration;
+ } else {
+ return 0;
+ }
+ }
+
+ get currentTime(): number {
+ return this._player ? this._player.currentTime : 0;
+ }
+
+ public setAudioFocusManager(manager: any) {}
+
+ public initFromFile(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ // init only
+ options.autoPlay = false;
+ this.playFromFile(options).then(resolve, reject);
+ });
+ }
+
+ public playFromFile(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ // only if not explicitly set, default to true
+ if (options.autoPlay !== false) {
+ options.autoPlay = true;
+ }
+
+ try {
+ let fileName = Utils.isString(options.audioFile) ? options.audioFile.trim() : '';
+ if (fileName.indexOf('~/') === 0) {
+ fileName = nsFilePath.join(knownFolders.currentApp().path, fileName.replace('~/', ''));
+ }
+
+ this.completeCallback = options.completeCallback;
+ this.errorCallback = options.errorCallback;
+ this.infoCallback = options.infoCallback;
+
+ const audioSession = AVAudioSession.sharedInstance();
+ if (options.audioMixing) {
+ audioSession.setCategoryWithOptionsError(AVAudioSessionCategoryAmbient, AVAudioSessionCategoryOptions.MixWithOthers);
+ } else {
+ audioSession.setCategoryWithOptionsError(AVAudioSessionCategoryAmbient, AVAudioSessionCategoryOptions.DuckOthers);
+ }
+
+ const output = audioSession.currentRoute.outputs.lastObject.portType;
+
+ if (output.match(/Receiver/)) {
+ try {
+ audioSession.setCategoryError(AVAudioSessionCategoryPlayAndRecord);
+ audioSession.overrideOutputAudioPortError(AVAudioSessionPortOverride.Speaker);
+ audioSession.setActiveError(true);
+ } catch (err) {
+ console.error('setting audioSession category failed', err);
+ }
+ }
+
+ const errorRef = new interop.Reference();
+ this._player = AVAudioPlayer.alloc().initWithContentsOfURLError(NSURL.fileURLWithPath(fileName), errorRef);
+ if (errorRef && errorRef.value) {
+ reject(errorRef.value);
+ return;
+ } else if (this._player) {
+ if (this.delegate === undefined) this.delegate = TNSPlayerDelegate.initWithOwner(this);
+ this._player.delegate = this.delegate;
+ // enableRate to change playback speed
+ this._player.enableRate = true;
+
+ if (options.metering) {
+ this._player.meteringEnabled = true;
+ }
+
+ if (options.loop) {
+ this._player.numberOfLoops = -1;
+ }
+
+ if (options.autoPlay) {
+ this._player.play();
+ }
+
+ resolve(null);
+ } else {
+ reject();
+ }
+ } catch (ex) {
+ if (this.errorCallback) {
+ this.errorCallback({ ex });
+ }
+ reject(ex);
+ }
+ });
+ }
+
+ public initFromUrl(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ // init only
+ options.autoPlay = false;
+ this.playFromUrl(options).then(resolve, reject);
+ });
+ }
+
+ public playFromUrl(options: AudioPlayerOptions): Promise<any> {
+ return new Promise((resolve, reject) => {
+ // only if not explicitly set, default to true
+ if (options.autoPlay !== false) {
+ options.autoPlay = true;
+ }
+
+ try {
+ this._task = NSURLSession.sharedSession.dataTaskWithURLCompletionHandler(NSURL.URLWithString(options.audioFile), (data, response, error) => {
+ if (error !== null) {
+ if (this.errorCallback) {
+ this.errorCallback({ error });
+ }
+
+ reject();
+ }
+
+ this.completeCallback = options.completeCallback;
+ this.errorCallback = options.errorCallback;
+ this.infoCallback = options.infoCallback;
+
+ const audioSession = AVAudioSession.sharedInstance();
+ if (options.audioMixing) {
+ audioSession.setCategoryWithOptionsError(AVAudioSessionCategoryAmbient, AVAudioSessionCategoryOptions.MixWithOthers);
+ } else {
+ audioSession.setCategoryWithOptionsError(AVAudioSessionCategoryAmbient, AVAudioSessionCategoryOptions.DuckOthers);
+ }
+ const output = audioSession.currentRoute.outputs.lastObject.portType;
+
+ if (output.match(/Receiver/)) {
+ try {
+ audioSession.setCategoryError(AVAudioSessionCategoryPlayAndRecord);
+ audioSession.overrideOutputAudioPortError(AVAudioSessionPortOverride.Speaker);
+ audioSession.setActiveError(true);
+ } catch (err) {
+ console.error('Setting audioSession category failed.', err);
+ }
+ }
+
+ const errorRef = new interop.Reference();
+ this._player = AVAudioPlayer.alloc().initWithDataError(data, errorRef);
+ if (errorRef && errorRef.value) {
+ reject(errorRef.value);
+ return;
+ } else if (this._player) {
+ this._player.delegate = TNSPlayerDelegate.initWithOwner(this);
+
+ // enableRate to change playback speed
+ this._player.enableRate = true;
+
+ this._player.numberOfLoops = options.loop ? -1 : 0;
+
+ if (options.metering) {
+ this._player.meteringEnabled = true;
+ }
+
+ if (options.autoPlay) {
+ this._player.play();
+ }
+
+ resolve(null);
+ } else {
+ reject();
+ }
+ });
+
+ this._task.resume();
+ } catch (ex) {
+ if (this.errorCallback) {
+ this.errorCallback({ ex });
+ }
+ reject(ex);
+ }
+ });
+ }
+
+ public pause(): Promise<boolean> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._player && this._player.playing) {
+ this._player.pause();
+ }
+ resolve(true);
+ } catch (ex) {
+ if (this.errorCallback) {
+ this.errorCallback({ ex });
+ }
+ reject(ex);
+ }
+ });
+ }
+
+ public play(): Promise<boolean> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (!this.isAudioPlaying()) {
+ this._player.play();
+ }
+ resolve(true);
+ } catch (ex) {
+ if (this.errorCallback) {
+ this.errorCallback({ ex });
+ }
+ reject(ex);
+ }
+ });
+ }
+
+ public resume(): void {
+ if (this._player) {
+ this._player.play();
+ }
+ }
+
+ public playAtTime(time: number): void {
+ if (this._player) {
+ this._player.playAtTime(time);
+ }
+ }
+
+ public seekTo(time: number): Promise<boolean> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._player) {
+ this._player.currentTime = time;
+ }
+ resolve(true);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ public dispose(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._player && this.isAudioPlaying()) {
+ this._player.stop();
+ }
+ const audioSession = AVAudioSession.sharedInstance();
+ audioSession.setActiveError(false);
+ this._reset();
+ resolve(null);
+ } catch (ex) {
+ if (this.errorCallback) {
+ this.errorCallback({ ex });
+ }
+ reject(ex);
+ }
+ });
+ }
+
+ public isAudioPlaying(): boolean {
+ return this._player ? this._player.playing : false;
+ }
+
+ public getAudioTrackDuration(): Promise<string> {
+ return new Promise((resolve, reject) => {
+ try {
+ const duration = this._player ? this._player.duration : 0;
+ resolve(duration.toString());
+ } catch (ex) {
+ if (this.errorCallback) {
+ this.errorCallback({ ex });
+ }
+ reject(ex);
+ }
+ });
+ }
+
+ public changePlayerSpeed(speed) {
+ if (this._player && speed) {
+ // make sure speed is a number/float
+ if (typeof speed === 'string') {
+ speed = parseFloat(speed);
+ }
+ this._player.rate = speed;
+ }
+ }
+
+ private _reset() {
+ if (this._player) {
+ this._player = undefined;
+ }
+ if (this.delegate) {
+ this.delegate = undefined;
+ }
+ if (this._task) {
+ this._task.cancel();
+ this._task = undefined;
+ }
+ }
+}
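
Both platform players implement `playFromUrl`; a sketch of streaming a remote file (placeholder URL), including the caveat that Android's `MediaPlayer.getDuration()` reports milliseconds while iOS's `AVAudioPlayer.duration` reports seconds:

```typescript
import { TNSPlayer } from '@nativescript/audio';

const player = new TNSPlayer();

player
  .playFromUrl({
    audioFile: 'https://example.com/episode.mp3', // placeholder URL
    loop: false,
    completeCallback: () => {
      console.log('finished streaming');
      player.dispose();
    },
    errorCallback: (err) => console.error('stream error', err),
  })
  .then(() => {
    // milliseconds on Android, seconds on iOS
    console.log('duration:', player.duration);
  });
```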
diff --git a/packages/audio/ios/recorder.ts b/packages/audio/ios/recorder.ts
new file mode 100644
index 00000000..38d395b0
--- /dev/null
+++ b/packages/audio/ios/recorder.ts
@@ -0,0 +1,211 @@
+import { Observable } from '@nativescript/core';
+import { TNSRecordI } from '../common';
+import { AudioRecorderOptions } from '../options';
+declare var kAudioFormatAppleLossless, kAudioFormatMPEG4AAC;
+@NativeClass()
+class TNSRecorderDelegate extends NSObject implements AVAudioRecorderDelegate {
+ static ObjCProtocols = [AVAudioRecorderDelegate];
+ private _owner: WeakRef<TNSRecorder>;
+
+ static initWithOwner(owner: TNSRecorder) {
+ const delegate = TNSRecorderDelegate.new();
+ delegate._owner = new WeakRef(owner);
+ return delegate;
+ }
+
+ audioRecorderDidFinishRecording(recorder: any, success: boolean) {
+ console.log(`audioRecorderDidFinishRecording: ${success}`);
+ const owner = this._owner.get();
+ if (owner) {
+ // owner.notify({
+ // eventName: 'RecorderFinished',
+ // })
+ }
+ }
+
+ audioRecorderDidFinishRecordingSuccessfully(recorder: AVAudioRecorder, flag) {
+ console.log(`audioRecorderDidFinishRecordingSuccessfully: ${flag}`);
+ const owner = this._owner.get();
+ if (owner) {
+ // owner.notify({
+ // eventName: 'RecorderFinishedSuccessfully',
+ // })
+ }
+ }
+}
+
+export { TNSRecorderDelegate };
+
+export class TNSRecorder extends Observable implements TNSRecordI {
+ private _recorder: any;
+ private _recordingSession: any;
+
+ private _recorderOptions: AudioRecorderOptions;
+
+ static CAN_RECORD(): boolean {
+ return true;
+ }
+
+ get ios() {
+ return this._recorder;
+ }
+
+ requestRecordPermission() {
+ return new Promise((resolve, reject) => {
+ this._recordingSession.requestRecordPermission((allowed: boolean) => {
+ if (allowed) {
+ resolve(true);
+ } else {
+ reject('Record permissions denied');
+ }
+ });
+ });
+ }
+
+ start(options: AudioRecorderOptions): Promise<any> {
+ this._recorderOptions = options;
+ return new Promise((resolve, reject) => {
+ try {
+ this._recordingSession = AVAudioSession.sharedInstance();
+ let errorRef = new interop.Reference();
+ this._recordingSession.setCategoryError(AVAudioSessionCategoryPlayAndRecord, errorRef);
+ if (errorRef) {
+ console.error(`setCategoryError: ${errorRef.value}, ${errorRef}`);
+ }
+
+ this._recordingSession.setActiveError(true, null);
+ this._recordingSession.requestRecordPermission((allowed: boolean) => {
+ if (allowed) {
+ const recordSetting = NSMutableDictionary.alloc().init();
+ let format = options.format ? options.format : kAudioFormatAppleLossless;
+ console.log(`setting format: ${format}`);
+ recordSetting.setValueForKey(NSNumber.numberWithInt(format), 'AVFormatIDKey');
+
+ let avAudioQualityValue = AVAudioQuality.Medium;
+ if (options.iosAudioQuality) {
+ if (options.iosAudioQuality == 'Min') {
+ avAudioQualityValue = AVAudioQuality.Min;
+ } else if (options.iosAudioQuality == 'Low') {
+ avAudioQualityValue = AVAudioQuality.Low;
+ } else if (options.iosAudioQuality == 'Medium') {
+ avAudioQualityValue = AVAudioQuality.Medium;
+ } else if (options.iosAudioQuality == 'High') {
+ avAudioQualityValue = AVAudioQuality.High;
+ } else if (options.iosAudioQuality == 'Max') {
+ avAudioQualityValue = AVAudioQuality.Max;
+ }
+ }
+ console.log(`setting audio quality: ${avAudioQualityValue}`); // https://developer.apple.com/documentation/avfaudio/avaudioquality
+ recordSetting.setValueForKey(NSNumber.numberWithInt(avAudioQualityValue), 'AVEncoderAudioQualityKey');
+
+ let sampleRate: number = 44100.0;
+ if (options.sampleRate) sampleRate = parseFloat(parseInt(options.sampleRate).toFixed(1));
+ console.log(`setting sampleRate: ${sampleRate}`);
+ recordSetting.setValueForKey(NSNumber.numberWithFloat(sampleRate), 'AVSampleRateKey');
+
+ let channels = options.channels ? options.channels : 1;
+ console.log(`setting channels: ${channels}`);
+ recordSetting.setValueForKey(NSNumber.numberWithInt(channels), 'AVNumberOfChannelsKey');
+
+ errorRef = new interop.Reference();
+
+ const url = NSURL.fileURLWithPath(options.filename);
+
+ this._recorder = AVAudioRecorder.alloc().initWithURLSettingsError(url, recordSetting, errorRef);
+ if (errorRef && errorRef.value) {
+ console.error(`initWithURLSettingsError errorRef: ${errorRef.value}, ${errorRef}`);
+ } else {
+ this._recorder.delegate = TNSRecorderDelegate.initWithOwner(this);
+ if (options.metering) {
+ this._recorder.meteringEnabled = true;
+ }
+ if (options.maxDuration) {
+ this._recorder.recordForDuration(options.maxDuration / 1000);
+ } else {
+ this._recorder.prepareToRecord();
+ this._recorder.record();
+ }
+
+ resolve(null);
+ }
+ }
+ });
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ pause(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.pause();
+ }
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ resume(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.record();
+ }
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ stop(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.stop();
+ this._recorder.meteringEnabled = false;
+ }
+ // may need this in future
+ // this._recordingSession.setActiveError(false, null);
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ dispose(): Promise<any> {
+ return new Promise((resolve, reject) => {
+ try {
+ if (this._recorder) {
+ this._recorder.stop();
+ this._recorder.meteringEnabled = false;
+ this._recordingSession.setActiveError(false, null);
+ this._recorder.release();
+ this._recorder = undefined;
+ }
+ resolve(null);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ }
+
+ isRecording() {
+ return this._recorder && this._recorder.recording;
+ }
+
+ getMeters(channel?: number) {
+ if (this._recorder) {
+ if (!this._recorder.meteringEnabled) {
+ this._recorder.meteringEnabled = true;
+ }
+ this._recorder.updateMeters();
+ return this._recorder.averagePowerForChannel(channel);
+ }
+ }
+}
diff --git a/packages/audio/options.ts b/packages/audio/options.ts
new file mode 100644
index 00000000..224dc3d8
--- /dev/null
+++ b/packages/audio/options.ts
@@ -0,0 +1,117 @@
+/**
+ * Provides options for the audio player.
+ */
+export interface AudioPlayerOptions {
+ /**
+ * Gets or sets the audio file url.
+ */
+ audioFile: string;
+
+ /**
+ * Gets or sets the callback when the currently playing audio file completes.
+ * @returns {Object} An object containing the native values for the callback.
+ */
+ completeCallback?: Function;
+
+ /**
+ * Gets or sets whether the player loops playback.
+ */
+ loop: boolean;
+
+ /**
+ * Set to false to prevent autoplay; the player autoplays by default.
+ */
+ autoPlay?: boolean;
+
+ /**
+ * Enable metering. Off by default.
+ */
+ metering?: boolean;
+
+ audioMixing?: boolean;
+
+ pitch?: number;
+ /**
+ * Gets or sets the callback when an error occurs with the audio player.
+ * @returns {Object} An object containing the native values for the error callback.
+ */
+ errorCallback?: Function;
+
+ /**
+ * Gets or sets the callback to be invoked to communicate some info and/or warning about the media or its playback.
+ * @returns {Object} An object containing the native values for the info callback.
+ */
+ infoCallback?: Function;
+}
+
+export interface AudioRecorderOptions {
+ /**
+ * Gets or sets the recorded file name.
+ */
+ filename: string;
+
+ /**
+ * Sets the source for recording. ***ANDROID ONLY for now***
+ */
+ source?: any;
+
+ /**
+ * Gets or sets the max duration of the recording session.
+ * Input in milliseconds, which is Android's format.
+ * Will be converted appropriately for iOS.
+ */
+ maxDuration?: number;
+
+ /**
+ * Enable metering. Off by default.
+ */
+ metering?: boolean;
+
+ /**
+ * The audio format to record in. On iOS this value is used as the AVFormatIDKey.
+ */
+ format?: any;
+
+ /**
+ * Number of recording channels. Defaults to 1 on iOS.
+ */
+ channels?: any;
+
+ /**
+ * Sample rate in Hz. Defaults to 44100 on iOS.
+ */
+ sampleRate?: any;
+
+ /**
+ * Bit rate
+ */
+ bitRate?: any;
+
+ /**
+ * Encoding
+ */
+ encoder?: any;
+
+ /**
+ * Sets the iOS audio quality setting. Options are Min|Low|Medium|High|Max. Defaults to Medium.
+ */
+ iosAudioQuality?: string;
+
+ /**
+ * Gets or sets the callback when an error occurs with the media recorder.
+ * @returns {Object} An object containing the native values for the error callback.
+ */
+ errorCallback?: Function;
+
+ /**
+ * Gets or sets the callback to be invoked to communicate some info and/or warning about the media or its playback.
+ * @returns {Object} An object containing the native values for the info callback.
+ */
+ infoCallback?: Function;
+}
+
+export const AudioPlayerEvents = {
+ seek: 'seek',
+ paused: 'paused',
+ started: 'started',
+};
diff --git a/packages/audio/package.json b/packages/audio/package.json
new file mode 100644
index 00000000..0dd23583
--- /dev/null
+++ b/packages/audio/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "@nativescript/audio",
+ "version": "1.0.0",
+ "description": "Add a plugin description",
+ "main": "index",
+ "typings": "index.d.ts",
+ "nativescript": {
+ "platforms": {
+ "ios": "6.0.0",
+ "android": "6.0.0"
+ }
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/NativeScript/plugins.git"
+ },
+ "keywords": [
+ "NativeScript",
+ "JavaScript",
+ "TypeScript",
+ "iOS",
+ "Android"
+ ],
+ "author": {
+ "name": "NativeScript",
+ "email": "oss@nativescript.org"
+ },
+ "bugs": {
+ "url": "https://github.com/NativeScript/plugins/issues"
+ },
+ "license": "Apache-2.0",
+ "homepage": "https://github.com/NativeScript/plugins",
+ "readmeFilename": "README.md",
+ "bootstrapper": "@nativescript/plugin-seed"
+}
diff --git a/packages/audio/project.json b/packages/audio/project.json
new file mode 100644
index 00000000..b1af7a02
--- /dev/null
+++ b/packages/audio/project.json
@@ -0,0 +1,64 @@
+{
+ "$schema": "../../node_modules/nx/schemas/project-schema.json",
+ "projectType": "library",
+ "sourceRoot": "packages/audio",
+ "targets": {
+ "build": {
+ "executor": "@nrwl/js:tsc",
+ "options": {
+ "outputPath": "dist/packages/audio",
+ "tsConfig": "packages/audio/tsconfig.json",
+ "packageJson": "packages/audio/package.json",
+ "main": "packages/audio/index.d.ts",
+ "assets": [
+ "packages/audio/*.md",
+ "packages/audio/index.d.ts",
+ "LICENSE",
+ {
+ "glob": "**/*",
+ "input": "packages/audio/platforms/",
+ "output": "./platforms/"
+ }
+ ],
+ "dependsOn": [
+ {
+ "target": "build.all",
+ "projects": "dependencies"
+ }
+ ]
+ }
+ },
+ "build.all": {
+ "executor": "@nrwl/workspace:run-commands",
+ "options": {
+ "commands": ["node tools/scripts/build-finish.ts audio"],
+ "parallel": false
+ },
+ "outputs": ["dist/packages/audio"],
+ "dependsOn": [
+ {
+ "target": "build.all",
+ "projects": "dependencies"
+ },
+ {
+ "target": "build",
+ "projects": "self"
+ }
+ ]
+ },
+ "focus": {
+ "executor": "@nrwl/workspace:run-commands",
+ "options": {
+ "commands": ["nx g @nativescript/plugin-tools:focus-packages audio"],
+ "parallel": false
+ }
+ },
+ "lint": {
+ "executor": "@nrwl/linter:eslint",
+ "options": {
+ "lintFilePatterns": ["packages/audio/**/*.ts"]
+ }
+ }
+ },
+ "tags": []
+}
diff --git a/packages/audio/references.d.ts b/packages/audio/references.d.ts
new file mode 100644
index 00000000..22bac92c
--- /dev/null
+++ b/packages/audio/references.d.ts
@@ -0,0 +1 @@
+/// <reference path="../../references.d.ts" />
diff --git a/packages/audio/tsconfig.json b/packages/audio/tsconfig.json
new file mode 100644
index 00000000..52cfe522
--- /dev/null
+++ b/packages/audio/tsconfig.json
@@ -0,0 +1,9 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "outDir": "../../dist/out-tsc",
+ "rootDir": "."
+ },
+ "exclude": ["**/*.spec.ts", "**/*.test.ts", "angular"],
+ "include": ["**/*.ts", "references.d.ts"]
+}
diff --git a/tools/demo/audio/index.ts b/tools/demo/audio/index.ts
new file mode 100644
index 00000000..825f72db
--- /dev/null
+++ b/tools/demo/audio/index.ts
@@ -0,0 +1,201 @@
+import { DemoSharedBase } from '../utils';
+import { AudioRecorderOptions, TNSPlayer, TNSRecorder } from '@nativescript/audio';
+import { Application, Dialogs, isAndroid, isIOS, knownFolders } from '@nativescript/core';
+
+export class DemoSharedAudio extends DemoSharedBase {
+ private _player: TNSPlayer;
+ private _recorder: TNSRecorder;
+ public isPlaying: boolean = false;
+ private _meterInterval: any;
+ public audioMeter: string;
+
+ public currentTime: any = '0';
+ private _timeInterval: any;
+ private _recordingTimerInterval: any;
+ public isRecording: boolean;
+ public recordingTimer: any = 0;
+ public prettyTime: string = '0:00';
+
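+ // Wire up a player against a remote demo track (autoplay disabled) and a recorder with verbose logging.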
+ constructor() {
+ super();
+ this._player = new TNSPlayer();
+ this._player.initFromUrl({
+ audioFile: 'https://davecoffin.com/assets/thistooshallpass.mp3',
+ loop: false,
+ audioMixing: true,
+ autoPlay: false,
+ completeCallback: (data) => {
+ console.log('song is done.');
+ },
+ errorCallback: (error) => {
+ console.log('error playing track', error);
+ },
+ });
+
+ this._recorder = new TNSRecorder();
+ this._recorder.debug = true; // set true for tns_recorder logs
+ }
+
+ play() {
+ this._player
+ .play()
+ .then(() => {
+ this.set('isPlaying', true);
+ this._initTimer();
+ })
+ .catch((error) => console.log(error));
+ }
+
+ pause() {
+ this._player.pause();
+ this.set('isPlaying', false);
+ this.isPlaying = false;
+ }
+
+ seek() {
+ this._player.seekTo(60).then(() => {
+ this.play();
+ });
+ }
+ startOver() {
+ this._player.seekTo(0).then(() => this.play());
+ }
+
+ private resetTimer() {
+ clearInterval(this._timeInterval);
+ this._timeInterval = undefined;
+ }
+
+ private _initTimer() {
+ this.resetTimer();
+ this._timeInterval = setInterval(() => {
+ this.set('currentTime', this._player.currentTime.toFixed(0));
+ console.log(this.currentTime);
+ }, 1000);
+ }
+
+ public filename: string;
+ public recordingPath: string;
+ public record() {
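+ // Bail out early if the device cannot record, then pick a writable folder per platform before starting the recorder.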
+ if (!TNSRecorder.CAN_RECORD()) {
+ Dialogs.alert('This device cannot record audio.');
+ return;
+ }
+ let audioFolder;
+ if (isIOS) {
+ audioFolder = knownFolders.documents().getFolder('audio');
+ } else {
+ audioFolder = knownFolders.currentApp().getFolder('audio');
+ }
+ let androidFormat;
+ let androidEncoder;
+ if (isAndroid) {
+ androidFormat = android.media.AudioFormat.ENCODING_PCM_16BIT;
+ androidEncoder = android.media.MediaRecorder.AudioEncoder.AAC;
+ }
+ this.filename = 'foofile.' + this.platformExtension();
+ this.recordingPath = `${audioFolder.path}/${this.filename}`;
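+ // Rough bit-rate estimate: sample rate * bit depth (44100 * 16 = 705,600 bps for a single channel).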
+ let bitDepth = 16;
+ let sampleRate = 44100;
+ let bitRate = sampleRate * bitDepth;
+ const recorderOptions: AudioRecorderOptions = {
+ filename: this.recordingPath,
+ format: androidFormat,
+ encoder: androidEncoder,
+ sampleRate: sampleRate,
+ bitRate: bitRate,
+ metering: true,
+ infoCallback: (infoObject) => {
+ console.log(JSON.stringify(infoObject));
+ },
+ errorCallback: (errorObject) => {
+ console.log(JSON.stringify(errorObject));
+ },
+ };
+ this._recorder
+ .start(recorderOptions)
+ .then(() => {
+ this._initRecordingTimer();
+ this.isRecording = true;
+ if (recorderOptions.metering) {
+ this._initMeter();
+ }
+ })
+ .catch((error) => {
+ this.isRecording = false;
+ this._resetMeter();
+ Dialogs.alert(error);
+ });
+ }
+
+ private platformExtension() {
+ // m4a on Android, caf on iOS
+ return `${Application.android ? 'm4a' : 'caf'}`;
+ }
+
+ private _initRecordingTimer() {
+ this._resetRecordingTimer();
+ this._recordingTimerInterval = setInterval(() => {
+ this.recordingTimer += 1;
+ this.prettyTime = this.hhmmss(this.recordingTimer);
+ }, 1000);
+ }
+
+ private _resetRecordingTimer() {
+ if (this._recordingTimerInterval) {
+ this.recordingTimer = 0;
+ this.prettyTime = '0:00';
+ clearInterval(this._recordingTimerInterval);
+ this._recordingTimerInterval = undefined;
+ }
+ }
+
+ private _resetMeter() {
+ if (this._meterInterval) {
+ this.set('audioMeter', '0');
+ clearInterval(this._meterInterval);
+ this._meterInterval = undefined;
+ }
+ }
+
+ private _initMeter() {
+ this._resetMeter();
+ this._meterInterval = setInterval(() => {
+ // this.audioMeter = this._player.
+ // if (isIOS) {
+ // this.handleMeterUI(this.audioMeter+200)
+ // } else {
+ // let db = (20 * Math.log10(parseInt(this.audioMeter) / .1));
+ // let percentage = db + 85;
+ // this.handleMeterUI(percentage)
+ // }
+ }, 150);
+ }
+
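+ // Formats a second count as h:mm:ss / m:ss, or as "2h 5m" style when pretty is true; returns '0s' for falsy input.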
+ public hhmmss(secs, pretty = false) {
+ if (secs) {
+ if (typeof secs === 'string') secs = parseInt(secs);
+ secs = secs.toFixed(0);
+ var minutes: any = Math.floor(secs / 60);
+ secs = secs % 60;
+ var hours = Math.floor(minutes / 60);
+ minutes = minutes % 60;
+ if (pretty) {
+ if (hours) {
+ return hours + 'h ' + minutes + 'm';
+ } else {
+ return minutes ? minutes + 'm' : '<1m';
+ }
+ } else {
+ if (hours) {
+ if (minutes < 10) minutes = '0' + minutes;
+ return `${hours}:${minutes}:${('0' + secs).slice(-2)}`;
+ } else {
+ return `${minutes}:${('0' + secs).slice(-2)}`;
+ }
+ }
+ } else {
+ return '0s';
+ }
+ }
+}
diff --git a/tools/demo/index.ts b/tools/demo/index.ts
index e2688505..1aa78691 100644
--- a/tools/demo/index.ts
+++ b/tools/demo/index.ts
@@ -2,6 +2,7 @@ export * from './utils';
export * from './animated-circle';
export * from './appavailability';
export * from './apple-sign-in';
+export * from './audio';
export * from './auto-fit-text';
export * from './background-http';
export * from './biometrics';
diff --git a/tools/workspace-scripts.js b/tools/workspace-scripts.js
index 8c85a7b5..15ce49e1 100644
--- a/tools/workspace-scripts.js
+++ b/tools/workspace-scripts.js
@@ -293,12 +293,19 @@ module.exports = {
},
},
// @nativescript/pdf
- 'pdf': {
+ pdf: {
build: {
script: 'nx run pdf:build.all',
description: '@nativescript/pdf: Build',
},
},
+ // @nativescript/audio
+ audio: {
+ build: {
+ script: 'nx run audio:build.all',
+ description: '@nativescript/audio: Build',
+ },
+ },
'build-all': {
script: 'nx run-many --target=build.all --all',
description: 'Build all packages',
@@ -441,10 +448,14 @@ module.exports = {
script: 'nx run haptics:focus',
description: 'Focus on @nativescript/haptics',
},
- 'pdf': {
+ pdf: {
script: 'nx run pdf:focus',
description: 'Focus on @nativescript/pdf',
},
+ audio: {
+ script: 'nx run audio:focus',
+ description: 'Focus on @nativescript/audio',
+ },
reset: {
script: 'nx g @nativescript/plugin-tools:focus-packages',
description: 'Reset Focus',
diff --git a/tsconfig.base.json b/tsconfig.base.json
index b890d8e3..b20b1baf 100644
--- a/tsconfig.base.json
+++ b/tsconfig.base.json
@@ -60,7 +60,8 @@
"@nativescript/social-share": ["packages/social-share/index.d.ts"],
"@nativescript/theme-switcher": ["packages/theme-switcher/index.ts"],
"@nativescript/twitter": ["packages/twitter/index.d.ts"],
- "@nativescript/zip": ["packages/zip/index.d.ts"]
+ "@nativescript/zip": ["packages/zip/index.d.ts"],
+ "@nativescript/audio": ["packages/audio/index.d.ts"]
}
},
"exclude": ["node_modules", "tmp"]
diff --git a/workspace.json b/workspace.json
index 5319d47e..f0f2bab3 100644
--- a/workspace.json
+++ b/workspace.json
@@ -36,7 +36,8 @@
"social-share": "packages/social-share",
"theme-switcher": "packages/theme-switcher",
"twitter": "packages/twitter",
- "zip": "packages/zip"
+ "zip": "packages/zip",
+ "audio": "packages/audio"
},
"$schema": "./node_modules/nx/schemas/workspace-schema.json"
}