// jiuyiUniapp/jiuyi/node_modules/@agora-js/media/rtc-sdk-media_en.d.ts

import { AgoraRTCError } from '@agora-js/shared';
import { CheckVisibleResult } from '@agora-js/shared';
import { ElectronDesktopCapturerSource } from '@agora-js/shared';
import { EventEmitter } from '@agora-js/shared';
import { IAudioProcessor } from 'agora-rte-extension';
import type { IAudioProcessorContext } from 'agora-rte-extension';
import { IBaseProcessor } from 'agora-rte-extension';
import type { IProcessorContext } from 'agora-rte-extension';
import { isElectron } from '@agora-js/shared';
import type { Kind } from 'agora-rte-extension';
import type { NetworkQuality } from '@agora-js/shared';
import type { ProcessorStats } from 'agora-rte-extension';
import { PromiseMutex } from '@agora-js/shared';
import { RequiredOnlyOneOf } from '@agora-js/shared';
import { SDKStore } from '@agora-js/shared';
import { UID } from '@agora-js/shared';
import { Usage } from 'agora-rte-extension';
import type { UsageWithDirection } from 'agora-rte-extension';
export declare const __TRACK_LIST__: Track[];
export declare function addTrack(track: Track): void;
export declare interface AgoraRTCCompatibility {
getDisplayMedia: boolean;
getStreamFromExtension: boolean;
supportUnifiedPlan: boolean;
supportMinBitrate: boolean;
supportSetRtpSenderParameters: boolean;
supportDualStream: boolean;
webAudioMediaStreamDest: boolean;
supportReplaceTrack: boolean;
supportWebGL: boolean;
webAudioWithAEC: boolean;
supportRequestFrame: boolean;
supportShareAudio: boolean;
supportDualStreamEncoding: boolean;
supportDataChannel: boolean;
supportPCSetConfiguration: boolean;
supportWebRTCEncodedTransform: boolean;
supportWebRTCInsertableStream: boolean;
supportRequestVideoFrameCallback: boolean;
supportWebCrypto: boolean;
}
declare class AgoraRTCPlayer extends VideoPlayer {
private container?;
private slot;
constructor(config: PlayerConfig);
updateConfig(config: PlayerConfig): void;
updateVideoTrack(track?: MediaStreamTrack): void;
play(sessionId?: string): void;
getCurrentFrame(): ImageData;
getCurrentFrameToUint8Array(type: string, quality?: number): Promise<ImageTypedData>;
destroy(): void;
private createElements;
private mountedVideoElement;
private unmountedVideoElement;
protected resetVideoElement(): void;
getContainerElement(): HTMLDivElement | undefined;
}
/**
* Statistics of the call, which can be retrieved by calling [AgoraRTCClient.getRTCStats]{@link IAgoraRTCClient.getRTCStats}.
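 *
 * For example, a quick sketch (assuming `client` is a joined `IAgoraRTCClient`):
 * ```javascript
 * const stats = client.getRTCStats();
 * console.log(`RTT: ${stats.RTT} ms, users in channel: ${stats.UserCount}`);
 * ```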
*/
export declare interface AgoraRTCStats {
/**
* Call duration in seconds.
*/
Duration: number;
/**
* The total bitrate (bps) of the received audio and video, represented by an instantaneous value.
*/
RecvBitrate: number;
/**
* The total number of bytes received, represented by an aggregate value.
*/
RecvBytes: number;
/**
* The total bitrate (bps) of the sent audio and video, represented by an instantaneous value.
*/
SendBitrate: number;
/**
* The total number of bytes sent, represented by an aggregate value.
*/
SendBytes: number;
/**
* The number of users in the channel.
*
* - Communication profile: The number of users in the channel.
* - Live Broadcast profile:
* - If the local user is an audience: The number of users in the channel = The number of hosts in the channel + 1.
* - If the local user is a host: The number of users in the channel = The number of hosts in the channel.
*/
UserCount: number;
/**
* RTT (Round-Trip Time) between the SDK and Agora's edge server, in ms.
*/
RTT: number;
/**
* The estimated bandwidth (Kbps) of the uplink network.
*/
OutgoingAvailableBandwidth: number;
}
export declare enum AUDIO_CONTEXT_EVENT {
IOS_15_16_INTERRUPTION_START = "ios15_16-interruption-start",
IOS_15_16_INTERRUPTION_END = "ios15_16-interruption-end",
IOS_INTERRUPTION_START = "ios-interruption-start",
IOS_INTERRUPTION_END = "ios-interruption-end",
STATE_CHANGE = "state-change"
}
/**
* @ignore
*/
export declare const AUDIO_ENCODER_CONFIG_SETTINGS: {
speech_low_quality: AudioEncoderConfiguration;
speech_standard: AudioEncoderConfiguration;
music_standard: AudioEncoderConfiguration;
standard_stereo: AudioEncoderConfiguration;
high_quality: AudioEncoderConfiguration;
high_quality_stereo: AudioEncoderConfiguration;
};
export declare enum AUDIO_TRACK_EVENT {
UPDATE_TRACK_SOURCE = "update-track-source"
}
declare class AudioBufferSource extends AudioSource {
private audioBuffer;
protected sourceNode?: AudioBufferSourceNode;
private startPlayTime;
private startPlayOffset;
private pausePlayTime;
private options;
private currentLoopCount;
private currentPlaybackSpeed;
set currentState(state: AudioSourceState);
get currentState(): AudioSourceState;
private _currentState;
constructor(buffer: AudioBuffer, options?: AudioSourceOptions);
createWebAudioDiagram(): GainNode;
get duration(): number;
get playbackSpeed(): number;
get currentTime(): number;
updateOptions(options: AudioSourceOptions): void;
startProcessAudioBuffer(): void;
pauseProcessAudioBuffer(): void;
seekAudioBuffer(time: number): void;
resumeProcessAudioBuffer(): void;
stopProcessAudioBuffer(): void;
destroy(): void;
setAudioBufferPlaybackSpeed(speed: number): void;
private startSourceNode;
private createSourceNode;
private handleSourceNodeEnded;
private reset;
}
export declare const audioContextState: AudioState;
declare class AudioElementPlayCenter {
onAutoplayFailed?: () => void;
private elementMap;
private elementStateMap;
private elementsNeedToResume;
private sinkIdMap;
constructor();
setSinkID(trackId: string, deviceID: string): Promise<void>;
play(track: MediaStreamTrack, trackId: string, volume: number, sessionId?: string): void;
updateTrack(trackId: string, track: MediaStreamTrack): void;
isPlaying(trackId: string): boolean;
setVolume(trackId: string, volume: number): void;
stop(trackId: string): void;
private bindAudioElementEvents;
getPlayerState(trackId: string): string;
private autoResumeAudioElement;
autoResumeAfterInterruption: (force?: boolean) => void;
private autoResumeAfterInterruptionOnIOS15_16;
}
export declare const audioElementPlayCenter: AudioElementPlayCenter;
/**
*
* `AudioEncoderConfiguration` is the interface that defines the audio encoder configurations.
*
* You can customize the audio encoder configurations when calling [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}, [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack} or [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}.
*/
export declare interface AudioEncoderConfiguration {
/**
* Sample rate of the audio (Hz).
*/
sampleRate?: number;
/**
* Sample size of the audio.
*/
sampleSize?: number;
/**
* Whether to enable stereo.
*/
stereo?: boolean;
/**
* Bitrate of the audio (Kbps).
*/
bitrate?: number;
}
/**
* The preset audio encoder configurations.
*
 * You can pass the preset audio encoder configurations when calling the following methods:
* - [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}
* - [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}
* - [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}
*
* The following table lists all the preset audio profiles. The SDK uses `"music_standard"` by default.
*
* | Audio Profile | Configurations |
* | -------- | --------------- |
* |`"speech_low_quality"`|Sample rate 16 kHz, mono, encoding bitrate 24 Kbps|
* |`"speech_standard"`|Sample rate 32 kHz, mono, encoding bitrate 24 Kbps|
* |`"music_standard"`|Sample rate 48 kHz, mono, encoding bitrate 32 Kbps|
* |`"standard_stereo"`|Sample rate 48 kHz, stereo, encoding bitrate 64 Kbps|
* |`"high_quality"`|Sample rate 48 kHz, mono, encoding bitrate 128 Kbps|
* |`"high_quality_stereo"`|Sample rate 48 kHz, stereo, encoding bitrate 192 Kbps| Kbps.
* @public
*/
export declare type AudioEncoderConfigurationPreset = keyof typeof AUDIO_ENCODER_CONFIG_SETTINGS;
export declare interface AudioPlaybackOptions {
origin?: boolean;
mixing?: boolean;
effect?: boolean;
}
export declare class AudioProcessorContext extends EventEmitter implements IAudioProcessorContext {
private constraintsMap;
private statsRegistry;
private readonly audioContext;
private readonly trackId;
private readonly direction;
private usageRegistry;
private _chained;
set chained(chained: boolean);
get chained(): boolean;
constructor(audioContext: AudioContext, trackId: string, direction: "local" | "remote");
getConstraints(): Promise<MediaTrackConstraints>;
getAudioContext(): AudioContext;
requestApplyConstraints(constraints: MediaTrackConstraints, processor: IBaseProcessor): Promise<void>;
requestRevertConstraints(processor: IBaseProcessor): Promise<void>;
registerStats(processor: IBaseProcessor, type: string, cb: () => any): void;
unregisterStats(processor: IBaseProcessor, type: string): void;
gatherStats(): ProcessorStats[];
registerUsage(processor: IBaseProcessor, cb: () => Usage): void;
unregisterUsage(processor: IBaseProcessor): void;
gatherUsage(): Promise<UsageWithDirection[]>;
getDirection(): "local" | "remote";
}
export declare class AudioProcessorDestination extends EventEmitter implements IAudioProcessor {
name: string;
ID: string;
private inputTrack?;
private inputNode?;
private readonly audioProcessorContext;
_source?: IAudioProcessor;
constructor(audioProcessorContext: AudioProcessorContext);
get kind(): Kind;
get enabled(): boolean;
pipe(): IAudioProcessor;
unpipe(): void;
enable(): void;
disable(): void;
reset(): void;
updateInput(inputOptions: {
track?: MediaStreamTrack;
node?: AudioNode;
context: IAudioProcessorContext;
}): void;
}
declare abstract class AudioSource extends EventEmitter {
outputNode: GainNode;
outputTrack?: MediaStreamTrack;
isPlayed: boolean;
protected abstract sourceNode?: AudioNode;
context: AudioContext;
private audioBufferNode?;
private destNode?;
private audioOutputLevel;
protected volumeLevelAnalyser: VolumeLevelAnalyser;
private _processedNode;
get processSourceNode(): AudioNode | undefined;
set processedNode(node: AudioNode | undefined);
get processedNode(): AudioNode | undefined;
protected playNode: AudioNode;
protected isDestroyed: boolean;
protected onNoAudioInput?: () => void;
protected isNoAudioInput: boolean;
private _noAudioInputCount;
constructor();
startGetAudioBuffer(bufferSize: number): void;
stopGetAudioBuffer(): void;
createOutputTrack(): MediaStreamTrack;
play(dest?: AudioNode): void;
stop(): void;
getAccurateVolumeLevel(): number;
checkHasAudioInput(times?: number): Promise<boolean>;
getAudioVolume(): number;
setVolume(level: number): void;
destroy(): void;
protected disconnect(): void;
protected connect(): void;
}
export declare enum AudioSourceEvents {
AUDIO_SOURCE_STATE_CHANGE = "audio_source_state_change",
RECEIVE_TRACK_BUFFER = "receive_track_buffer",
ON_AUDIO_BUFFER = "on_audio_buffer",
UPDATE_SOURCE = "update_source"
}
/**
* Options for processing the audio buffer. You need to set the options for processing the audio buffer when calling [startProcessAudioBuffer]{@link IBufferSourceAudioTrack.startProcessAudioBuffer}.
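 *
 * For example, a sketch that loops the audio indefinitely starting 5 seconds in
 * (assuming `bufferTrack` is an existing `IBufferSourceAudioTrack`):
 * ```javascript
 * bufferTrack.startProcessAudioBuffer({ loop: true, startPlayTime: 5 });
 * ```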
*/
export declare interface AudioSourceOptions {
/**
* How many times the audio loops.
*/
cycle?: number;
/**
* Whether to loop the audio infinitely.
*/
loop?: boolean;
/**
* The playback position (seconds).
*/
startPlayTime?: number;
}
/**
* Processing state of the audio buffer:
* - `"stopped"`: The SDK stops processing the audio buffer. Reasons may include:
* - The SDK finishes processing the audio buffer.
* - The user manually stops the processing of the audio buffer.
* - `"playing"`: The SDK is processing the audio buffer.
* - `"paused"`: The SDK pauses processing the audio buffer.
*
* You can get the state with [BufferSourceAudioTrack.on("source-state-change")]{@link IBufferSourceAudioTrack.event_source_state_change}.
*/
export declare type AudioSourceState = "stopped" | "playing" | "paused";
declare class AudioState extends EventEmitter {
prevState: AudioContextState | "interrupted" | undefined;
curState: AudioContextState | "interrupted" | undefined;
currentTime?: number;
currentTimeStuckAt?: number;
private interruptDetectorTrack?;
get duringInterruption(): boolean;
private onLocalAudioTrackMute;
private onLocalAudioTrackUnmute;
bindInterruptDetectorTrack(track: MicrophoneAudioTrack): void;
unbindInterruptDetectorTrack(track: MicrophoneAudioTrack): void;
}
export declare function audioTimerLoop(callback: (time: number) => any, frequency: number): () => void;
declare class AudioTrackSource extends AudioSource {
protected sourceNode: MediaStreamAudioSourceNode;
track: MediaStreamTrack;
clonedTrack?: MediaStreamTrack;
private audioElement;
private isCurrentTrackCloned;
private isRemoteTrack;
private originVolumeLevelAnalyser?;
get isFreeze(): boolean;
constructor(track: MediaStreamTrack, isRemoteTrack?: boolean, originTrack?: MediaStreamTrack);
private rebuildWebAudio;
updateTrack(track: MediaStreamTrack): void;
destroy(): void;
createMediaStreamSourceNode(track: MediaStreamTrack): MediaStreamAudioSourceNode;
updateOriginTrack(originTrack: MediaStreamTrack): void;
getOriginVolumeLevel(): number;
}
declare class AutoPlayGestureEventEmitter extends EventEmitter {
onAutoplayFailed?: () => void;
onAudioAutoplayFailed?: () => void;
}
export declare const autoPlayGestureEventEmitter: AutoPlayGestureEventEmitter;
/**
* @ignore
*
* Image enhancement options. You need to set the image enhancement options when calling [setBeautyEffect]{@link ILocalVideoTrack.setBeautyEffect}.
*/
export declare interface BeautyEffectOptions {
/**
*
* The smoothness level.
*
* The value range is [0.0, 1.0]. The original smoothness level is 0.0. The default value is 0.5. This parameter is usually used to remove blemishes.
*/
smoothnessLevel?: number;
/**
* The brightness level.
*
* The value range is [0.0, 1.0]. The original brightness level is 0.0. The default value is 0.7.
*/
lighteningLevel?: number;
/**
* The redness level.
*
* The value range is [0.0, 1.0]. The original redness level is 0.0. The default value is 0.1. This parameter adjusts the red saturation level.
*/
rednessLevel?: number;
/**
* The contrast level. Use this together with {@link lighteningLevel}.
* - 0: Low contrast level.
* - 1: (Default) The original contrast level.
* - 2: High contrast level.
*/
lighteningContrastLevel?: 0 | 1 | 2;
}
export declare interface BeautyWebGLParameters {
denoiseLevel?: number;
lightLevel?: number;
rednessLevel?: number;
lighteningContrastLevel?: number;
}
export declare const blob2Uint8Array: (blob: Blob) => Promise<Uint8Array>;
export declare class BufferSourceAudioTrack extends LocalAudioTrack implements IBufferSourceAudioTrack {
source: string | File | AudioBuffer | null;
private _bufferSource;
get __className__(): string;
constructor(source: string | File | AudioBuffer, bufferSource: AudioBufferSource, encodingConfig?: AudioEncoderConfiguration, trackId?: string);
get currentState(): AudioSourceState;
get duration(): number;
get playbackSpeed(): number;
getCurrentTime(): number;
startProcessAudioBuffer(options?: AudioSourceOptions): void;
pauseProcessAudioBuffer(): void;
seekAudioBuffer(time: number): void;
resumeProcessAudioBuffer(): void;
stopProcessAudioBuffer(): void;
close(): void;
setAudioBufferPlaybackSpeed(speed: number): void;
}
/**
* Configurations for the audio track from an audio file or `AudioBuffer` object. Set these configurations when calling [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}.
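 *
 * A minimal sketch (the HTTPS URL is a placeholder; assuming the `AgoraRTC` entry point is in scope):
 * ```javascript
 * const bufferTrack = await AgoraRTC.createBufferSourceAudioTrack({
 *   source: "https://example.com/audio.mp3", // or a File or AudioBuffer object
 *   cacheOnlineFile: true,
 * });
 * ```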
*/
export declare interface BufferSourceAudioTrackInitConfig {
/**
* The type of the audio source:
* - `File`: An [File](https://developer.mozilla.org/en-US/docs/Web/API/File) object, representing a local audio file.
* - `string`: The online audio file retrieved from an HTTPS address. Ensure the address supports HTTPS and CORS.
* - `AudioBuffer`: An [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object, representing the raw data in PCM format.
*/
source: File | string | AudioBuffer;
/**
* Whether to cache the online file:
* - `true`: Cache the online file.
* - `false`: (default) Do not cache the online file.
*/
cacheOnlineFile?: boolean;
/**
* The audio encoder configurations.
*
* You can set the audio encoder configurations in either of the following ways:
* - Pass the preset audio encoder configurations by using [[AudioEncoderConfigurationPreset]].
* - Pass your customized audio encoder configurations by using [[AudioEncoderConfiguration]].
*
* > Firefox does not support setting the audio encoding rate.
*/
encoderConfig?: AudioEncoderConfiguration | AudioEncoderConfigurationPreset;
}
export declare class CameraVideoTrack extends LocalVideoTrack implements ICameraVideoTrack {
private _config;
private _originalConstraints;
private _constraints;
_enabled: boolean;
_deviceName: string;
get __className__(): string;
constructor(track: MediaStreamTrack, config: CameraVideoTrackInitConfig, constraints: MediaTrackConstraints, scalabilityConfig?: SVCConfiguration, optimizationMode?: OptimizationMode | "balanced", trackId?: string);
setDevice(deviceId: string | RequiredOnlyOneOf<{
facingMode: VideoFacingModeEnum;
deviceId: string;
}>): Promise<void>;
private _setDeviceById;
private _setDeviceByFacingModel;
setEnabled(enabled: boolean, skipChangeState?: boolean): Promise<void>;
setEncoderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, doNotRenegoation?: boolean): Promise<void>;
protected _getDefaultPlayerConfig(): Partial<PlayerConfig>;
protected onTrackEnded(): void;
renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
tryResumeVideoForIOS15_16WeChat: () => Promise<void>;
close(): void;
clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): CameraVideoTrack;
bindProcessorContextEvents(): void;
}
/**
* Configurations for the video track from the video captured by a camera. Set these configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}.
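 *
 * A minimal sketch (assuming the `AgoraRTC` entry point is in scope; `"720p_1"` is one of the SDK's preset names):
 * ```javascript
 * const videoTrack = await AgoraRTC.createCameraVideoTrack({
 *   facingMode: "user",
 *   encoderConfig: "720p_1", // or pass a VideoEncoderConfiguration object
 *   optimizationMode: "motion",
 * });
 * ```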
*/
export declare interface CameraVideoTrackInitConfig {
/**
* The video encoder configurations.
*
* You can set the video encoder configurations in either of the following ways:
* - Pass the preset video encoder configurations by using [[VideoEncoderConfigurationPreset]].
* - Pass your customized video encoder configurations by using [[VideoEncoderConfiguration]].
* - Leave this property empty to use the SDK's default value, `"480p_1"` (resolution: 640 × 480, frame rate: 15 fps, bitrate: 500 Kbps).
*/
encoderConfig?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset;
/**
 * Whether to use the front camera or the rear camera.
*
* You can use this parameter to choose between the front camera and the rear camera on a mobile device:
* - `"user"`: The front camera.
* - `"environment"`: The rear camera.
*/
facingMode?: VideoFacingModeEnum;
/**
* Specifies the camera ID.
*
* You can get a list of the available cameras by calling [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}.
*/
cameraId?: string;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.2.0*
*
* Sets the video transmission optimization mode.
*
 * You can call this method during a video call, live streaming, or screen sharing to dynamically change the optimization mode. For example, during screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
*
* > Note: This method supports Chrome only.
*
* @param mode The video transmission optimization mode:
* - `"detail"`: Prioritizes video quality.
* - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
* - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
* - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
* - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
* - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
*/
optimizationMode?: OptimizationMode;
/**
* @ignore
*
* @since
* <br>&emsp;&emsp;&emsp;*4.18.0*
*
* Configurations for Scalable Video Coding (SVC).
*
* You can set the configurations using one of the following options:
* - Use the preset SVC configurations provided by the SDK through {@link SVCConfigurationPreset}.
* - Use your custom SVC configurations through {@link SVCConfiguration}.
*/
scalabiltyMode?: SVCConfiguration | SVCConfigurationPreset;
}
export declare function checkMediaStreamTrackResolution(track: MediaStreamTrack): Promise<[number, number]>;
export declare function checkTrackState(): (target: any, propertyKey: any, descriptor: PropertyDescriptor) => TypedPropertyDescriptor<any> | undefined;
/**
* The visibility of the `<video>` tag.
*
* Get the visibility of the `<video>` tag through the following methods and events:
* - [localVideoTrack.on("video-element-visible-status")]{@link ILocalVideoTrack.event_video_element_visible_status}
* - [localVideoTrack.getVideoElementVisibleStatus]{@link ILocalVideoTrack.getVideoElementVisibleStatus}
* - [remoteVideoTrack.on("video-element-visible-status")]{@link IRemoteVideoTrack.event_video_element_visible_status_2}
* - [remoteVideoTrack.getVideoElementVisibleStatus]{@link IRemoteVideoTrack.getVideoElementVisibleStatus}
*
* This object contains the following parameters:
* - `visible`: Boolean, whether the `<video>` tag is visible or not.
 * - `reason`: The reason why the `<video>` tag is invisible. This parameter is valid only when `visible` is `false`:
 * - `"SIZE"`: The size of `<video>` is too small to see.
 * - `"STYLE"`: The CSS styles of `<video>` or its ancestor element cause `<video>` to be invisible.
 * - `"POSITION"`: The `<video>` tag or its ancestor element is positioned outside the viewport.
* - `"COVERED"`: The `<video>` tag is covered by other elements.
*/
export declare type CheckVideoVisibleResult = CheckVisibleResult;
/**
* Specifies a constraint for a property, such as the resolution or bitrate for video capture in [[VideoEncoderConfiguration]].
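 *
 * For example, a frame rate constraint that prefers 30 fps but accepts anything between 15 fps and 60 fps:
 * ```javascript
 * const frameRate = { min: 15, ideal: 30, max: 60 };
 * ```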
*/
export declare interface ConstrainLong {
/**
* The lower limit of the property.
*/
min?: number;
/**
* The upper limit of the property.
*/
max?: number;
/**
* An ideal value of a property. If the video capture device cannot output this value, it outputs the closest value instead.
*/
ideal?: number;
/**
* A required value of a property. If the video capture device cannot output this value, the video capture fails.
*/
exact?: number;
}
export declare function createBufferSourceAudioTrack(config: BufferSourceAudioTrackInitConfig): Promise<BufferSourceAudioTrack>;
export declare function createCameraVideoTrack(config?: CameraVideoTrackInitConfig): Promise<CameraVideoTrack>;
export declare function createCustomAudioTrack(config: CustomAudioTrackInitConfig): LocalAudioTrack;
export declare function createCustomVideoTrack(config: CustomVideoTrackInitConfig): LocalVideoTrack;
export declare function createMicrophoneAndCameraTracks(audioConfig?: MicrophoneAudioTrackInitConfig, videoConfig?: CameraVideoTrackInitConfig): Promise<[MicrophoneAudioTrack, CameraVideoTrack]>;
export declare function createMicrophoneAudioTrack(config?: MicrophoneAudioTrackInitConfig): Promise<MicrophoneAudioTrack>;
export declare function createMixingAudioTrack(trackList: LocalAudioTrack[]): MixingAudioTrack;
export declare function createScreenVideoTrack(config: ScreenVideoTrackInitConfig, withAudio: "enable"): Promise<[LocalVideoTrack, LocalAudioTrack]>;
export declare function createScreenVideoTrack(config?: ScreenVideoTrackInitConfig, withAudio?: "disable"): Promise<LocalVideoTrack>;
export declare function createScreenVideoTrack(config: ScreenVideoTrackInitConfig, withAudio: "auto"): Promise<[LocalVideoTrack, LocalAudioTrack] | LocalVideoTrack>;
/**
* Configurations for the custom audio track. Set these configurations when calling [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}.
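 *
 * A minimal sketch that wraps a raw `getUserMedia` audio track (assuming the `AgoraRTC` entry point is in scope):
 * ```javascript
 * const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
 * const customAudioTrack = AgoraRTC.createCustomAudioTrack({
 *   mediaStreamTrack: stream.getAudioTracks()[0],
 * });
 * ```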
*/
export declare interface CustomAudioTrackInitConfig {
/**
* Your [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
*/
mediaStreamTrack: MediaStreamTrack;
/**
* The audio encoder configurations.
*
* You can set the audio encoder configurations in either of the following ways:
* - Pass the preset audio encoder configurations by using [[AudioEncoderConfigurationPreset]].
* - Pass your customized audio encoder configurations by using [[AudioEncoderConfiguration]].
*
* > Firefox does not support setting the audio encoding rate.
*/
encoderConfig?: AudioEncoderConfiguration | AudioEncoderConfigurationPreset;
}
/**
* Configurations for the custom video track. Set these configurations when calling [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}.
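 *
 * A minimal sketch that turns a `<canvas>` into a custom video track
 * (assuming a canvas element exists and the `AgoraRTC` entry point is in scope):
 * ```javascript
 * const canvas = document.querySelector("canvas");
 * const [mediaStreamTrack] = canvas.captureStream(30).getVideoTracks();
 * const customVideoTrack = AgoraRTC.createCustomVideoTrack({
 *   mediaStreamTrack,
 *   frameRate: { ideal: 30, max: 30 },
 *   optimizationMode: "detail",
 * });
 * ```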
*/
export declare interface CustomVideoTrackInitConfig {
/**
* Your [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
*/
mediaStreamTrack: MediaStreamTrack;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.17.1*
*
* Width of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
width?: number | ConstrainLong;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.17.1*
*
* Height of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
height?: number | ConstrainLong;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.17.1*
*
* Frame rate of the video (fps).
*
* You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
*
* For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
frameRate?: number | ConstrainLong;
/**
* The minimum bitrate of sending the video track (Kbps).
*/
bitrateMin?: number;
/**
* The maximum bitrate of sending the video track (Kbps).
*/
bitrateMax?: number;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.2.0*
*
* Sets the video transmission optimization mode.
*
 * You can call this method during a video call, live streaming, or screen sharing to dynamically change the optimization mode. For example, during screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
*
* > Note: This method supports Chrome only.
*
* @param mode The video transmission optimization mode:
* - `"balanced"`: Uses the default optimization mode.
 * - For a screen-sharing video track, the default transmission optimization strategy is to prioritize clarity.
* - For the other types of video tracks, the SDK may reduce the frame rate or the sending resolution in poor network conditions.
* - `"detail"`: Prioritizes video quality.
* - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
* - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
* - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
* - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
* - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
*/
optimizationMode?: "motion" | "detail";
/**
* @ignore
*
* @since
* <br>&emsp;&emsp;&emsp;*4.18.0*
*
* Configurations for Scalable Video Coding (SVC).
*
* You can set `scalabiltyMode` using one of the following options:
* - Use the preset SVC configurations provided by the SDK through {@link SVCConfigurationPreset}.
* - Use your custom SVC configurations through {@link SVCConfiguration}.
*/
scalabiltyMode?: SVCConfiguration | SVCConfigurationPreset;
}
export declare abstract class DataChannel extends EventEmitter implements IDataChannel {
readonly trackMediaType: TrackMediaType.DATA;
private _version;
private _type;
_config: IDataChannelConfig;
_originDataChannel?: RTCDataChannel;
protected _dataStreamPacketHeader: ArrayBuffer;
protected _dataStreamPacketHandler: IDataStream;
private _datachannelEventMap;
constructor(config: IDataChannelConfig, datachannel?: RTCDataChannel);
useDataStream(dataStream: IDataStream): void;
get id(): number;
get ordered(): boolean;
get maxRetransmits(): number;
get metadata(): string;
get readyState(): RTCDataChannelState;
get _originDataChannelId(): number | null;
getChannelId(): number;
getConfig(): IDataChannelConfig;
_close(): void;
_waitTillOpen(): Promise<void>;
_updateOriginDataChannel(datachannel: RTCDataChannel): void;
private _initPacketHeader;
private _bandDataChannelEvents;
private _unbindDataChannelEvents;
}
export declare enum DataChannelEvents {
OPEN = "open",
MESSAGE = "message",
CLOSE = "close",
CLOSING = "closing",
ERROR = "error"
}
export declare function decodeAudioData(buffer: ArrayBuffer): Promise<AudioBuffer>;
export declare const DEFAULT_LOCAL_AUDIO_TRACK_STATS: LocalAudioTrackStats;
export declare const DEFAULT_LOCAL_VIDEO_TRACK_STATS: LocalVideoTrackStats;
export declare const DEFAULT_NETWORK_QUALITY_STATS: NetworkQuality;
export declare const DEFAULT_REMOTE_AUDIO_TRACK_STATS: RemoteAudioTrackStats;
export declare const DEFAULT_REMOTE_VIDEO_TRACK_STATS: RemoteVideoTrackStats;
export declare interface DenoiserStats {
ns: number;
ebn: number;
ean: number;
vl: number;
}
export declare function detectSupportAudioElementSetSinkId(): boolean;
/**
* Information of the media input device.
*
* - You can get the audio sampling device information through [onMicrophoneChanged]{@link onMicrophoneChanged}.
* - You can get the video capture device information through [onCameraChanged]{@link onCameraChanged}.
* - You can get the audio playback device information through [onPlaybackDeviceChanged]{@link onPlaybackDeviceChanged}.
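 *
 * For example, a sketch that watches for a newly plugged-in microphone (assuming the `AgoraRTC` entry point is in scope):
 * ```javascript
 * AgoraRTC.onMicrophoneChanged = (deviceInfo) => {
 *   if (deviceInfo.state === "ACTIVE") {
 *     console.log("new microphone:", deviceInfo.device.label);
 *   }
 * };
 * ```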
*/
export declare interface DeviceInfo {
/**
* The latest time when the state of the media input device was updated.
*
* A Unix timestamp in milliseconds.
*/
updateAt: number;
/**
* The time when the SDK first detects the media input device.
*
* A Unix timestamp in milliseconds.
*/
initAt: number;
/**
* The state of the capture device.
*/
state: DeviceState;
/**
* Device information of the media input device. See [MediaDeviceInfo](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo) for details.
*/
device: MediaDeviceInfo;
}
export declare class DeviceManager extends EventEmitter {
private _state;
get state(): DeviceManagerState;
set state(state: DeviceManagerState);
private isAccessMicrophonePermission;
private isAccessCameraPermission;
private lastAccessMicrophonePermission;
private lastAccessCameraPermission;
private checkdeviceMatched;
private deviceInfoMap;
constructor();
enumerateDevices(audio: boolean, video: boolean, skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
getRecordingDevices(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
getCamerasDevices(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
getSpeakers(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
searchDeviceIdByName(deviceName: string): string | null;
getDeviceById(deviceId: string): Promise<MediaDeviceInfo>;
private init;
private updateDevicesInfo;
private checkMediaDeviceInfoIsOk;
}
export declare const deviceManager: DeviceManager;
export declare enum DeviceManagerEvent {
STATE_CHANGE = "state_change",
RECORDING_DEVICE_CHANGED = "recordingDeviceChanged",
PLAYOUT_DEVICE_CHANGED = "playoutDeviceChanged",
CAMERA_DEVICE_CHANGED = "cameraDeviceChanged"
}
export declare enum DeviceManagerState {
IDLE = "IDLE",
INITING = "INITING",
INITEND = "INITEND"
}
/**
* The state of the media input device.
* - `"ACTIVE"`: The device is plugged in.
* - `"INACTIVE"`: The device is unplugged.
*/
export declare type DeviceState = "ACTIVE" | "INACTIVE";
export declare const emptyImage2TypedArray: (type: string) => Promise<ImageTypedData>;
/**
* @ignore
*
* Occurs when the device is overloaded after you call [setBeautyEffect]{@link ILocalVideoTrack.setBeautyEffect} to enable image enhancement.
*
* You can listen for this event to notify users of the device overload and disable image enhancement.
*
* ```javascript
* localVideoTrack.on("beauty-effect-overload", () => {
* console.log("beauty effect overload, disable beauty effect");
* localVideoTrack.setBeautyEffect(false);
* });
* ```
* @event
* @asMemberOf ILocalVideoTrack
*/
declare function event_beauty_effect_overload(): void;
/**
* Occurs when the first remote audio or video frame is decoded.
*
* @event
* @asMemberOf IRemoteTrack
*/
declare function event_first_frame_decoded(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.20.1*
*
 * Occurs when the SDK parses SEI data from the H.264 video stream. The event returns the SEI data.
*
* @param sei SEI data in Uint8Array
* @asMemberOf ILocalVideoTrack
* @event
*/
declare function event_sei_received(sei: Uint8Array): void;
/**
* Occurs when the state of processing the audio buffer in [BufferSourceAudioTrack]{@link IBufferSourceAudioTrack} changes.
*
* @param currentState The state of processing the audio buffer:
* - `"stopped"`: The SDK stops processing the audio buffer. Reasons may include:
* - The SDK finishes processing the audio buffer.
* - The user manually stops the processing of the audio buffer.
* - `"paused"`: The SDK pauses the processing of the audio buffer.
* - `"playing"`: The SDK is processing the audio buffer.
*
* @event
* @asMemberOf IBufferSourceAudioTrack
*/
declare function event_source_state_change(currentState: AudioSourceState): void;
/**
 * Occurs when an audio or video track ends.
*
* Reasons may include:
* - Camera is unplugged.
* - Microphone is unplugged.
* - The local user stops screen sharing.
* - The local user closes the underlying `MediaStreamTrack`.
* - A local media device malfunctions.
* - The device permission is revoked.
*
* @event
* @asMemberOf ILocalTrack
*/
declare function event_track_ended(): void;
/**
 * Occurs when a media track is updated.
* @param track The media stream track. See [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack).
* @event
*/
export declare function event_track_updated(track: MediaStreamTrack): void;
/**
* Occurs when the `RTCRtpTransceiver` instance of the current track is updated.
*
* @param transceiver The new `RTCRtpTransceiver` instance.
* @param type The type of the video stream to which the current track belongs. See {@link StreamType}.
*
* @event
* @asMemberOf ILocalTrack
*/
declare function event_transceiver_updated(transceiver: RTCRtpTransceiver, type?: StreamType): void;
/**
* Occurs when the `RTCRtpTransceiver` instance of the current track is updated.
*
* @param transceiver The new `RTCRtpTransceiver` instance.
* @event
* @asMemberOf IRemoteTrack
*/
declare function event_transceiver_updated_2(transceiver: RTCRtpTransceiver): void;
/**
* @ignore
*/
declare function event_user_datachannel_close(): void;
/**
* @ignore
*/
declare function event_user_datachannel_error(ev: Event): void;
/**
* @ignore
*/
declare function event_user_datachannel_message(data: ArrayBuffer): void;
/**
* @ignore
*/
declare function event_user_datachannel_open(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.8.0*
*
* Indicates the visibility of the `<video>` HTML tag.
*
* The SDK triggers this event every 30 seconds.
*
 * After you call `localVideoTrack.play`, the SDK creates a [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `localVideoTrack.isPlaying` is `true` but you cannot see any video, this event helps you check whether the `<video>` tag is visible or not and learn the reason when the `<video>` tag is invisible.
*
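 * A minimal listener sketch (assuming `localVideoTrack` is a playing local video track):
 * ```javascript
 * localVideoTrack.on("video-element-visible-status", (data) => {
 *   if (data && !data.visible) {
 *     console.log("video element invisible, reason:", data.reason);
 *   }
 * });
 * ```
 *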
* @param data The visibility of the `<video>` tag.
* @asMemberOf ILocalVideoTrack
* @event
*/
declare function event_video_element_visible_status(data?: CheckVideoVisibleResult): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.8.0*
*
* Indicates the visibility of the `<video>` HTML tag.
*
* The SDK triggers this event every 30 seconds.
*
 * After you call `remoteVideoTrack.play`, the SDK creates a [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `remoteVideoTrack.isPlaying` is `true` but you cannot see any video, this event helps you check whether the `<video>` tag is visible or not and learn the reason when the `<video>` tag is invisible.
*
* @param data The visibility of the `<video>` tag.
* @asMemberOf IRemoteVideoTrack
* @event
*/
declare function event_video_element_visible_status_2(data?: CheckVideoVisibleResult): void;
/**
* Occurs when the video state changes.
*
* @event
* @asMemberOf IRemoteTrack
*/
declare function event_video_state_changed(videoState: VideoState): void;
export declare interface ExtendedVideoFrameCallbackMetadata extends VideoFrameCallbackMetadata {
timestamp: DOMHighResTimeStamp;
}
export declare interface ExternalMethods {
getDenoiserStats?: () => DenoiserStats | undefined;
}
declare class FakeAudioNode {
disconnect(): void;
connect(): void;
}
declare class FakeTrackSource extends EventEmitter {
context: any;
processSourceNode: undefined;
outputTrack: undefined;
processedNode: undefined;
clonedTrack: undefined;
outputNode: FakeAudioNode;
get isPlayed(): boolean;
get isFreeze(): boolean;
constructor();
setVolume(): void;
createOutputTrack(): MediaStreamTrack;
getOriginVolumeLevel(): number;
getAccurateVolumeLevel(): number;
stopGetAudioBuffer(): void;
startGetAudioBuffer(): void;
play(): void;
stop(): void;
destroy(): void;
updateTrack(): void;
updateOriginTrack(): void;
createMediaStreamSourceNode(): undefined;
}
export declare const frameData2CryptoBuffer: (imageData: ImageTypedData, appid: string, channelName: string) => Promise<Uint8Array>;
export declare function getAudioContext(): AudioContext;
export declare function getAudioEncoderConfiguration(profile: AudioEncoderConfigurationPreset | AudioEncoderConfiguration): AudioEncoderConfiguration;
export declare function getBitrateConstrainRange(width: number, height: number, frameRate: number, bitrateMin?: number, bitrateMax?: number): DoubleRange;
export declare function getBitrateFromResAndFps(width: number, height: number, fps: number): Required<DoubleRange>;
export declare function getCompatibility(): AgoraRTCCompatibility;
export declare function getConstraintsFromCameraConfig(config: CameraVideoTrackInitConfig): MediaTrackConstraints;
export declare function getConstraintsFromMicrophoneConfig(config: MicrophoneAudioTrackInitConfig): MediaTrackConstraints;
export declare function getConstraintsFromScreenConfig(config: ScreenVideoTrackInitConfig): ScreenConstraintsWithElectron;
export declare function getElectronScreenSources(type?: ScreenSourceType): Promise<ElectronDesktopCapturerSource[]>;
export declare function getElectronScreenStream(sourceId: string, config: ScreenConstraintsWithElectron, captureAudio?: boolean): Promise<MediaStream>;
export declare function getElectronScreenStreamByUserSelect(config: ScreenConstraintsWithElectron, captureAudio?: boolean): Promise<MediaStream>;
export declare function getLocalStream(config: GetUserMediaConfig, id: string): Promise<MediaStream>;
export declare const getOriginSenderConfig: (track: LocalVideoTrack) => {
frameRate: number;
bitrateMax: number;
bitrateMin: number;
scaleResolutionDownBy: number;
scale: number;
};
export declare function getScalabilityConfiguration(profile: SVCConfiguration | SVCConfigurationPreset): SVCConfiguration;
export declare function getScreenEncoderConfiguration(profile: VideoEncoderConfiguration | ScreenEncoderConfigurationPreset | string): VideoEncoderConfiguration;
export declare function getSilenceAudioTrack(): MediaStreamTrack;
export declare function getSilenceSamplesDuration(buffer: AudioBuffer): number;
export declare function getStaticTrackStream(track: MediaStreamTrack, interval?: number): MediaStreamTrack | void;
export declare interface GetUserMediaConfig {
video?: MediaTrackConstraints;
audio?: MediaTrackConstraints;
screen?: ScreenConstraintsWithElectron;
videoSource?: MediaStreamTrack;
audioSource?: MediaStreamTrack;
screenAudio?: boolean;
}
export declare function getVideoEncoderConfiguration(profile: VideoEncoderConfigurationPreset | VideoEncoderConfiguration | undefined): VideoEncoderConfiguration;
export declare function handleGetUserMediaError(errorName: string, message?: string): AgoraRTCError;
export declare let HAS_GUM_AUDIO: boolean;
export declare let HAS_GUM_VIDEO: boolean;
export declare function hasAudioContext(): boolean;
/**
* Inherited from [LocalAudioTrack]{@link ILocalAudioTrack}, `BufferSourceAudioTrack` is an interface for the audio from a local audio file and adds several functions for controlling the processing of the audio buffer, such as starting processing, stopping processing, and seeking a specified time location.
*
* You can create an audio track from an audio file by calling [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}.
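 *
 * A typical flow, sketched (assuming the `AgoraRTC` entry point is in scope and `file` is an audio `File` object):
 * ```javascript
 * const bufferTrack = await AgoraRTC.createBufferSourceAudioTrack({ source: file });
 * bufferTrack.on("source-state-change", (state) => console.log("state:", state));
 * bufferTrack.startProcessAudioBuffer({ loop: true });
 * bufferTrack.play(); // play the audio locally
 * ```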
*/
export declare interface IBufferSourceAudioTrack extends ILocalAudioTrack {
/**
* The [source]{@link BufferSourceAudioTrackInitConfig.source} specified when creating an audio track.
*/
source: string | File | AudioBuffer | null;
/**
* The current state of audio processing, such as start, pause, or stop.
*/
currentState: AudioSourceState;
/**
* The total duration of the audio (seconds).
*/
duration: number;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.18.0*
*
* The playback speed of the current audio file. Valid range is [50, 400], where:
* - `50`: Half the original speed.
* - `100`: (Default) The original speed.
* - `400`: Four times the original speed.
*/
playbackSpeed: number;
/**
* @param event The event name.
* @param listener See [source-state-change]{@link event_source_state_change}.
*/
on(event: "source-state-change", listener: typeof event_source_state_change): void;
/**
* When the specified event happens, the SDK triggers the callback that you pass.
*
* @param event The event name.
* @param listener The callback function.
*/
on(event: string, listener: Function): void;
/**
* Gets the progress (seconds) of the audio buffer processing.
*
* @returns The progress (seconds) of the audio buffer processing.
*/
getCurrentTime(): number;
/**
* Starts processing the audio buffer.
*
* > Starting processing the audio buffer means that the processing unit in the SDK has received the audio data. If the audio track has been published, the remote user can hear the audio.
 * > Whether the local user can hear the audio depends on whether you call the [[play]] method to send the audio data to the sound card.
*
* @param options Options for processing the audio buffer. See [[AudioSourceOptions]].
*/
startProcessAudioBuffer(options?: AudioSourceOptions): void;
/**
* Pauses processing the audio buffer.
*/
pauseProcessAudioBuffer(): void;
/**
* Jumps to a specified time point.
*
* @param time The specified time point (seconds).
*/
seekAudioBuffer(time: number): void;
/**
* Resumes processing the audio buffer.
*/
resumeProcessAudioBuffer(): void;
/**
* Stops processing the audio buffer.
*/
stopProcessAudioBuffer(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.18.0*
*
* Sets the playback speed for the current audio file.
*
* You can call this method before or after joining a channel.
*
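 * For example (assuming `bufferTrack` is an `IBufferSourceAudioTrack`):
 * ```javascript
 * bufferTrack.setAudioBufferPlaybackSpeed(200); // play at twice the original speed
 * ```
 *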
* @param speed The playback speed. Valid range is [50, 400], where:
* - `50`: Half the original speed.
* - `100`: (Default) The original speed.
* - `400`: Four times the original speed.
*/
setAudioBufferPlaybackSpeed(speed: number): void;
}
/**
*
* Inherited from [LocalVideoTrack]{@link ILocalVideoTrack}, `CameraVideoTrack` is an interface for the video captured by a local camera and adds functions such as switching devices and adjusting video encoder configurations.
*
* You can create a local camera video track by calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}.
*/
export declare interface ICameraVideoTrack extends ILocalVideoTrack {
/**
* Sets the device for capturing video.
*
* > You can call this method either before or after publishing the video track.
*
* @param deviceId Device ID, which can be passed in using the following ways:
* - Pass a string: Pass the `deviceId` obtained using [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}.
* - Pass an object: Starting from version 4.19.0, you can pass an object
* containing `facingMode` or `deviceId`, but only one of these properties
* can be specified. `deviceId` can be obtained through [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}, and `facingMode` supports the following values:
* - `"environment"`: Use the rear camera.
* - `"user"`: Use the front camera.
*/
setDevice(deviceId: string | RequiredOnlyOneOf<{
facingMode: VideoFacingModeEnum;
deviceId: string;
}>): Promise<void>;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.0.0*
*
* Enables/Disables the track.
*
* After a track is disabled, the SDK stops playing and publishing the track.
*
* > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
* > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
* > - Do not call `setEnabled` and `setMuted` together.
*
* @param enabled Whether to enable the track:
* - `true`: Enable the track.
* - `false`: Disable the track.
*/
setEnabled(enabled: boolean): Promise<void>;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.19.0*
*
* Clones the current video track to create a new video track.
*
* In scenarios such as video conferencing and online education, you can use this method to display the same video stream with two sets of display parameters, including resolution, frame rate, and bitrate. For example, you can have one display set to high-definition and the other to low-definition.
*
* @param config The encoding configuration for the new video track. You can pass in the SDK's built-in encoding configuration through [[VideoEncoderConfiguration]], or customize the video encoding configuration by passing in a [[VideoEncoderConfigurationPreset]].
* @param cloneTrack Whether to clone the current track. Default is `true`.
* @returns The newly generated video track.
*/
clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): ICameraVideoTrack;
}
/**
* @ignore
*/
export declare interface IDataChannel extends EventEmitter {
readonly id: number;
readonly maxRetransmits: number | null;
readonly ordered: boolean;
readonly readyState: RTCDataChannelState;
readonly metadata: string;
getChannelId(): number;
getConfig(): IDataChannelConfig;
}
/**
* @ignore
*/
export declare interface IDataChannelConfig {
id: number;
ordered: boolean;
metadata: string;
}
declare interface IDataStream {
serialize: (payload: ArrayBuffer) => ArrayBuffer;
deserialize: (packet: ArrayBuffer) => ArrayBuffer;
}
/**
* `LocalAudioTrack` is the basic interface for local audio tracks, providing main methods of local audio tracks.
*
* You can create a local audio track by calling [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}.
*
* The following interfaces are inherited from `LocalAudioTrack`:
* - [MicrophoneAudioTrack]{@link IMicrophoneAudioTrack}, the interface for the audio sampled by a local microphone, which adds several microphone-related functions.
* - [BufferSourceAudioTrack]{@link IBufferSourceAudioTrack}, the interface for the audio from a local audio file, which adds several audio-file-related functions.
*/
export declare interface ILocalAudioTrack extends ILocalTrack {
/**
* Sets the volume of a local audio track.
*
 * @param volume The volume. The value ranges from 0 (mute) to 1000 (maximum). A value of 100 is the original volume. The volume change may not be noticeable to the human ear. If the local track has been published, setting the volume affects the volume heard by remote users.
*/
setVolume(volume: number): void;
/**
* Gets the audio level of a local audio track.
*
* @returns The audio level. The value range is [0,1]. 1 is the highest audio level.
* Usually a user with audio level above 0.6 is a speaking user.
*
*/
getVolumeLevel(): number;
/**
* Sets the callback for getting raw audio data in PCM format.
*
* After you successfully set the callback, the SDK constantly returns the audio frames of a local audio track in this callback by using [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer).
*
* > You can set the `frameSize` parameter to determine the frame size in each callback, which affects the interval between the callbacks. The larger the frame size, the longer the interval between them.
*
* ```js
* track.setAudioFrameCallback((buffer) => {
 *   for (let channel = 0; channel < buffer.numberOfChannels; channel += 1) {
 *     // Float32Array with PCM data
 *     const currentChannelData = buffer.getChannelData(channel);
 *     console.log("PCM data in channel", channel, currentChannelData);
 *   }
* }, 2048);
*
* // ....
* // Stop getting the raw audio data
* track.setAudioFrameCallback(null);
* ```
*
 * @param audioFrameCallback The callback function for receiving the [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object. If you set `audioFrameCallback` as `null`, the SDK stops getting raw audio data.
* @param frameSize The number of samples of each audio channel that an `AudioBuffer` object contains. You can set `frameSize` as 256, 512, 1024, 2048, 4096, 8192, or 16384. The default value is 4096.
*/
setAudioFrameCallback(audioFrameCallback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
/**
* Plays a local audio track.
*
 * > When playing an audio track, you do not need to pass any DOM element.
*/
play(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.1.0*
*
* > Note:
* > - As of v4.7.0, this method no longer takes effect. Use [IRemoteAudioTrack.setPlaybackDevice]{@link IRemoteAudioTrack.setPlaybackDevice} instead.
*
* Sets the playback device (speaker) for the remote audio stream.
*
* @param deviceId The device ID, which can be retrieved by calling [[getPlaybackDevices]].
*/
setPlaybackDevice(deviceId: string): Promise<void>;
/**
* Gets the statistics of a local audio track.
*
* @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
*/
getStats(): LocalAudioTrackStats;
/**
* Inserts a `Processor` to the local audio track.
*
* @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
*
* @returns The `Processor` instance.
*/
pipe(processor: IAudioProcessor): IAudioProcessor;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.10.0*
*
* Removes the `Processor` inserted to the local audio track.
*/
unpipe(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.10.0*
*
* The destination of the current processing pipeline on the local audio track.
*/
processorDestination: IAudioProcessor;
}
/**
* @ignore
*/
export declare interface ILocalDataChannel extends IDataChannel {
send(data: ArrayBuffer): void;
/** @internal */
on(event: "open", listener: typeof event_user_datachannel_open): void;
/** @internal */
on(event: "close", listener: typeof event_user_datachannel_close): void;
/** @internal */
on(event: "error", listener: typeof event_user_datachannel_error): void;
}
/**
* `LocalTrack` is the basic interface for local tracks, providing public methods for [LocalAudioTrack]{@link ILocalAudioTrack} and [LocalVideoTrack]{@link ILocalVideoTrack}.
*/
export declare interface ILocalTrack extends ITrack {
/**
* @param event The event name.
* @param listener See [track-updated]{@link event_track_updated}.
*/
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
* @param event The event name.
* @param listener See [track-ended]{@link event_track_ended}.
*/
on(event: "track-ended", listener: typeof event_track_ended): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
*/
on(event: "transceiver-updated", listener: typeof event_transceiver_updated): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.0.0*
*
* Enables/Disables the track.
*
* After a track is disabled, the SDK stops playing and publishing the track.
*
* > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
* > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
* > - Do not call `setEnabled` and `setMuted` together.
*
* @param enabled Whether to enable the track:
* - `true`: Enable the track.
* - `false`: Disable the track.
*/
setEnabled(enabled: boolean): Promise<void>;
/**
* @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
*
* Gets the statistics of a local track.
*
* > Note: When getting the statistics of a local video track, you cannot get the `encodeDelay` property on iOS.
*/
getStats(): LocalVideoTrackStats | LocalAudioTrackStats;
/**
* Gets the label of a local track.
*
* @return The label that the SDK returns may include:
* - The [MediaDeviceInfo.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/label) property, if the track is created by calling `createMicrophoneAudioTrack` or `createCameraVideoTrack`.
* - The `sourceId` property, if the track is created by calling `createScreenVideoTrack`.
* - The [MediaStreamTrack.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/label) property, if the track is created by calling `createCustomAudioTrack` or `createCustomVideoTrack`.
*/
getTrackLabel(): string;
/**
* Sends or stops sending the media data of the track.
*
* @since
* <br>&emsp;&emsp;&emsp;*4.6.0*
*
* If the track is published, a successful call of `setMuted(true)` triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and a successful call of `setMuted(false)` triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
*
 * > - Calling `setMuted(true)` does not stop capturing audio or video and takes effect faster than [[setEnabled]]. For details, see [What are the differences between setEnabled and setMuted?](https://docs.agora.io/en/interactive-live-streaming/develop/product-workflow?platform=web#setenabled-and-setmuted).
* > - Do not call `setEnabled` and `setMuted` together.
*
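 * A mute-toggle sketch (assuming `localTrack` is a published local track):
 * ```javascript
 * await localTrack.setMuted(true);  // remote users stop receiving this track
 * await localTrack.setMuted(false); // resume sending
 * ```
 *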
* @param muted Whether to stop sending the media data of the track:
* - `true`: Stop sending the media data of the track.
* - `false`: Resume sending the media data of the track.
*/
setMuted(muted: boolean): Promise<void>;
/**
* Closes a local track and releases the audio and video resources that it occupies.
*
* Once you close a local track, you can no longer reuse it.
*/
close(): void;
muted: boolean;
enabled: boolean;
}
/**
* `LocalVideoTrack` is the basic interface for local video tracks, providing the main methods for local video tracks.
*
 * You can create a local video track by calling the [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack} method.
*
* Inherited from `LocalVideoTrack`, [CameraVideoTrack]{@link ICameraVideoTrack} is an interface for the video captured by a local camera and adds several camera-related functions.
*/
export declare interface ILocalVideoTrack extends ILocalTrack {
/**
* @param event The event name.
* @param listener See [track-updated]{@link event_track_updated}.
*/
on(event: "track-updated", listener: typeof event_track_updated): void;
/** @internal */
on(event: "beauty-effect-overload", listener: typeof event_beauty_effect_overload): void;
/**
* @param event The event name.
* @param listener See [track-ended]{@link event_track_ended}.
*/
on(event: "track-ended", listener: typeof event_track_ended): void;
/**
* @param event The event name.
* @param listener See [video-element-visible-status]{@link event_video_element_visible_status}.
*/
on(event: "video-element-visible-status", listener: typeof event_video_element_visible_status): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
*/
on(event: "transceiver-updated", listener: typeof event_transceiver_updated): void;
/**
* Plays a local video track on the web page.
*
* @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
* - `string`: Specify the ID of the DOM element.
* - `HTMLElement`: Pass a DOM object.
* @param config Sets the playback configurations, such as display mode and mirror mode. See [[VideoPlayerConfig]]. By default, the SDK enables mirror mode for a local video track.
*/
play(element: string | HTMLElement, config?: VideoPlayerConfig): void;
/**
* Gets the statistics of a local video track.
*
* @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
*/
getStats(): LocalVideoTrackStats;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.19.0*
*
* Clones the current video track to create a new video track.
*
* In scenarios such as video conferencing and online education, you can use this method to display the same video stream with two sets of display parameters, including resolution, frame rate, and bitrate. For example, you can have one display set to high-definition and the other to low-definition.
*
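* **Example**
*
* A minimal sketch; assumes `localVideoTrack` is an existing local video track:
* ```javascript
* // Clone the current track into a low-definition copy of the same stream.
* const lowQualityTrack = localVideoTrack.clone({ width: 320, height: 180, frameRate: 15 }, true);
* ```
*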
* @param config The encoding configuration for the new video track. You can pass in the SDK's built-in encoding configuration through [[VideoEncoderConfiguration]], or customize the video encoding configuration by passing in a [[VideoEncoderConfigurationPreset]].
* @param cloneTrack Whether to clone the current track. Default is `true`.
* @returns The newly generated video track.
*/
clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): ILocalVideoTrack;
/**
* @ignore
*
* Enables/Disables image enhancement and sets the options.
*
* > Notes:
* > - Agora is planning to sunset this built-in image enhancement feature.
* You can use the upgraded Image Enhancement Extension instead. For details, see
* [Use the Image Enhancement Extension](https://docs.agora.io/en/Video/beauty_effect_web_ng?platform=Web).
* > - This method supports the following browsers:
* > - Safari 12 or later.
* > - Chrome 65 or later.
* > - Firefox 70.0.1 or later.
* > - This function is not supported on mobile devices.
* > - If you enable dual-stream mode, the image enhancement options only apply to the high-quality video stream.
*
* @param enabled Whether to enable image enhancement:
* - `true`: Enable image enhancement.
* - `false`: Disable image enhancement.
* @param options Sets image enhancement options. See [[BeautyEffectOptions]].
*/
setBeautyEffect(enabled: boolean, options?: BeautyEffectOptions): Promise<void>;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.1.0*
*
* Gets the data of the video frame being rendered.
*
* > You should call this method after calling [[play]]. Otherwise, the method call returns null.
*
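* **Example**
*
* A minimal sketch; assumes the track is already playing:
* ```javascript
* const frame = localVideoTrack.getCurrentFrameData();
* // Draw the captured frame onto a canvas for inspection.
* const canvas = document.createElement("canvas");
* canvas.width = frame.width;
* canvas.height = frame.height;
* canvas.getContext("2d").putImageData(frame, 0, 0);
* ```
*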
* @returns An `ImageData` object that stores RGBA data. `ImageData` is a web API supported by the browser. For details, see [ImageData](https://developer.mozilla.org/en-US/docs/Web/API/ImageData).
*/
getCurrentFrameData(): ImageData;
/**
*
* @ignore
*/
getCurrentFrameImage(imageType: string, quality: number): Promise<ImageTypedData>;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.2.0*
*
* Sets the video transmission optimization mode.
*
* You can call this method during a video call, live streaming, or screen sharing to dynamically change the optimization mode. For example, during screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
*
* > Note: This method supports Chrome only.
*
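* **Example**
*
* A minimal sketch; assumes `screenTrack` is a screen-sharing video track:
* ```javascript
* // The shared content is about to change from text to video,
* // so prioritize smoothness over sharpness.
* await screenTrack.setOptimizationMode("motion");
* ```
*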
* @param mode The video transmission optimization mode:
* - `"balanced"`: Uses the default optimization mode.
* - For a screen-sharing video track, the default transmission optimization strategy is to prioritize clarity.
* - For the other types of video tracks, the SDK may reduce the frame rate or the sending resolution in poor network conditions.
* - `"detail"`: Prioritizes video quality.
* - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
* - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
* - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
* - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
* - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
*/
setOptimizationMode(mode: "balanced" | "motion" | "detail"): Promise<void>;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.8.0*
*
* Gets the visibility of the `<video>` HTML tag.
*
* After you call `localVideoTrack.play`, the SDK creates a [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `localVideoTrack.isPlaying` is `true` but you cannot see any video, call this method to check whether the `<video>` tag is visible and, if not, to learn the reason why.
*
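* **Example**
*
* A minimal sketch; the exact fields of the result depend on [[CheckVideoVisibleResult]]:
* ```javascript
* const status = localVideoTrack.getVideoElementVisibleStatus();
* if (status && !status.visible) {
*   console.log("The <video> tag is invisible, reason:", status.reason);
* }
* ```
*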
* @returns The [[CheckVideoVisibleResult]] object. If this method returns `undefined`, it may be due to the following reasons:
* - `localVideoTrack.isPlaying` is `false`.
* - The `<video>` tag does not exist.
* - The `<video>` tag is not created by calling the `play` method.
*/
getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
/**
* Inserts a `Processor` to the local video track.
*
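* **Example**
*
* A minimal sketch; assumes `extension` is an Agora extension instance whose `createProcessor` method returns a `Processor`:
* ```javascript
* const processor = extension.createProcessor();
* // Route the local video through the processor, then back to the SDK.
* localVideoTrack.pipe(processor).pipe(localVideoTrack.processorDestination);
* ```
*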
* @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
*
* @returns The `Processor` instance.
*/
pipe(processor: IBaseProcessor): IBaseProcessor;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.10.0*
*
* Removes the `Processor` inserted to the local video track.
*/
unpipe(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.10.0*
*
* The destination of the current processing pipeline on the local video track.
*/
processorDestination: IBaseProcessor;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.17.0*
*
* Replaces the local video track.
*
* You can call this method before or after publishing the local video stream:
* - If you call this method before publishing, the new video track is played locally.
* - If you call this method after publishing, the new video track is received by the remote user.
*
* The new video track can be retrieved by the {@link ILocalVideoTrack.getMediaStreamTrack} or `mediaStream.getVideoTracks` method. You can choose to either stop or retain the replaced track.
*
* > Notes:
* > - This method supports Chrome 65+, Safari, and the latest Firefox.
* > - This method might not take effect on some mobile devices.
* > - Agora recommends switching between video tracks that are of the same type and have the same encoder configurations for the following reasons:
* > - If the video track types are different, such as replacing a `CameraVideoTrack` object with a `ScreenVideoTrack` object, the video is flipped horizontally due to the mirror effect enabled by default on `CameraVideoTrack` (see {@link VideoPlayerConfig.mirror} for details).
* > - If the encoder configurations (`encoderConfig`) are different, the actual sending resolution or frame rate might be different from what you set.
* > - The subscriber will not be notified if the track gets replaced.
* > - To switch the media input devices, Agora recommends using {@link ICameraVideoTrack.setDevice}.
*
* **Example**
* ```javascript
* // Current video track
* const localVideoTrack = await AgoraRTC.createCameraVideoTrack();
* // Gets the new video track (option one): from another local track, assumed to exist
* // const newTrack = anotherLocalVideoTrack.getMediaStreamTrack();
* // Gets the new video track (option two): from a new camera capture
* const newTrack = await navigator.mediaDevices.getUserMedia({audio: true, video: true}).then(mediaStream => mediaStream.getVideoTracks()[0]);
* // Replaces and stops the current video track
* await localVideoTrack.replaceTrack(newTrack, true);
* ```
* @param track The new video track, which is a [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
* @param stopOldTrack Whether to stop the old video track:
* - true: Stops the old video track.
* - false: Retains the old video track.
*/
replaceTrack(track: MediaStreamTrack, stopOldTrack: boolean): Promise<void>;
/**
* Sets the video encoder configurations, such as resolution, frame rate, and bitrate.
*
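* **Example**
*
* A minimal sketch showing both a preset and a customized configuration:
* ```javascript
* // Switch to a preset profile...
* await localVideoTrack.setEncoderConfiguration("480p_1");
* // ...or to customized encoder settings.
* await localVideoTrack.setEncoderConfiguration({ width: 640, height: 360, frameRate: 15 });
* ```
*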
* @param config The video encoder configurations. You can pass either [[VideoEncoderConfigurationPreset]] or a customized [[VideoEncoderConfiguration]] object.
*/
setEncoderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset): Promise<void>;
/**
* Adds SEI data to the H.264 video stream.
*
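* **Example**
*
* A minimal sketch; the payload content is an arbitrary application-defined value:
* ```javascript
* // Encode an application-defined string as the SEI payload.
* const sei = new TextEncoder().encode(JSON.stringify({ ts: Date.now() }));
* localVideoTrack.sendSeiData(sei);
* ```
*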
* @param sei The SEI data, as a `Uint8Array`.
*/
sendSeiData(sei: Uint8Array): void;
}
export declare interface ImageTypedData {
buffer: Uint8Array;
width: number;
height: number;
}
/**
* Inherited from [LocalAudioTrack]{@link ILocalAudioTrack}, `MicrophoneAudioTrack` is an interface for the audio sampled by a local microphone and adds several functions such as switching devices.
*
* You can create a local microphone audio track by calling [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}.
*/
export declare interface IMicrophoneAudioTrack extends ILocalAudioTrack {
/**
* Sets the device for sampling audio.
*
* > You can call the method either before or after publishing an audio track.
*
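* **Example**
*
* A minimal sketch that switches to the first available microphone; assumes `micTrack` is a microphone audio track:
* ```javascript
* const mics = await AgoraRTC.getMicrophones();
* if (mics.length > 0) {
*   await micTrack.setDevice(mics[0].deviceId);
* }
* ```
*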
* @param deviceId The ID of the specified device. You can get the `deviceId` by calling [AgoraRTC.getMicrophones]{@link IAgoraRTC.getMicrophones}.
*/
setDevice(deviceId: string): Promise<void>;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.0.0*
*
* Enables/Disables the track.
*
* After a track is disabled, the SDK stops playing and publishing the track.
*
* > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
* > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
* > - Do not call `setEnabled` and `setMuted` together.
*
* @param enabled Whether to enable the track:
* - `true`: Enable the track.
* - `false`: Disable the track.
*/
setEnabled(enabled: boolean): Promise<void>;
}
export declare function interceptLocalAudioFrame(sender: RTCRtpSender): Promise<void>;
export declare function interceptLocalVideoFrame(sender: RTCRtpSender, localVideoTrack: LocalVideoTrack): Promise<void>;
export declare function interceptRemoteAudioFrame(receiver: RTCRtpReceiver): Promise<void>;
export declare function interceptRemoteVideoFrame(receiver: RTCRtpReceiver, options?: InterceptVideoFrameOptions): Promise<void>;
declare interface InterceptVideoFrameOptions {
onSei?: (sei: Uint8Array) => void;
}
/**
* `RemoteAudioTrack` is the basic interface for the remote audio track.
*
* You can get a remote audio track from the [AgoraRTCRemoteUser.audioTrack]{@link IAgoraRTCRemoteUser.audioTrack} object after calling [subscribe]{@link IAgoraRTCClient.subscribe}.
*/
export declare interface IRemoteAudioTrack extends IRemoteTrack {
/**
* Gets the statistics of a remote audio track.
*
* @return An [[RemoteAudioTrackStats]] object.
*/
getStats(): RemoteAudioTrackStats;
/**
* Plays a remote audio track.
*
* > When playing the audio track, you do not need to pass any DOM element.
*/
play(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.1.0*
*
* Sets the audio playback device, for example, the speaker.
*
* > This method supports Chrome and Edge on desktop devices only. Other browsers throw a `NOT_SUPPORTED` error when calling this method.
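*
* **Example**
*
* A minimal sketch; assumes `remoteAudioTrack` is a subscribed remote audio track:
* ```javascript
* const speakers = await AgoraRTC.getPlaybackDevices();
* if (speakers.length > 0) {
*   await remoteAudioTrack.setPlaybackDevice(speakers[0].deviceId);
* }
* ```
*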
* @param deviceId Device ID, which can be retrieved by calling [[getPlaybackDevices]].
*/
setPlaybackDevice(deviceId: string): Promise<void>;
/**
* Sets the callback for getting raw audio data in PCM format.
*
* After you successfully set the callback, the SDK constantly returns the audio frames of a remote audio track in this callback by using [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer).
*
* > You can set the `frameSize` parameter to determine the frame size in each callback, which affects the interval between the callbacks. The larger the frame size, the longer the interval between them.
*
* ```js
* track.setAudioFrameCallback((buffer) => {
* for (let channel = 0; channel < buffer.numberOfChannels; channel += 1) {
* // Float32Array with PCM data
* const currentChannelData = buffer.getChannelData(channel);
* console.log("PCM data in channel", channel, currentChannelData);
* }
* }, 2048);
*
* // ....
* // Stop getting the raw audio data
* track.setAudioFrameCallback(null);
* ```
*
* @param audioFrameCallback The callback function for receiving the [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object. If you set `audioFrameCallback` as `null`, the SDK stops getting raw audio data.
* @param frameSize The number of samples of each audio channel that an `AudioBuffer` object contains. You can set `frameSize` as 256, 512, 1024, 2048, 4096, 8192, or 16384. The default value is 4096.
*/
setAudioFrameCallback(audioFrameCallback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
/**
* Sets the volume of a remote audio track.
*
* @param volume The volume. The value ranges from 0 (mute) to 100 (the original volume).
*/
setVolume(volume: number): void;
/**
* Gets the audio level of a remote audio track.
*
* @returns The audio level. The value range is [0,1]. 1 is the highest audio level.
* Usually a user with audio level above 0.6 is a speaking user.
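*
* **Example**
*
* A minimal sketch that polls the audio level once per second:
* ```javascript
* setInterval(() => {
*   const level = remoteAudioTrack.getVolumeLevel();
*   if (level > 0.6) {
*     console.log("The remote user is probably speaking, level:", level);
*   }
* }, 1000);
* ```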
*/
getVolumeLevel(): number;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.13.0*
*
* Inserts a `Processor` to the remote audio track.
*
* @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
*
* @returns The `Processor` instance.
*/
pipe(processor: IAudioProcessor): IAudioProcessor;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.13.0*
*
* Removes the `Processor` inserted to the remote audio track.
*/
unpipe(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.13.0*
*
* The destination of the current processing pipeline on the remote audio track.
*/
processorDestination: IAudioProcessor;
}
/**
* @ignore
*/
export declare interface IRemoteDataChannel extends IDataChannel {
/** @internal */
on(event: "open", listener: typeof event_user_datachannel_open): void;
/** @internal */
on(event: "close", listener: typeof event_user_datachannel_close): void;
/** @internal */
on(event: "error", listener: typeof event_user_datachannel_error): void;
/** @internal */
on(event: "message", listener: typeof event_user_datachannel_message): void;
}
/**
* `RemoteTrack` is the basic interface for remote tracks, providing public methods for [RemoteAudioTrack]{@link IRemoteAudioTrack} and [RemoteVideoTrack]{@link IRemoteVideoTrack}.
*/
export declare interface IRemoteTrack extends ITrack {
/**
* @param event The event name.
* @param listener See [track-updated]{@link event_track_updated}.
*/
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
* @param event The event name.
* @param listener See [first-frame-decoded]{@link event_first_frame_decoded}.
*/
on(event: "first-frame-decoded", listener: typeof event_first_frame_decoded): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}.
*/
on(event: "transceiver-updated", listener: typeof event_transceiver_updated_2): void;
/**
* Gets the `uid` of the remote user who publishes the remote track.
*
* @return The `uid` of the remote user.
*/
getUserId(): UID;
/**
* Gets the statistics of a remote track.
*
* @deprecated from v4.1.0. Use [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats} and [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats} instead.
* @return An [[RemoteAudioTrackStats]] or [[RemoteVideoTrackStats]] object.
*/
getStats(): RemoteAudioTrackStats | RemoteVideoTrackStats;
}
/**
* `RemoteVideoTrack` is the basic interface for the remote video track.
*
* You can get a remote video track from the [AgoraRTCRemoteUser.videoTrack]{@link IAgoraRTCRemoteUser.videoTrack} object after calling [subscribe]{@link IAgoraRTCClient.subscribe}.
*/
export declare interface IRemoteVideoTrack extends IRemoteTrack {
/**
* @param event The event name.
* @param listener See [track-updated]{@link event_track_updated}.
*/
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
* @param event The event name.
* @param listener See [video-state-changed]{@link event_video_state_changed}.
*/
on(event: "video-state-changed", listener: typeof event_video_state_changed): void;
/**
* @param event The event name.
* @param listener See [first-frame-decoded]{@link event_first_frame_decoded}.
*/
on(event: "first-frame-decoded", listener: typeof event_first_frame_decoded): void;
/**
* @param event The event name.
* @param listener See [video-element-visible-status]{@link event_video_element_visible_status_2}.
*/
on(event: "video-element-visible-status", listener: typeof event_video_element_visible_status_2): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}.
*/
on(event: "transceiver-updated", listener: typeof event_transceiver_updated_2): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [ILocalTrack.sei-received]{@link event_sei_received}.
*/
on(event: "sei-received", listener: typeof event_sei_received): void;
/**
* Gets the statistics of a remote video track.
*
* @return An [[RemoteVideoTrackStats]] object
*/
getStats(): RemoteVideoTrackStats;
/**
* Plays a remote video track on the web page.
*
* @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
* - `string`: Specify the ID of the DOM element.
* - `HTMLElement`: Pass a DOM object.
* @param config Sets the playback configurations, such as display mode and mirror mode. See [[VideoPlayerConfig]]. By default, the SDK enables mirror mode for a local video track.
*/
play(element: string | HTMLElement, config?: VideoPlayerConfig): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.1.0*
*
* Gets the data of the video frame being rendered.
*
* > You should call this method after calling [[play]]. Otherwise, the method call returns null.
*
* @returns An `ImageData` object that stores RGBA data. `ImageData` is a web API supported by the browser. For details, see [ImageData](https://developer.mozilla.org/en-US/docs/Web/API/ImageData).
*/
getCurrentFrameData(): ImageData;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.8.0*
*
* Gets the visibility of the `<video>` HTML tag.
*
* After you call `remoteVideoTrack.play`, the SDK creates a [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `remoteVideoTrack.isPlaying` is `true` but you cannot see any video, call this method to check whether the `<video>` tag is visible and, if not, to learn the reason why.
*
* @returns The [[CheckVideoVisibleResult]] object. If this method returns `undefined`, it may be due to the following reasons:
* - `remoteVideoTrack.isPlaying` is `false`.
* - The `<video>` tag does not exist.
* - The `<video>` tag is not created by calling the `play` method.
*/
getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.13.0*
*
* Inserts a `Processor` to the remote video track.
*
* @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
*
* @returns The `Processor` instance.
*/
pipe(processor: IBaseProcessor): IBaseProcessor;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.13.0*
*
* Removes the `Processor` inserted to the remote video track.
*/
unpipe(): void;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.13.0*
*
* The destination of the current processing pipeline on the remote video track.
*/
processorDestination: IBaseProcessor;
}
export declare function isAudioEncoderConfiguration(config: AudioEncoderConfiguration): config is AudioEncoderConfiguration;
export declare function isAudioEncoderConfigurationOrPreset(profile: any): profile is AudioEncoderConfigurationPreset | AudioEncoderConfiguration;
export declare function isBeautyEffectOptions(options: BeautyEffectOptions): options is BeautyEffectOptions;
export { isElectron }
export declare function isLowStreamParameter(streamParameter: any): streamParameter is LowStreamParameter;
export declare function isPlanB(): boolean;
export declare function isScreenSourceType(mediaSource: any): mediaSource is ScreenSourceType;
export declare function isVideoEncoderConfiguration(config: VideoEncoderConfiguration): config is VideoEncoderConfiguration;
export declare function isVideoEncoderConfigurationOrPreset(profile: any): profile is VideoEncoderConfigurationPreset | VideoEncoderConfiguration;
export declare interface ITrack extends EventEmitter {
/**
* The type of a media track:
* - `"audio"`: Audio track.
* - `"video"`: Video track.
*/
trackMediaType: "audio" | "video";
/**
* Whether a media track is playing on the webpage:
* - `true`: The media track is playing on the webpage.
* - `false`: The media track is not playing on the webpage.
*/
isPlaying: boolean;
/**
* Gets the ID of a media track, a unique identifier generated by the SDK.
*
* @return The media track ID.
*/
getTrackId(): string;
/**
* Gets the [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
*
* @return A [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
*/
getMediaStreamTrack(): MediaStreamTrack;
/**
* Gets the [RTCRtpTransceiver](https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver) instance of the current track.
*
* This method is currently mainly used for end-to-end encryption of video streams (Beta).
*
* > If the SDK experiences a reconnection, the `RTCRtpTransceiver` instance corresponding to the current track might change. You can obtain the new `RTCRtpTransceiver` instance through the following callbacks:
* > - For a local track: [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}
* > - For a remote track: [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}
*
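* **Example**
*
* A minimal sketch; the transceiver only exists while the track is being published or subscribed:
* ```javascript
* const transceiver = track.getRTCRtpTransceiver();
* if (transceiver) {
*   console.log("Current transceiver mid:", transceiver.mid);
* }
* ```
*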
* @param type The type of the video stream. See {@link StreamType}.
* @returns The [RTCRtpTransceiver](https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver) instance of the current track.
*/
getRTCRtpTransceiver(type?: StreamType): RTCRtpTransceiver | undefined;
/**
* Plays a media track on the webpage.
*
* @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
* - `string`: Specify the ID of the DOM element.
* - `HTMLElement`: Pass a DOM object.
*/
play(element?: string | HTMLElement): void;
/**
* Stops playing the media track.
*/
stop(): void;
}
export declare class LocalAudioTrack extends LocalTrack implements ILocalAudioTrack {
readonly trackMediaType: "audio" | "video";
_encoderConfig?: AudioEncoderConfiguration;
_trackSource: AudioTrackSource | FakeTrackSource;
get _source(): AudioTrackSource;
set _source(source: AudioTrackSource | FakeTrackSource);
_enabled: boolean;
private _volume;
_useAudioElement: boolean;
_bypassWebAudio: boolean;
protected processor?: IAudioProcessor;
protected _processorContext: AudioProcessorContext | undefined;
protected get processorContext(): AudioProcessorContext;
_processorDestination: AudioProcessorDestination | undefined;
get processorDestination(): AudioProcessorDestination;
protected _getOriginVolumeLevel: boolean;
get isPlaying(): boolean;
get __className__(): string;
constructor(track: MediaStreamTrack, encoderConfig?: AudioEncoderConfiguration, trackId?: string, getOriginVolumeLevel?: boolean);
setVolume(volume: number): void;
getVolumeLevel(): number;
setPlaybackDevice(deviceId: string): Promise<void>;
setEnabled(enabled: boolean, _?: any, skipChangeState?: boolean): Promise<void>;
protected _setEnabled(enabled: boolean, _?: any, skipChangeState?: boolean): Promise<void>;
setMuted(muted: boolean): Promise<void>;
getStats(): LocalAudioTrackStats;
setAudioFrameCallback(callback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
play(): void;
stop(): void;
close(): void;
protected _updatePlayerSource(updateWebAudioSource?: boolean): void;
protected _updateOriginMediaStreamTrack(track: MediaStreamTrack, stopOldTrack: boolean): Promise<void>;
renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
pipe(processor: IAudioProcessor): IAudioProcessor;
unpipe(): void;
private bindProcessorDestinationEvents;
private unbindProcessorDestinationEvents;
protected bindProcessorContextEvents(processorContext: AudioProcessorContext): void;
protected unbindProcessorContextEvents(processorContext: AudioProcessorContext): void;
private initWebAudio;
private initProcessor;
}
/**
* Information of the local audio track, which can be retrieved by calling [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats}.
*/
export declare interface LocalAudioTrackStats {
/**
* The audio codec.
*
* - `"opus"`: The audio codec is OPUS
* - `"aac"`: The audio codec is AAC
* - `"pcmu"`: Reserved for future use.
* - `"pcma"`: Reserved for future use.
* - `"g722"`: Reserved for future use.
*
* > Firefox does not support this property.
*/
codecType?: "opus" | "aac" | "PCMU" | "PCMA" | "G722";
/**
* The energy level of the sent audio.
*
* The value range is [0,32767].
*
* > This value is retrieved by calling WebRTC-Stats and may not be up-to-date. To get the real-time sound volume, call [LocalAudioTrack.getVolumeLevel]{@link ILocalAudioTrack.getVolumeLevel}.
*/
sendVolumeLevel: number;
/**
* The bitrate (bps) of the sent audio.
*/
sendBitrate: number;
/**
* The total bytes of the sent audio.
*/
sendBytes: number;
/**
* The total packets of the sent audio.
*/
sendPackets: number;
/**
* The total number of sent audio packets that were lost.
*
* > You cannot get this property on Safari.
*/
sendPacketsLost: number;
/**
* Jitter (ms) of the audio packets that were sent.
*/
sendJitterMs: number;
/**
* Round-trip time delay (ms) of the audio packets that were sent.
*/
sendRttMs: number;
/**
* The packet loss rate of the sent audio in 400ms.
*/
currentPacketLossRate: number;
}
export declare class LocalDataChannel extends DataChannel implements ILocalDataChannel {
send(data: ArrayBuffer): void;
}
export declare abstract class LocalTrack extends Track implements ILocalTrack {
_enabled: boolean;
_muted: boolean;
_isExternalTrack: boolean;
get isExternalTrack(): boolean;
get muted(): boolean;
get enabled(): boolean;
_isClosed: boolean;
protected _enabledMutex: PromiseMutex;
protected processor?: IBaseProcessor;
protected abstract _processorContext: IProcessorContext | undefined;
protected get processorContext(): IProcessorContext | undefined;
abstract get processorDestination(): IBaseProcessor;
constructor(track: MediaStreamTrack, trackId?: string);
abstract getStats(): LocalVideoTrackStats | LocalAudioTrackStats;
abstract setMuted(enabled: boolean): Promise<void>;
abstract setEnabled(enabled: boolean): Promise<void>;
getTrackLabel(): string;
close(): void;
protected _updateOriginMediaStreamTrack(track: MediaStreamTrack, stopOldTrack: boolean, isExternalTrack?: boolean): Promise<void>;
protected abstract _updatePlayerSource(): void;
protected _getDefaultPlayerConfig(): Partial<PlayerConfig>;
protected _handleTrackEnded: () => void;
protected onTrackEnded(): void;
protected stateCheck(stateName: "enabled" | "muted", state: boolean): void;
abstract renewMediaStreamTrack(): Promise<void>;
getProcessorStats(): ProcessorStats[];
getProcessorUsage(): Promise<UsageWithDirection[]>;
}
export declare enum LocalTrackEvents {
SOURCE_STATE_CHANGE = "source-state-change",
TRACK_ENDED = "track-ended",
BEAUTY_EFFECT_OVERLOAD = "beauty-effect-overload",
VIDEO_ELEMENT_VISIBLE_STATUS = "video-element-visible-status",
CLOSED = "closed"
}
export declare class LocalVideoTrack extends LocalTrack implements ILocalVideoTrack {
readonly trackMediaType: "audio" | "video";
_player?: AgoraRTCPlayer | VideoPlayer;
isUseScaleResolutionDownBy: boolean;
private _videoVisibleTimer;
private _previousVideoVisibleStatus;
private _clearPreviousVideoVisibleStatus;
_encoderConfig?: Partial<VideoEncoderConfiguration>;
_scalabilityMode?: SVCConfiguration;
_optimizationMode?: OptimizationMode | "balanced";
private _videoHeight?;
private _videoWidth?;
get videoHeight(): number | undefined;
get videoWidth(): number | undefined;
_forceBitrateLimit?: {
max_bitrate: number;
min_bitrate: number;
};
_enabled: boolean;
get isPlaying(): boolean;
protected _processorDestination: VideoProcessorDestination;
get processorDestination(): VideoProcessorDestination;
protected _processorContext: VideoProcessorContext;
protected get processorContext(): VideoProcessorContext;
protected set processorContext(ctx: VideoProcessorContext);
get __className__(): string;
constructor(track: MediaStreamTrack, encoderConfig?: Partial<VideoEncoderConfiguration>, scalabilityConfig?: SVCConfiguration, optimizationMode?: OptimizationMode | "balanced", trackId?: string, hints?: TrackHint[]);
play(element: HTMLElement | HTMLVideoElement | string, config?: VideoPlayerConfig): void;
stop(): void;
setEnabled(enabled: boolean, skipChangeState?: boolean): Promise<void>;
setMuted(muted: boolean): Promise<void>;
setEncoderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, doNotRenegoation?: boolean): Promise<void>;
getStats(): LocalVideoTrackStats;
setBeautyEffect(enabled: boolean, options?: BeautyEffectOptions): Promise<void>;
getCurrentFrameData(): ImageData;
getCurrentFrameImage(imageType: string, quality?: number): Promise<ImageTypedData>;
setBitrateLimit(bitrateLimit: {
max_bitrate: number;
min_bitrate: number;
}): Promise<void>;
setOptimizationMode(mode: OptimizationMode | "balanced"): Promise<void>;
setScalabiltyMode(mode: SVCConfiguration): void;
updateMediaStreamTrackResolution(): void;
protected _updatePlayerSource(): void;
protected _getDefaultPlayerConfig(): Partial<VideoPlayerConfig>;
protected setSenderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset): Promise<void>;
updateBitrateFromProfile(): void;
getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
pipe(processor: IBaseProcessor): IBaseProcessor;
unpipe(): void;
close(): void;
clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): LocalVideoTrack;
replaceTrack(track: MediaStreamTrack, stopOldTrack: boolean): Promise<void>;
sendSeiData(sei: Uint8Array): never;
private bindProcessorDestinationEvents;
private unbindProcessorDestinationEvents;
private unbindProcessorContextEvents;
}
/**
* Information of the local video track, which can be retrieved by calling [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats}.
*/
export declare interface LocalVideoTrackStats {
/**
* The video codec.
*
* - `"H264"`: The video codec is H.264.
* - `"VP8"`: The video codec is VP8.
* - `"VP9"`: The video codec is VP9.
* - `"AV1X"`: Reserved for future use.
* - `"AV1"`: The video codec is AV1.
*
* > You cannot get this property on Firefox.
*/
codecType?: "H264" | "H265" | "VP8" | "VP9" | "AV1X" | "AV1";
/**
* The total bytes of the sent video.
*/
sendBytes: number;
/**
* The frame rate (fps) of the sent video.
*
* > You cannot get this property on Firefox.
*/
sendFrameRate?: number;
/**
* The frame rate (fps) of the captured video.
*
* > You cannot get this property on Safari and Firefox.
*/
captureFrameRate?: number;
/**
* The total packets of the sent video.
*/
sendPackets: number;
/**
* The total number of sent video packets that were lost.
*
* > - You cannot get this property on Safari.
* > - This property is inaccurate on Firefox.
*/
sendPacketsLost: number;
/**
* Jitter (ms) of the video packets that were sent.
*/
sendJitterMs: number;
/**
* Round-trip time delay (ms) of the video packets that were sent.
*/
sendRttMs: number;
/**
* The resolution height (pixel) of the sent video.
*/
sendResolutionHeight: number;
/**
* The resolution width (pixel) of the sent video.
*/
sendResolutionWidth: number;
/**
* The resolution height (pixel) of the captured video.
*/
captureResolutionHeight: number;
/**
* The resolution width (pixel) of the captured video.
*/
captureResolutionWidth: number;
/**
* The time (ms) required for encoding the captured video.
*/
encodeDelay?: number;
/**
* The bitrate (bps) of the sent video.
*/
sendBitrate: number;
/**
* The target bitrate (bps) of the sent video, namely the bitrate set in {@link VideoEncoderConfiguration}.
*/
targetSendBitrate: number;
/**
* The total duration of the sent video in seconds.
*/
totalDuration: number;
/**
* The total freeze time of the encoded video in seconds.
*/
totalFreezeTime: number;
/**
* The packet loss rate of the sent video in 400ms.
*/
currentPacketLossRate: number;
}
/**
* The video profile of the low-quality video stream. Set the video profile of the low-quality video stream when calling [setLowStreamParameter]{@link IAgoraRTCClient.setLowStreamParameter}.
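*
* **Example**
*
* A minimal sketch; assumes `client` is an `IAgoraRTCClient` instance:
* ```javascript
* client.setLowStreamParameter({
*   width: 320,
*   height: 180,
*   framerate: 15,
*   bitrate: 200,
* });
* ```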
*/
export declare interface LowStreamParameter {
/**
* Width of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
width: ConstrainULong;
/**
* Height of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
height: ConstrainULong;
/**
* Frame rate of the video (fps).
*
* You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
*
* For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
framerate?: ConstrainULong;
/**
* Bitrate of the video (Kbps).
*/
bitrate?: number;
}
export declare const MediaElementNumStatus: {
[n in MediaElementStatus | "uninit"]: number;
};
export declare enum MediaElementStatus {
NONE = "none",
INIT = "init",
CANPLAY = "canplay",
PLAYING = "playing",
PAUSED = "paused",
SUSPEND = "suspend",
STALLED = "stalled",
WAITING = "waiting",
ERROR = "error",
DESTROYED = "destroyed",
ABORT = "abort",
ENDED = "ended",
EMPTIED = "emptied",
LOADEDDATA = "loadeddata"
}
export declare interface MediaStats {
resolution: Resolution;
}
export declare class MicrophoneAudioTrack extends LocalAudioTrack implements IMicrophoneAudioTrack {
_config: MicrophoneAudioTrackInitConfig;
_deviceName: string;
private _constraints;
private readonly _originalConstraints;
_enabled: boolean;
get __className__(): string;
constructor(track: MediaStreamTrack, config: MicrophoneAudioTrackInitConfig, constraints: MediaTrackConstraints, trackId?: string);
setDevice(deviceId: string): Promise<void>;
setEnabled(enabled: boolean, notCloseDevice?: boolean, skipChangeState?: boolean): Promise<void>;
close(): void;
protected onTrackEnded(): void;
renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
protected bindProcessorContextEvents(processorContext: AudioProcessorContext): void;
protected unbindProcessorContextEvents(processorContext: AudioProcessorContext): void;
}
/**
* Configurations for the audio track from the audio captured by a microphone. Set these configurations when calling [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}.
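*
* **Example**
*
* A minimal sketch using a preset encoder configuration:
* ```javascript
* const micTrack = await AgoraRTC.createMicrophoneAudioTrack({
*   encoderConfig: "music_standard",
*   AEC: true,
*   ANS: true,
* });
* ```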
*/
export declare interface MicrophoneAudioTrackInitConfig {
/**
* The audio encoder configurations.
*
* You can set the audio encoder configurations in either of the following ways:
* - Pass the preset audio encoder configurations by using [[AudioEncoderConfigurationPreset]].
* - Pass your customized audio encoder configurations by using [[AudioEncoderConfiguration]].
*
* > Firefox does not support setting the audio encoding rate.
*/
encoderConfig?: AudioEncoderConfiguration | AudioEncoderConfigurationPreset;
/**
* Whether to enable acoustic echo cancellation:
* - `true`: Enable acoustic echo cancellation.
* - `false`: Do not enable acoustic echo cancellation.
*/
AEC?: boolean;
/**
* Whether to enable audio gain control:
* - `true`: Enable audio gain control.
* - `false`: Do not enable audio gain control.
*/
AGC?: boolean;
/**
* Whether to enable automatic noise suppression:
* - `true`: Enable automatic noise suppression.
* - `false`: Do not enable automatic noise suppression.
*/
ANS?: boolean;
/**
* @ignore
*/
DTX?: boolean;
/**
* Specifies the microphone ID.
*
* You can get a list of the available microphones by calling [AgoraRTC.getMicrophones]{@link IAgoraRTC.getMicrophones}.
*/
microphoneId?: string;
/**
* @ignore
* Specifies whether the audio track passes through WebAudio.
*/
bypassWebAudio?: boolean;
}
export declare class MixingAudioTrack extends LocalAudioTrack {
get __className__(): string;
trackList: LocalAudioTrack[];
private destNode;
get isActive(): boolean;
constructor();
hasAudioTrack(track: LocalAudioTrack): boolean;
addAudioTrack(track: LocalAudioTrack): void;
removeAudioTrack(track: LocalAudioTrack): void;
private updateEncoderConfig;
_updateRtpTransceiver(transceiver: RTCRtpTransceiver): void;
}
export declare type OptimizationMode = "motion" | "detail";
export declare interface PlayerConfig extends VideoPlayerConfig {
trackId: string;
element: HTMLElement;
}
export declare function polyfillAudioNode(node: AudioNode): void;
export declare enum PROCESSOR_CONTEXT_EVENTS {
REQUEST_UPDATE_CONSTRAINTS = "request_update_constraints",
REQUEST_CONSTRAINTS = "request_constraints"
}
export declare enum PROCESSOR_DESTINATION_EVENTS {
ON_TRACK = "on_track",
ON_NODE = "on_node"
}
export declare interface ProcessorUsageStats {
id: string;
value: number;
level: number;
totalTs: number;
}
export declare class RemoteAudioTrack extends RemoteTrack implements IRemoteAudioTrack {
readonly trackMediaType: TrackMediaType.AUDIO;
_source: AudioTrackSource | FakeTrackSource;
_useAudioElement: boolean;
private _volume;
protected processorContext: AudioProcessorContext;
processorDestination: AudioProcessorDestination;
private _played;
private _bypassWebAudio;
get isPlaying(): boolean;
get __className__(): string;
constructor(track: MediaStreamTrack, userId: UID, uintId: number, store: SDKStore);
setAudioFrameCallback(callback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
setVolume(volume: number): void;
setPlaybackDevice(deviceId: string): Promise<void>;
getVolumeLevel(): number;
getStats(): RemoteAudioTrackStats;
play(): void;
stop(): void;
_destroy(): void;
_isFreeze(): boolean;
protected _updatePlayerSource(updateWebAudioSource?: boolean): void;
pipe(processor: IAudioProcessor): IAudioProcessor;
unpipe(): void;
private bindProcessorDestinationEvents;
private unbindProcessorDestinationEvents;
}
/**
* Statistics of the remote audio track, such as connection and transmission statistics, which can be retrieved by calling [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats}.
*/
export declare interface RemoteAudioTrackStats {
/**
* Transmission delay (ms).
*
* The delay (ms) between a remote client sending the audio and the local client receiving the audio.
*/
transportDelay: number;
/**
* The audio codec.
*
* - `"opus"`: The audio codec is OPUS
* - `"aac"`: The audio codec is AAC
* - `"pcmu"`: Reserved for future use.
* - `"pcma"`: Reserved for future use.
* - `"g722"`: Reserved for future use.
*
* > Firefox does not support this property.
*/
codecType?: "opus" | "aac" | "PCMU" | "PCMA" | "G722";
/**
* End-to-end delay (ms).
*
* The delay (ms) between a remote client sampling the audio and the local client playing the audio.
* This delay does not include the time spent in encoding at the remote client and the time spent in decoding at the local client.
*/
end2EndDelay: number;
/**
* The bitrate (bps) of the received audio.
*/
receiveBitrate: number;
/**
* The energy level of the received audio.
*
* The value range is [0,32767].
*
* > This value is retrieved by calling WebRTC-Stats and may not be up-to-date. To get the real-time sound volume, call [RemoteAudioTrack.getVolumeLevel]{@link IRemoteAudioTrack.getVolumeLevel}.
*/
receiveLevel: number;
/**
* The total bytes of the received audio.
*/
receiveBytes: number;
/**
* The delay (ms) between a remote client sending the audio and the local client playing the audio.
*
* > This property is inaccurate on Safari and Firefox.
*/
receiveDelay: number;
/**
* The total packets of the received audio.
*/
receivePackets: number;
/**
* The total number of lost audio packets among those that should have been received.
*/
receivePacketsLost: number;
/**
* The number of packets discarded by the jitter buffer due to early or late arrival.
*/
receivePacketsDiscarded: number;
/**
* The packet loss rate of the received audio.
*/
packetLossRate: number;
/**
* The packet loss rate of the received audio.
*/
currentPacketLossRate: number;
/**
* The total duration of the received audio in seconds.
*/
totalDuration: number;
/**
* The total freeze time of the received audio in seconds.
*/
totalFreezeTime: number;
/**
* The freeze rate of the received audio.
*/
freezeRate: number;
publishDuration: number;
}
export declare class RemoteDataChannel extends DataChannel implements IRemoteDataChannel {
private _messageListener;
constructor(config: IDataChannelConfig);
_updateOriginDataChannel(datachannel: RTCDataChannel): void;
_close(): void;
private _bandRemoteDataChannelEvents;
}
/**
* The stream fallback option. Set the stream fallback option when calling [setStreamFallbackOption]{@link IAgoraRTCClient.setStreamFallbackOption}.
*
*/
export declare enum RemoteStreamFallbackType {
/**
* 0: Disable the fallback.
*/
DISABLE = 0,
/**
 * 1: Automatically subscribe to the low-quality video stream under poor network conditions.
 */
LOW_STREAM = 1,
/**
* 2: Subscribe to the low-quality video stream when the network conditions worsen, and subscribe to audio only when the conditions become too poor to support video transmission.
*/
AUDIO_ONLY = 2,
HIGH_STREAM_LAYER1 = 3,
HIGH_STREAM_LAYER2 = 4,
HIGH_STREAM_LAYER3 = 5,
HIGH_STREAM_LAYER4 = 6,
HIGH_STREAM_LAYER5 = 7,
HIGH_STREAM_LAYER6 = 8
}
/**
* The video type of the remote stream. Set the video type of the remote stream when calling [setRemoteVideoStreamType]{@link IAgoraRTCClient.setRemoteVideoStreamType}.
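*
* **Example**
*
* A minimal sketch; assumes `client` is an `IAgoraRTCClient` instance and `12345` is a hypothetical remote uid:
* ```javascript
* // Subscribe to the low-quality video stream of the remote user.
* await client.setRemoteVideoStreamType(12345, 1); // 1: LOW_STREAM
* ```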
*/
export declare enum RemoteStreamType {
/**
* 0: High-quality video stream (high-bitrate, high-resolution).
*/
HIGH_STREAM = 0,
/**
* 1: Low-quality video stream (low-bitrate, low-resolution).
*/
LOW_STREAM = 1,
HIGH_STREAM_LAYER1 = 4,
HIGH_STREAM_LAYER2 = 5,
HIGH_STREAM_LAYER3 = 6,
HIGH_STREAM_LAYER4 = 7,
HIGH_STREAM_LAYER5 = 8,
HIGH_STREAM_LAYER6 = 9
}
export declare abstract class RemoteTrack extends Track implements IRemoteTrack {
private _userId;
/** @internal */
_uintId: number;
_isDestroyed: boolean;
protected store: SDKStore;
protected processor?: IBaseProcessor;
protected abstract processorContext: IProcessorContext;
getUserId(): UID;
abstract getStats(): RemoteAudioTrackStats | RemoteVideoTrackStats;
constructor(track: MediaStreamTrack, userId: UID, uintId: number, store: SDKStore);
_updateOriginMediaStreamTrack(track: MediaStreamTrack): void;
_destroy(): void;
protected abstract _updatePlayerSource(): void;
getProcessorStats(): ProcessorStats[];
getProcessorUsage(): Promise<UsageWithDirection[]>;
}
export declare enum RemoteTrackEvents {
FIRST_FRAME_DECODED = "first-frame-decoded",
VIDEO_ELEMENT_VISIBLE_STATUS = "video-element-visible-status",
VIDEO_STATE_CHANGED = "video-state-changed"
}
export declare class RemoteVideoTrack extends RemoteTrack implements IRemoteVideoTrack {
private _videoVisibleTimer;
private _previousVideoVisibleStatus;
private _clearPreviousVideoVisibleStatus;
readonly trackMediaType: TrackMediaType.VIDEO;
_videoWidth?: number;
_videoHeight?: number;
_player?: AgoraRTCPlayer | VideoPlayer;
processorDestination: VideoProcessorDestination;
protected processorContext: VideoProcessorContext;
get isPlaying(): boolean;
get __className__(): string;
constructor(track: MediaStreamTrack, userId: UID, uintId: number, store: SDKStore);
getStats(): RemoteVideoTrackStats;
play(element: string | HTMLElement | HTMLVideoElement, config?: VideoPlayerConfig): void;
stop(): void;
getCurrentFrameData(): ImageData;
updateMediaStreamTrackResolution(): void;
protected _updatePlayerSource(): void;
getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
pipe(processor: IBaseProcessor): IBaseProcessor;
unpipe(): void;
private bindProcessorDestinationEvents;
private unbindProcessorDestinationEvents;
_destroy(): void;
_onSei(sei: Uint8Array): void;
}
/**
* Statistics of the remote video track, such as connection and transmission statistics, which can be retrieved by calling [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats}.
*/
export declare interface RemoteVideoTrackStats {
/**
* Transmission delay (ms).
*
* The delay (ms) between a remote client sending the video and the local client receiving the video.
*/
transportDelay: number;
/**
* The video codec.
*
* - `"H264"`: The video codec is H.264.
* - `"VP8"`: The video codec is VP8.
* - `"VP9"`: The video codec is VP9.
* - `"AV1X"`: Reserved for future use.
* - `"AV1"`: The video codec is AV1.
*
* > You cannot get this property on Firefox.
*/
codecType?: "H264" | "H265" | "VP8" | "VP9" | "AV1X" | "AV1";
/**
* End-to-end delay (ms).
*
* The delay (ms) between a remote client capturing the video and the local client playing the video.
* This delay does not include the time spent in encoding at the remote client and the time spent in decoding at the local client.
*/
end2EndDelay: number;
/**
* The bitrate (bps) of the received video.
*/
receiveBitrate: number;
/**
* The delay (ms) between a remote client sending the video and the local client playing the video.
*
* > This property is inaccurate on Safari and Firefox.
*/
receiveDelay: number;
/**
* The total bytes of the received video.
*/
receiveBytes: number;
/**
* The frame rate (fps) of the decoded video.
*/
decodeFrameRate?: number;
/**
* The frame rate (fps) of the received video.
*/
receiveFrameRate?: number;
/**
* The rendering frame rate (fps) of the decoded video.
*/
renderFrameRate?: number;
/**
* The total packets of the received video.
*/
receivePackets: number;
/**
* The total number of lost video packets among those that should have been received.
*/
receivePacketsLost: number;
/**
* The packet loss rate of the received video.
*/
packetLossRate: number;
/**
* The packet loss rate of the received video.
*/
currentPacketLossRate: number;
/**
* The resolution height (pixel) of the received video.
*/
receiveResolutionHeight: number;
/**
* The resolution width (pixel) of the received video.
*/
receiveResolutionWidth: number;
/**
* The total duration of the received video in seconds.
*/
totalDuration: number;
/**
* The total freeze time of the received video in seconds.
*/
totalFreezeTime: number;
/**
* The freeze rate of the received video.
*/
freezeRate: number;
publishDuration: number;
}
export declare function removeTrack(track: Track): void;
export declare function requestAutoplayGesture(): void;
export declare interface Resolution {
width: number;
height: number;
}
export declare const SAFARI_GLOBAL_GUM_LOCK: PromiseMutex;
export declare interface ScreenConstraintsWithElectron extends MediaTrackConstraints {
sourceId?: string;
extensionId?: string;
mandatory?: any;
mediaSource?: ScreenSourceType;
width?: ConstrainULong;
height?: ConstrainULong;
displaySurface?: "browser" | "window" | "monitor";
selfBrowserSurface?: "include" | "exclude";
surfaceSwitching?: "include" | "exclude";
systemAudio?: "include" | "exclude";
}
/**
* The preset video encoder configurations for screen sharing.
*
* You can pass the preset video encoder configurations when calling [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
*
* The following table lists all the preset video profiles for screen sharing.
*
* | Video Profile | Resolution (Width×Height) | Frame Rate (fps) |
* | -------- | --------------- | ----------- |
* | "480p" | 640 × 480 | 5 |
* | "480p_1" | 640 × 480 | 5 |
* | "480p_2" | 640 × 480 | 30 |
* | "480p_3" | 640 × 480 | 15 |
* | "720p" | 1280 × 720 | 5 |
* | "720p_1" | 1280 × 720 | 5 |
* | "720p_2" | 1280 × 720 | 30 |
* | "720p_3" | 1280 × 720 | 15 |
* | "720p_auto" <sup></sup> | 1280 × 720 | 30 |
* | "1080p" | 1920 × 1080 | 5 |
* | "1080p_1" | 1920 × 1080 | 5 |
* | "1080p_2" | 1920 × 1080 | 30 |
* | "1080p_3" | 1920 × 1080 | 15 |
*
* > `"720p_auto"` is recommended only on Safari, to ensure dynamic adjustment of the encoding resolution. For details, see the release notes.
*/
export declare type ScreenEncoderConfigurationPreset = keyof typeof SUPPORT_SCREEN_ENCODER_CONFIG_LIST;
/**
* The type of the source for screen sharing.
* - `"screen"`: Sharing the whole screen.
* - `"application"`: Sharing all windows of an app.
* - `"window"`: Sharing a window of an app.
*/
export declare type ScreenSourceType = "screen" | "window" | "application";
/**
* Configurations for the video track for screen sharing. Set these configurations when calling [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
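*
* **Example**
*
* A minimal sketch combining a preset encoder configuration with picker options:
* ```javascript
* const screenTrack = await AgoraRTC.createScreenVideoTrack({
*   encoderConfig: "1080p_2",
*   optimizationMode: "detail",
*   selfBrowserSurface: "exclude",
* });
* ```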
*/
export declare interface ScreenVideoTrackInitConfig {
/**
* The video encoder configurations for screen sharing.
*
* You can set the video encoder configurations in either of the following ways:
* - Pass the preset video encoder configurations by using [[ScreenEncoderConfigurationPreset]].
* - Pass your customized video encoder configurations by using [[VideoEncoderConfiguration]].
* - Leave this property empty to use the SDK's default value, `"1080p_2"` (resolution: 1920 × 1080, frame rate: 30 fps, bitrate: 3000 Kbps).
*/
encoderConfig?: VideoEncoderConfiguration | ScreenEncoderConfigurationPreset;
/**
* The `sourceId` when you share the screen through Electron.
*/
electronScreenSourceId?: string;
/**
* The `extensionId` when you share the screen with a Chrome extension.
*/
extensionId?: string;
/**
*
* @deprecated from v4.17.1. Use {@link displaySurface} instead.
*
* The type of the source for screen sharing.
*/
screenSourceType?: ScreenSourceType;
/**
* @since
* <br>&emsp;&emsp;&emsp;*4.2.0*
*
* Sets the video transmission optimization mode.
*
* You can set this property when creating a screen-sharing video track. To dynamically change the optimization mode afterwards (for example, before the shared content changes from text to video), call [LocalVideoTrack.setOptimizationMode]{@link ILocalVideoTrack.setOptimizationMode}.
*
* > Note: This property supports Chrome only.
*
* The video transmission optimization mode:
* - `"detail"`: Prioritizes video quality.
* - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
* - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
* - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
* - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
* - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
*/
optimizationMode?: "motion" | "detail";
/**
* @ignore
*
* @since
* <br>&emsp;&emsp;&emsp;*4.18.0*
*
* Configurations for Scalable Video Coding (SVC).
*
* You can set the configurations using one of the following options:
* - Use the preset SVC configurations provided by the SDK through {@link SVCConfigurationPreset}.
* - Use your custom SVC configurations through {@link SVCConfiguration}.
*/
scalabiltyMode?: SVCConfiguration | SVCConfigurationPreset;
/**
*
* @since
* <br>&emsp;&emsp;&emsp;*4.17.1*
*
* The pre-selected pane in the media picker. See [displaySurface](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#displaySurface) for details.
*
* > Note: This property is supported on Chrome 107 and later, as well as Edge 107 and later.
*/
displaySurface?: "browser" | "window" | "monitor";
/**
*
* @since
* <br>&emsp;&emsp;&emsp;*4.17.1*
*
* Whether to allow the user to share the current tab:
* - `"include"`: (Default) Allows the user to share the current tab.
* - `"exclude"`: Prevents the user from sharing the current tab.
*
* See [selfBrowserSurface](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#selfBrowserSurface) for details.
*
* > Note:
* > - On Chrome 107, this property defaults to `"exclude"`. For better compatibility with earlier versions, the SDK changes the default value to `"include"`, which ensures that users can still share the current tab after upgrading to Chrome 107.
* > - This property is supported on Chrome 107 and later, as well as Edge 107 and later.
*/
selfBrowserSurface?: "include" | "exclude";
/**
*
* @since
* <br>&emsp;&emsp;&emsp;*4.17.1*
*
* Whether to allow the user to dynamically switch between shared tabs:
* - `"include"`: (Default) The user can dynamically switch between shared tabs.
* - `"exclude"`: The user cannot dynamically switch between shared tabs.
*
* See [surfaceSwitching](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#surfaceSwitching) for details.
*
* > Note: This property is supported on Chrome 107 and later, as well as Edge 107 and later.
*/
surfaceSwitching?: "include" | "exclude";
/**
*
* @since
* <br>&emsp;&emsp;&emsp;*4.17.1*
*
* Whether to capture system audio:
* - `"include"`: (Default) Captures system audio.
* - `"exclude"`: Avoids capturing system audio
*
* See [systemAudio](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#systemAudio) for details.
*
* > Note: This property is supported on Chrome 105 and later (Windows only), as well as Edge 105 and later (Windows only).
*/
systemAudio?: "include" | "exclude";
}
export declare type SenderConfig = {
bitrateMax: number;
bitrateMin: number;
scaleResolutionDownBy: number;
};
export declare function silenceScriptProcessHandler(e: AudioProcessingEvent): AudioBuffer;
export declare interface StatsRegistry {
processorID: string;
processorName: string;
type: string;
cb: Function;
}
export declare enum StreamType {
/**
* 0: High-quality video stream (high-bitrate, high-resolution).
*/
HIGH_STREAM = 0,
/**
* 1: Low-quality video stream (low-bitrate, low-resolution).
*/
LOW_STREAM = 1
}
export declare const SUPPORT_720P_AUTO_CONFIG_LIST: Required<{
scaleResolutionDownBy: number;
width: number;
height: number;
frameRate: number;
bitrateMin: number;
bitrateMax: number;
}>[];
/**
* @ignore
*/
export declare const SUPPORT_SCREEN_ENCODER_CONFIG_LIST: Record<string, VideoEncoderConfiguration>;
/**
* @ignore
*/
export declare const SUPPORT_SVC_CONFIG_LIST: Record<string, SVCConfiguration>;
/**
* @ignore
*/
export declare const SUPPORT_VIDEO_ENCODER_CONFIG_LIST: Record<string, VideoEncoderConfiguration>;
/**
* @ignore
* @since
* <br>&emsp;&emsp;&emsp;*4.18.0*
*
* Custom SVC encoding configurations.
*
* You can control the SVC configurations for local video by passing `SVCConfiguration` in the following methods:
* - [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}
* - [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}
* - [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}
*/
export declare interface SVCConfiguration {
/**
* Specifies the number of spatial layers in SVC.
*/
numSpatialLayers: 1 | 2 | 3;
/**
* Specifies the number of temporal layers in SVC.
*/
numTemporalLayers: 1 | 3;
}
/**
* @ignore
*
* @since
* <br>&emsp;&emsp;&emsp;*4.18.0*
*
* The preset SVC configurations provided by the SDK.
* - `"1SL1TL"`: 1 spatial layer, 1 temporal layer.
* - `"3SL3TL"`: 3 spatial layers, 3 temporal layers.
* - `"2SL3TL"`: 2 spatial layers, 3 temporal layers.
*
* You can control the SVC configurations for local video by passing these preset values in the following methods:
* - [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}
* - [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}
* - [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}
*
*/
export declare type SVCConfigurationPreset = keyof typeof SUPPORT_SVC_CONFIG_LIST;
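/*
 * Illustrative sketch: a custom `SVCConfiguration` equivalent to the
 * `"2SL3TL"` preset described above.
 *
 * ```typescript
 * const svc: SVCConfiguration = {
 *   numSpatialLayers: 2,  // two spatial (resolution) layers
 *   numTemporalLayers: 3, // three temporal (frame-rate) layers
 * };
 * ```
 */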
export declare abstract class Track extends EventEmitter implements ITrack {
abstract readonly trackMediaType: "audio" | "video";
private _ID;
protected _rtpTransceiver?: RTCRtpTransceiver;
protected _lowRtpTransceiver?: RTCRtpTransceiver;
abstract get isPlaying(): boolean;
_hints: TrackHint[];
_isClosed: boolean;
_originMediaStreamTrack: MediaStreamTrack;
mediaStreamTrack: MediaStreamTrack;
set _mediaStreamTrack(track: MediaStreamTrack);
get _mediaStreamTrack(): MediaStreamTrack;
_external: ExternalMethods;
constructor(track: MediaStreamTrack, trackId?: string);
toString(): string;
getTrackId(): string;
getMediaStreamTrack(fromInternal?: boolean): MediaStreamTrack;
getRTCRtpTransceiver(type?: StreamType): RTCRtpTransceiver | undefined;
getMediaStreamTrackSettings(): MediaTrackSettings;
protected close(): void;
abstract play(element?: HTMLElement | string): void;
abstract stop(): void;
_updateRtpTransceiver(transceiver?: RTCRtpTransceiver, type?: StreamType): void;
}
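/*
 * Illustrative sketch: `getMediaStreamTrack` exposes the underlying
 * `MediaStreamTrack`, which standard browser APIs can inspect. Assumes
 * `track` is any concrete track instance (for example, a camera video track).
 *
 * ```typescript
 * const mediaTrack = track.getMediaStreamTrack();
 * console.log(mediaTrack.kind, mediaTrack.getSettings());
 * ```
 */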
export declare enum TrackEvents {
TRANSCEIVER_UPDATED = "transceiver-updated",
SEI_TO_SEND = "sei-to-send",
SEI_RECEIVED = "sei-received",
TRACK_UPDATED = "track-updated"
}
export declare enum TrackHint {
SCREEN_TRACK = "screen_track",
CUSTOM_TRACK = "custome_track",
LOW_STREAM = "low_stream",
SCREEN_LOW_TRACK = "screen_low_track"
}
export declare enum TrackInternalEvent {
NEED_RENEGOTIATE = "@need_renegotiate",
NEED_REPLACE_TRACK = "@need_replace_track",
NEED_REPLACE_MIXING_TRACK = "@need_replace_mixing_track",
NEED_CLOSE = "@need_close",
NEED_ENABLE_TRACK = "@need_enable_track",
NEED_DISABLE_TRACK = "@need_disable_track",
NEED_SESSION_ID = "@need_sid",
SET_OPTIMIZATION_MODE = "@set_optimization_mode",
GET_STATS = "@get_stats",
GET_RTC_STATS = "@get_rtc_stats",
GET_LOW_VIDEO_TRACK = "@get_low_video_track",
NEED_RESET_REMOTE_SDP = "@need_reset_remote_sdp",
NEED_UPDATE_VIDEO_ENCODER = "@need_update_video_encoder",
NEED_UPDATE_VIDEO_SEND_PARAMETERS = "@need_update_video_send_parameters",
NEED_MUTE_TRACK = "@need_mute_track",
NEED_UNMUTE_TRACK = "@need_unmute_track"
}
export declare enum TrackMediaType {
AUDIO = "audio",
VIDEO = "video",
DATA = "data"
}
export declare function updateAgoraRTCCompatibility(): void;
export declare interface UsageRegistry {
processorID: string;
processorName: string;
cb: () => Usage | Promise<Usage>;
}
/**
* `VideoEncoderConfiguration` is the interface that defines the video encoder configurations.
*
* You can customize the video encoder configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
*
* The SDK provides the preset video encoder configurations. For more information, see [[VideoEncoderConfigurationPreset]].
*
* > The actual bitrate may differ slightly from the value you set due to the limitations of the operating system or the web browser. Agora recommends setting the bitrate between 100 Kbps and 5000 Kbps.
*/
export declare interface VideoEncoderConfiguration {
/**
* Width of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
width?: number | ConstrainLong;
/**
* Height of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
height?: number | ConstrainLong;
/**
* Frame rate of the video (fps).
*
* You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
*
* For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
frameRate?: number | ConstrainLong;
/**
* The minimum bitrate of the video (Kbps).
*/
bitrateMin?: number;
/**
* The maximum bitrate of the video (Kbps).
*/
bitrateMax?: number;
/**
* @ignore
*/
scaleResolutionDownBy?: number;
}
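/*
 * Illustrative sketch: passing a custom `VideoEncoderConfiguration` through
 * the `encoderConfig` option of `AgoraRTC.createCameraVideoTrack` (option
 * name as in the public SDK typings).
 *
 * ```typescript
 * const cameraTrack = await AgoraRTC.createCameraVideoTrack({
 *   encoderConfig: {
 *     width: 1280,
 *     height: 720,
 *     frameRate: { min: 15, max: 30 }, // a ConstrainLong range
 *     bitrateMin: 600,  // Kbps
 *     bitrateMax: 2000, // Kbps
 *   },
 * });
 * ```
 */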
/**
*
* The preset video encoder configurations.
*
* You can pass the preset video encoder configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
*
* The following table lists all the preset video profiles. The SDK uses `"480p_1"` by default.
*
* | Video Profile | Resolution (Width × Height) | Frame Rate (fps) |
* | -------- | --------------- | ----------- |
* | 120p | 160 × 120 | 15 |
* | 120p_1 | 160 × 120 | 15 |
* | 120p_3 | 120 × 120 | 15 |
* | 180p | 320 × 180 | 15 |
* | 180p_1 | 320 × 180 | 15 |
* | 180p_3 | 180 × 180 | 15 |
* | 180p_4 | 240 × 180 | 15 |
* | 240p | 320 × 240 | 15 |
* | 240p_1 | 320 × 240 | 15 |
* | 240p_3 | 240 × 240 | 15 |
* | 240p_4 | 424 × 240 | 15 |
* | 360p | 640 × 360 | 15 |
* | 360p_1 | 640 × 360 | 15 |
* | 360p_3 | 360 × 360 | 15 |
* | 360p_4 | 640 × 360 | 30 |
* | 360p_6 | 360 × 360 | 30 |
* | 360p_7 | 480 × 360 | 15 |
* | 360p_8 | 480 × 360 | 30 |
* | 360p_9 | 640 × 360 | 15 |
* | 360p_10 | 640 × 360 | 24 |
* | 360p_11 | 640 × 360 | 24 |
* | 480p | 640 × 480 | 15 |
* | 480p_1 | 640 × 480 | 15 |
* | 480p_2 | 640 × 480 | 30 |
* | 480p_3 | 480 × 480 | 15 |
* | 480p_4 | 640 × 480 | 30 |
* | 480p_6 | 480 × 480 | 30 |
* | 480p_8 | 848 × 480 | 15 |
* | 480p_9 | 848 × 480 | 30 |
* | 480p_10 | 640 × 480 | 10 |
* | 720p | 1280 × 720 | 15 |
* | 720p_1 | 1280 × 720 | 15 |
* | 720p_2 | 1280 × 720 | 30 |
* | 720p_3 | 1280 × 720 | 30 |
* | 720p_auto <sup>*</sup> | 1280 × 720 | 30 |
* | 720p_5 | 960 × 720 | 15 |
* | 720p_6 | 960 × 720 | 30 |
* | 1080p | 1920 × 1080 | 15 |
* | 1080p_1 | 1920 × 1080 | 15 |
* | 1080p_2 | 1920 × 1080 | 30 |
* | 1080p_3 | 1920 × 1080 | 30 |
* | 1080p_5 | 1920 × 1080 | 60 |
*
* > <sup>*</sup> `"720p_auto"` is recommended only on Safari, to ensure dynamic adjustment of the encoding resolution. For details, see the release notes.
*/
export declare type VideoEncoderConfigurationPreset = keyof typeof SUPPORT_VIDEO_ENCODER_CONFIG_LIST;
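/*
 * Illustrative sketch: using a preset string instead of a full
 * `VideoEncoderConfiguration` object.
 *
 * ```typescript
 * const cameraTrack = await AgoraRTC.createCameraVideoTrack({
 *   encoderConfig: "720p_2", // 1280 × 720, 30 fps (see the table above)
 * });
 * ```
 */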
declare class VideoPlayer {
trackId: string;
config: PlayerConfig;
onFirstVideoFrameDecoded?: () => void;
onVideoStateChanged?: (state: VideoState) => void;
freezeTimeCounterList: number[];
renderFreezeAccTime: number;
isKeepLastFrame: boolean;
private timeUpdatedCount;
private freezeTime;
private playbackTime;
private lastTimeUpdatedTime;
private autoplayFailed;
protected videoTrack?: MediaStreamTrack;
protected videoElement: HTMLVideoElement;
protected cacheVideoElement?: HTMLVideoElement;
private renderStats?;
get rendFrameRate(): number;
get videoElementStatus(): MediaElementStatus;
set videoElementStatus(status: MediaElementStatus);
get videoState(): VideoState;
set videoState(state: VideoState);
private _videoState;
private videoElementCheckInterval?;
private videoElementFreezeTimeout?;
private _videoElementStatus;
private isGettingVideoDimensions;
constructor(config: PlayerConfig);
getVideoElement(): HTMLVideoElement | undefined;
getContainerElement(): HTMLDivElement | undefined;
updateConfig(config: PlayerConfig): void;
updateVideoTrack(track?: MediaStreamTrack): void;
play(sessionId?: string): void;
getCurrentFrame(): ImageData;
getCurrentFrameToUint8Array(type: string, quality?: number): Promise<ImageTypedData>;
destroy(): void;
protected initVideoElement(): void;
protected resetVideoElement(): void;
private startGetVideoDimensions;
private handleAutoPlayFailed;
private autoResumeAfterInterruption;
private handleVideoEvents;
private autoResumeAfterInterruptionOnIOS15_16;
}
/**
* Playback configurations for a video track. Set the playback configurations for a video track when calling [ILocalVideoTrack.play]{@link ILocalVideoTrack.play}.
*/
export declare interface VideoPlayerConfig {
/**
* Sets whether to enable mirror mode:
* - `true`: Enable mirror mode.
* - `false`: Disable mirror mode.
*
* > Notes:
* > - The SDK enables mirror mode for the local video track by default.
* > - The SDK disables mirror mode for the remote video track by default.
*/
mirror?: boolean;
/**
* Sets video display mode:
* - `"cover"`: The image files the height and width of the box, while maintaining its aspect ratio but often cropping the image in the process. For more information, see the `cover` option of `object-fit` in CSS.
* - `"contain"`: The size of the image increases or decreases to fill the box while preserving its aspect-ratio. Areas that are not filled due to the disparity in the aspect ratio are filled with black. For more information, see the `contain` option of `object-fit` in CSS.
* - `"fill"`: The image stretches to fit the box, regardless of its aspect-ratio. For more information, see the `fill` option of `object-fit` in CSS.
*
* > Notes:
* > - When playing the local camera video track, the SDK uses cover mode by default; when playing the local video track of screen sharing, the SDK uses contain mode by default.
* > - When playing the remote video track, the SDK uses cover mode by default.
*/
fit?: "cover" | "contain" | "fill";
}
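/*
 * Illustrative sketch: passing `VideoPlayerConfig` to `play` on a local video
 * track. Assumes a DOM container with id "player" and a track named
 * `localVideoTrack`.
 *
 * ```typescript
 * localVideoTrack.play("player", { mirror: false, fit: "contain" });
 * ```
 */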
export declare class VideoProcessorContext extends EventEmitter implements IProcessorContext {
private constraintsMap;
private statsRegistry;
private usageRegistry;
private readonly trackId;
private readonly direction;
private _chained;
set chained(chained: boolean);
get chained(): boolean;
constructor(trackId: string, direction: "local" | "remote");
getConstraints(): Promise<MediaTrackConstraints>;
requestApplyConstraints(constraints: MediaTrackConstraints, processor: IBaseProcessor): Promise<void>;
requestRevertConstraints(processor: IBaseProcessor): Promise<void>;
registerStats(processor: IBaseProcessor, type: string, cb: () => any): void;
unregisterStats(processor: IBaseProcessor, type: string): void;
gatherStats(): ProcessorStats[];
registerUsage(processor: IBaseProcessor, cb: () => Usage): void;
unregisterUsage(processor: IBaseProcessor): void;
gatherUsage(): Promise<UsageWithDirection[]>;
getDirection(): "local" | "remote";
}
export declare class VideoProcessorDestination extends EventEmitter implements IBaseProcessor {
name: string;
ID: string;
_source?: IBaseProcessor;
private readonly videoContext;
constructor(videoContext: VideoProcessorContext);
private inputTrack?;
get kind(): Kind;
get enabled(): boolean;
pipe(): IBaseProcessor;
unpipe(): void;
enable(): void;
disable(): void;
updateInput(inputOptions: {
track?: MediaStreamTrack;
node?: AudioNode;
context: IProcessorContext;
}): void;
reset(): void;
}
/**
* The state of the video stream.
*/
export declare enum VideoState {
/**
* 0: The initial state of the video.
*/
VideoStateStopped = 0,
/**
* 1: The local user has received the first video packet.
*/
VideoStateStarting = 1,
/**
* 2: The video stream is being decoded and played normally.
*/
VideoStateDecoding = 2,
/**
* 3: The video stream is frozen.
*/
VideoStateFrozen = 3
}
declare class VisibilityWatcher extends EventEmitter {
private _lastHiddenTime;
private _lastVisibleTime;
get visibility(): DocumentVisibilityState;
get lastHiddenTime(): DOMHighResTimeStamp;
get lastVisibleTime(): DOMHighResTimeStamp;
constructor();
}
export declare const visibilityWatcher: VisibilityWatcher;
declare class VolumeLevelAnalyser {
private readonly context;
private analyserNode;
private sourceNode?;
constructor();
updateSource(sourceNode?: AudioNode): void;
getVolumeLevel(): number;
getAnalyserNode(): AnalyserNode;
rebuildAnalyser(): void;
destroy(): void;
}
export { }