// NOTE(review): the following header lines are web-viewer/scraper residue and
// are not part of the original declaration file; kept here as a comment so the
// file remains valid TypeScript.
// jiuyiUniapp/jiuyi2/node_modules/@agora-js/media/media.d.ts
// 5497 lines, 219 KiB, TypeScript — captured 2024-12-18 15:46:27 +08:00
import { AgoraRTCError } from '@agora-js/shared';
import { CheckVisibleResult } from '@agora-js/shared';
import { ElectronDesktopCapturerSource } from '@agora-js/shared';
import { EventEmitter } from '@agora-js/shared';
import { IAudioProcessor } from 'agora-rte-extension';
import type { IAudioProcessorContext } from 'agora-rte-extension';
import { IBaseProcessor } from 'agora-rte-extension';
import type { IProcessorContext } from 'agora-rte-extension';
import { isElectron } from '@agora-js/shared';
import type { Kind } from 'agora-rte-extension';
import type { NetworkQuality } from '@agora-js/shared';
import type { ProcessorStats } from 'agora-rte-extension';
import { PromiseMutex } from '@agora-js/shared';
import { RequiredOnlyOneOf } from '@agora-js/shared';
import { SDKStore } from '@agora-js/shared';
import { UID } from '@agora-js/shared';
import { Usage } from 'agora-rte-extension';
import type { UsageWithDirection } from 'agora-rte-extension';
/** Internal registry of every Track created by the SDK. */
export declare const __TRACK_LIST__: Track[];
/** Adds a track to the global registry (`__TRACK_LIST__`). */
export declare function addTrack(track: Track): void;
/**
 * Browser capability flags detected by the SDK at runtime.
 */
export declare interface AgoraRTCCompatibility {
    /** Whether the standard, plugin-free screen-sharing API (`getDisplayMedia`) is supported */
    getDisplayMedia: boolean;
    /** Whether capturing the screen through a Chrome extension is supported */
    getStreamFromExtension: boolean;
    /** Whether unified-plan SDP semantics are supported */
    supportUnifiedPlan: boolean;
    /** Whether configuring a minimum bitrate is supported */
    supportMinBitrate: boolean;
    /** Whether modifying RTCRtpSender parameters is supported */
    supportSetRtpSenderParameters: boolean;
    /** Whether the browser supports dual-stream (high/low video quality) mode */
    supportDualStream: boolean;
    /** Whether WebAudio supports outputting a MediaStream */
    webAudioMediaStreamDest: boolean;
    /** Whether RTCRtpSender supports replaceTrack */
    supportReplaceTrack: boolean;
    /** Whether WebGL is supported */
    supportWebGL: boolean;
    /** Whether echo cancellation via WebAudio is supported */
    webAudioWithAEC: boolean;
    /** Whether CanvasCaptureMediaStreamTrack.requestFrame is supported */
    supportRequestFrame: boolean;
    /** Whether screen-sharing audio is supported */
    supportShareAudio: boolean;
    /** Whether low-stream parameters can be set via RTCRtpEncodingParameters */
    supportDualStreamEncoding: boolean;
    /** Whether DataChannel is supported */
    supportDataChannel: boolean;
    /** Whether PeerConnection.setConfiguration is supported */
    supportPCSetConfiguration: boolean;
    /** Whether WebRTC Encoded Transform is supported */
    supportWebRTCEncodedTransform: boolean;
    /** Whether WebRTC Insertable Stream is supported */
    supportWebRTCInsertableStream: boolean;
    /** Whether requestVideoFrameCallback is supported (used to compute the freeze rate) */
    supportRequestVideoFrameCallback: boolean;
    /** Whether WebCrypto is supported */
    supportWebCrypto: boolean;
}
/**
 * Internal video player that renders a MediaStreamTrack into a DOM container
 * via a `<video>` element.
 * @internal
 */
declare class AgoraRTCPlayer extends VideoPlayer {
    private container?;
    private slot;
    constructor(config: PlayerConfig);
    /** Applies a new player configuration. */
    updateConfig(config: PlayerConfig): void;
    /** Replaces (or clears, when omitted) the rendered MediaStreamTrack. */
    updateVideoTrack(track?: MediaStreamTrack): void;
    play(sessionId?: string): void;
    /** Captures the currently rendered frame as raw ImageData. */
    getCurrentFrame(): ImageData;
    /** Captures the current frame encoded as `type` (e.g. an image MIME type) into a byte array. */
    getCurrentFrameToUint8Array(type: string, quality?: number): Promise<ImageTypedData>;
    destroy(): void;
    private createElements;
    private mountedVideoElement;
    private unmountedVideoElement;
    protected resetVideoElement(): void;
    getContainerElement(): HTMLDivElement | undefined;
}
/** @en
 * Statistics of the call, which can be retrieved by calling [AgoraRTCClient.getRTCStats]{@link IAgoraRTCClient.getRTCStats}.
 * @public
 */
export declare interface AgoraRTCStats {
    /** @en
     * Call duration in seconds.
     */
    Duration: number;
    /** @en
     * The total bitrate (bps) of the received audio and video, represented by an instantaneous value.
     */
    RecvBitrate: number;
    /** @en
     * The total number of bytes received, represented by an aggregate value.
     */
    RecvBytes: number;
    /** @en
     * The total bitrate (bps) of the sent audio and video, represented by an instantaneous value.
     */
    SendBitrate: number;
    /** @en
     * The total number of bytes sent, represented by an aggregate value.
     */
    SendBytes: number;
    /** @en
     * The number of users in the channel.
     *
     * - Communication profile: The number of users in the channel.
     * - Live Broadcast profile:
     *   - If the local user is an audience: The number of users in the channel = The number of hosts in the channel + 1.
     *   - If the local user is a host: The number of users in the channel = The number of hosts in the channel.
     */
    UserCount: number;
    /** @en
     * RTT (Round-Trip Time) between the SDK and Agora's edge server, in ms.
     */
    RTT: number;
    /** @en
     * The estimated bandwidth (Kbps) of the uplink network.
     */
    OutgoingAvailableBandwidth: number;
}
/** Events emitted by the shared AudioContext state tracker ({@link audioContextState}). */
export declare enum AUDIO_CONTEXT_EVENT {
    /** Audio interruption started (iOS 15/16-specific detection path). */
    IOS_15_16_INTERRUPTION_START = "ios15_16-interruption-start",
    /** Audio interruption ended (iOS 15/16-specific detection path). */
    IOS_15_16_INTERRUPTION_END = "ios15_16-interruption-end",
    /** Audio interruption started on iOS. */
    IOS_INTERRUPTION_START = "ios-interruption-start",
    /** Audio interruption ended on iOS. */
    IOS_INTERRUPTION_END = "ios-interruption-end",
    /** The AudioContext state changed. */
    STATE_CHANGE = "state-change"
}
/** @en
 * Preset audio encoder configurations, keyed by profile name.
 * See {@link AudioEncoderConfigurationPreset} for the parameters of each profile.
 * @ignore
 */
export declare const AUDIO_ENCODER_CONFIG_SETTINGS: {
    speech_low_quality: AudioEncoderConfiguration;
    speech_standard: AudioEncoderConfiguration;
    music_standard: AudioEncoderConfiguration;
    standard_stereo: AudioEncoderConfiguration;
    high_quality: AudioEncoderConfiguration;
    high_quality_stereo: AudioEncoderConfiguration;
};
/** Internal events emitted by audio tracks. */
export declare enum AUDIO_TRACK_EVENT {
    /** Fired when the track's underlying source is updated. */
    UPDATE_TRACK_SOURCE = "update-track-source"
}
/**
 * Audio source backed by a decoded AudioBuffer. Supports start / pause /
 * seek / resume / stop and playback-speed control.
 */
declare class AudioBufferSource extends AudioSource {
    private audioBuffer;
    protected sourceNode?: AudioBufferSourceNode;
    /**
     * AudioContext time at which playback started
     * (original comment garbled in extraction — TODO confirm exact meaning).
     */
    private startPlayTime;
    /**
     * Offset into the buffer at which playback started.
     */
    private startPlayOffset;
    /**
     * Buffer position recorded when playback was paused.
     */
    private pausePlayTime;
    private options;
    private currentLoopCount;
    private currentPlaybackSpeed;
    set currentState(state: AudioSourceState);
    get currentState(): AudioSourceState;
    private _currentState;
    constructor(buffer: AudioBuffer, options?: AudioSourceOptions);
    createWebAudioDiagram(): GainNode;
    /** Duration of the underlying buffer, in seconds. */
    get duration(): number;
    get playbackSpeed(): number;
    /**
     * Current playback position.
     * Original comment (garbled) describes it as derived from elapsed context
     * time, playback speed, and the start offset, returning 0 when not
     * playing — TODO confirm exact formula.
     */
    get currentTime(): number;
    updateOptions(options: AudioSourceOptions): void;
    startProcessAudioBuffer(): void;
    /**
     * Pauses playback. AudioBufferSourceNode has no native pause; per the
     * original comment this is implemented by stopping the node and starting
     * a new one on resume.
     */
    pauseProcessAudioBuffer(): void;
    /**
     * Seeks playback to the given position (also usable while paused).
     * @param time Target position in seconds.
     */
    seekAudioBuffer(time: number): void;
    /**
     * Resumes playback after a pause.
     */
    resumeProcessAudioBuffer(): void;
    stopProcessAudioBuffer(): void;
    destroy(): void;
    /**
     * Sets the playback speed; aligned with the native
     * setAudioMixingPlaybackSpeed API, valid range [50, 400].
     */
    setAudioBufferPlaybackSpeed(speed: number): void;
    private startSourceNode;
    private createSourceNode;
    private handleSourceNodeEnded;
    private reset;
}
/** Shared singleton tracking the global AudioContext state. */
export declare const audioContextState: AudioState;
/**
 * Plays audio tracks through `<audio>` elements rather than WebAudio.
 *
 * NOTE(review): the original (Chinese) comment was garbled in extraction; it
 * cites a Chrome WebAudio limitation as the reason for using `<audio>`
 * elements: https://bugs.chromium.org/p/chromium/issues/detail?id=687574
 */
declare class AudioElementPlayCenter {
    /** Callback invoked when `<audio>` autoplay fails (blocked by the browser). */
    onAutoplayFailed?: () => void;
    private elementMap;
    private elementStateMap;
    private elementsNeedToResume;
    private sinkIdMap;
    constructor();
    /** Routes playback of the given track to the output device `deviceID`. */
    setSinkID(trackId: string, deviceID: string): Promise<void>;
    play(track: MediaStreamTrack, trackId: string, volume: number, sessionId?: string): void;
    /** Swaps the MediaStreamTrack played under an existing trackId. */
    updateTrack(trackId: string, track: MediaStreamTrack): void;
    isPlaying(trackId: string): boolean;
    setVolume(trackId: string, volume: number): void;
    stop(trackId: string): void;
    private bindAudioElementEvents;
    getPlayerState(trackId: string): string;
    /**
     * Resumes `<audio>` elements that need resuming
     * (original comment lost in extraction).
     */
    private autoResumeAudioElement;
    autoResumeAfterInterruption: (force?: boolean) => void;
    private autoResumeAfterInterruptionOnIOS15_16;
}
/** Shared singleton instance of {@link AudioElementPlayCenter}. */
export declare const audioElementPlayCenter: AudioElementPlayCenter;
/** @en
 * `AudioEncoderConfiguration` is the interface that defines the audio encoder configurations.
 *
 * You can customize the audio encoder configurations when calling [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}, [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack} or [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}.
 */
export declare interface AudioEncoderConfiguration {
    /** @en
     * Sample rate of the audio (Hz).
     */
    sampleRate?: number;
    /** @en
     * Sample size of the audio.
     */
    sampleSize?: number;
    /** @en
     * Whether to enable stereo.
     */
    stereo?: boolean;
    /** @en
     * Bitrate of the audio (Kbps).
     */
    bitrate?: number;
}
/** @en
 * The preset audio encoder configurations.
 *
 * You can pass the preset audio encoder configurations when calling the following methods:
 * - [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}
 * - [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}
 * - [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}
 *
 * The following table lists all the preset audio profiles. The SDK uses `"music_standard"` by default.
 *
 * | Audio Profile | Configurations |
 * | -------- | --------------- |
 * |`"speech_low_quality"`|Sample rate 16 kHz, mono, encoding bitrate 24 Kbps|
 * |`"speech_standard"`|Sample rate 32 kHz, mono, encoding bitrate 24 Kbps|
 * |`"music_standard"`|Sample rate 48 kHz, mono, encoding bitrate 32 Kbps|
 * |`"standard_stereo"`|Sample rate 48 kHz, stereo, encoding bitrate 64 Kbps|
 * |`"high_quality"`|Sample rate 48 kHz, mono, encoding bitrate 128 Kbps|
 * |`"high_quality_stereo"`|Sample rate 48 kHz, stereo, encoding bitrate 192 Kbps|
 * @public
 */
export declare type AudioEncoderConfigurationPreset = keyof typeof AUDIO_ENCODER_CONFIG_SETTINGS;
/**
 * Options selecting which audio stages to play back.
 * NOTE(review): the original per-field comments were lost in extraction; each
 * fragment only retained the literal `` `true` `` — defaults presumably
 * `true`, TODO confirm against SDK docs.
 */
export declare interface AudioPlaybackOptions {
    /**
     * Whether to play the original audio — original comment lost, TODO confirm.
     */
    origin?: boolean;
    /**
     * Whether to play the mixing audio — original comment lost, TODO confirm.
     */
    mixing?: boolean;
    /**
     * Whether to play the effect audio — original comment lost, TODO confirm.
     */
    effect?: boolean;
}
/**
 * Context shared by the audio processors (agora-rte-extension) attached to a
 * single track's pipeline: exposes the AudioContext, constraint requests, and
 * stats/usage registries.
 */
export declare class AudioProcessorContext extends EventEmitter implements IAudioProcessorContext {
    private constraintsMap;
    private statsRegistry;
    private readonly audioContext;
    private readonly trackId;
    private readonly direction;
    private usageRegistry;
    private _chained;
    set chained(chained: boolean);
    get chained(): boolean;
    constructor(audioContext: AudioContext, trackId: string, direction: "local" | "remote");
    getConstraints(): Promise<MediaTrackConstraints>;
    getAudioContext(): AudioContext;
    /** Requests that new MediaTrackConstraints be applied on behalf of `processor`. */
    requestApplyConstraints(constraints: MediaTrackConstraints, processor: IBaseProcessor): Promise<void>;
    /** Reverts constraints previously applied for `processor`. */
    requestRevertConstraints(processor: IBaseProcessor): Promise<void>;
    /** Registers a stats callback, reported under `type`, for the given processor. */
    registerStats(processor: IBaseProcessor, type: string, cb: () => any): void;
    unregisterStats(processor: IBaseProcessor, type: string): void;
    /** Collects stats from every registered processor callback. */
    gatherStats(): ProcessorStats[];
    registerUsage(processor: IBaseProcessor, cb: () => Usage): void;
    unregisterUsage(processor: IBaseProcessor): void;
    gatherUsage(): Promise<UsageWithDirection[]>;
    /** Whether this context belongs to a local or a remote track. */
    getDirection(): "local" | "remote";
}
/**
 * Terminal node of an audio processor pipeline: receives the processed
 * track/node output so the SDK can consume it.
 */
export declare class AudioProcessorDestination extends EventEmitter implements IAudioProcessor {
    name: string;
    ID: string;
    private inputTrack?;
    private inputNode?;
    private readonly audioProcessorContext;
    _source?: IAudioProcessor;
    constructor(audioProcessorContext: AudioProcessorContext);
    get kind(): Kind;
    get enabled(): boolean;
    pipe(): IAudioProcessor;
    unpipe(): void;
    enable(): void;
    disable(): void;
    reset(): void;
    /** Updates the pipeline input with a raw track and/or a WebAudio node. */
    updateInput(inputOptions: {
        track?: MediaStreamTrack;
        node?: AudioNode;
        context: IAudioProcessorContext;
    }): void;
}
/**
 * Abstract base class for WebAudio-backed audio sources. Manages the output
 * gain node and output track, volume analysis, and no-audio-input detection.
 */
declare abstract class AudioSource extends EventEmitter {
    outputNode: GainNode;
    outputTrack?: MediaStreamTrack;
    isPlayed: boolean;
    protected abstract sourceNode?: AudioNode;
    context: AudioContext;
    private audioBufferNode?;
    private destNode?;
    private audioOutputLevel;
    protected volumeLevelAnalyser: VolumeLevelAnalyser;
    private _processedNode;
    get processSourceNode(): AudioNode | undefined;
    set processedNode(node: AudioNode | undefined);
    get processedNode(): AudioNode | undefined;
    /**
     * Node used for playback (original comment lost in extraction).
     */
    protected playNode: AudioNode;
    protected isDestroyed: boolean;
    protected onNoAudioInput?: () => void;
    /**
     * Whether the source currently produces no audio input.
     * Original comment (garbled) mentions Safari and the constants 5 and 0 —
     * presumably a threshold of consecutive zero-volume readings; TODO confirm.
     */
    protected isNoAudioInput: boolean;
    private _noAudioInputCount;
    constructor();
    startGetAudioBuffer(bufferSize: number): void;
    stopGetAudioBuffer(): void;
    createOutputTrack(): MediaStreamTrack;
    play(dest?: AudioNode): void;
    stop(): void;
    /**
     * Returns the current volume level.
     * @returns A value in [0, 1].
     */
    getAccurateVolumeLevel(): number;
    /**
     * Checks whether the track receives audio input (original comment mentions
     * a ~1s window — TODO confirm).
     * @param times Number of internal retries.
     * @returns Whether audio input was detected.
     */
    checkHasAudioInput(times?: number): Promise<boolean>;
    /**
     * Returns the gain of the output GainNode.
     * Range [0, Infinity]; 1 means unmodified volume.
     */
    getAudioVolume(): number;
    setVolume(level: number): void;
    destroy(): void;
    protected disconnect(): void;
    protected connect(): void;
}
/** Internal events emitted by {@link AudioSource} implementations. */
export declare enum AudioSourceEvents {
    /** The audio source playback state changed. */
    AUDIO_SOURCE_STATE_CHANGE = "audio_source_state_change",
    /** Fired the first time data is received from the MediaStreamTrack. */
    RECEIVE_TRACK_BUFFER = "receive_track_buffer",
    /** An audio buffer is available. */
    ON_AUDIO_BUFFER = "on_audio_buffer",
    /** The underlying source was updated. */
    UPDATE_SOURCE = "update_source"
}
/** @en
 * Options for processing the audio buffer. You need to set the options for processing the audio buffer when calling [startProcessAudioBuffer]{@link IBufferSourceAudioTrack.startProcessAudioBuffer}.
 */
export declare interface AudioSourceOptions {
    /** @en
     * How many times the audio loops.
     */
    cycle?: number;
    /** @en
     * Whether to loop the audio infinitely.
     */
    loop?: boolean;
    /** @en
     * The playback position (seconds).
     */
    startPlayTime?: number;
}
/** @en
 * Processing state of the audio buffer:
 * - `"stopped"`: The SDK stops processing the audio buffer. Reasons may include:
 *   - The SDK finishes processing the audio buffer.
 *   - The user manually stops the processing of the audio buffer.
 * - `"playing"`: The SDK is processing the audio buffer.
 * - `"paused"`: The SDK pauses processing the audio buffer.
 *
 * You can get the state with [BufferSourceAudioTrack.on("source-state-change")]{@link IBufferSourceAudioTrack.event_source_state_change}.
 * @public
 */
export declare type AudioSourceState = "stopped" | "playing" | "paused";
/**
 * Tracks the state of the shared AudioContext, including an extra
 * "interrupted" state for iOS audio-session interruptions.
 */
declare class AudioState extends EventEmitter {
    prevState: AudioContextState | "interrupted" | undefined;
    curState: AudioContextState | "interrupted" | undefined;
    currentTime?: number;
    currentTimeStuckAt?: number;
    private interruptDetectorTrack?;
    /** Whether an audio interruption is currently in progress. */
    get duringInterruption(): boolean;
    private onLocalAudioTrackMute;
    private onLocalAudioTrackUnmute;
    /**
     * Only to be called on iOS 15/16.
     */
    bindInterruptDetectorTrack(track: MicrophoneAudioTrack): void;
    /**
     * Only to be called on iOS 15/16.
     */
    unbindInterruptDetectorTrack(track: MicrophoneAudioTrack): void;
}
/**
 * Runs `callback` on a repeating timer at the given `frequency`; returns a
 * cleanup function (presumably stops the loop — TODO confirm).
 */
export declare function audioTimerLoop(callback: (time: number) => any, frequency: number): () => void;
/**
 * Audio source backed by a live MediaStreamTrack (local capture or remote).
 */
declare class AudioTrackSource extends AudioSource {
    protected sourceNode: MediaStreamAudioSourceNode;
    track: MediaStreamTrack;
    clonedTrack?: MediaStreamTrack;
    private audioElement;
    private isCurrentTrackCloned;
    private isRemoteTrack;
    private originVolumeLevelAnalyser?;
    get isFreeze(): boolean;
    constructor(track: MediaStreamTrack, isRemoteTrack?: boolean, originTrack?: MediaStreamTrack);
    /**
     * On iOS 12, a MediaStreamSource created while the context is suspended
     * cannot be used once the context is running again, so the WebAudio graph
     * must be rebuilt (translated from the original, partially garbled,
     * comment).
     */
    private rebuildWebAudio;
    updateTrack(track: MediaStreamTrack): void;
    destroy(): void;
    createMediaStreamSourceNode(track: MediaStreamTrack): MediaStreamAudioSourceNode;
    updateOriginTrack(originTrack: MediaStreamTrack): void;
    getOriginVolumeLevel(): number;
}
/** Event hub for browser autoplay-blocked notifications. */
declare class AutoPlayGestureEventEmitter extends EventEmitter {
    /** Callback invoked when (video) autoplay fails. */
    onAutoplayFailed?: () => void;
    /** Callback invoked when audio autoplay fails. */
    onAudioAutoplayFailed?: () => void;
}
/** Shared singleton instance of {@link AutoPlayGestureEventEmitter}. */
export declare const autoPlayGestureEventEmitter: AutoPlayGestureEventEmitter;
/** @en
 * @ignore
 *
 * Image enhancement options. You need to set the image enhancement options when calling [setBeautyEffect]{@link ILocalVideoTrack.setBeautyEffect}.
 */
export declare interface BeautyEffectOptions {
    /** @en
     * The smoothness level.
     *
     * The value range is [0.0, 1.0]. The original smoothness level is 0.0. The default value is 0.5. This parameter is usually used to remove blemishes.
     */
    smoothnessLevel?: number;
    /** @en
     * The brightness level.
     *
     * The value range is [0.0, 1.0]. The original brightness level is 0.0. The default value is 0.7.
     */
    lighteningLevel?: number;
    /** @en
     * The redness level.
     *
     * The value range is [0.0, 1.0]. The original redness level is 0.0. The default value is 0.1. This parameter adjusts the red saturation level.
     */
    rednessLevel?: number;
    /** @en
     * The contrast level. Use this together with {@link lighteningLevel}.
     * - 0: Low contrast level.
     * - 1: (Default) The original contrast level.
     * - 2: High contrast level.
     */
    lighteningContrastLevel?: 0 | 1 | 2;
}
/**
 * Internal WebGL parameters for the beauty (image enhancement) effect;
 * loosely mirrors {@link BeautyEffectOptions} — TODO confirm field mapping.
 */
export declare interface BeautyWebGLParameters {
    denoiseLevel?: number;
    lightLevel?: number;
    rednessLevel?: number;
    lighteningContrastLevel?: number;
}
/** Reads a Blob fully into a Uint8Array. */
export declare const blob2Uint8Array: (blob: Blob) => Promise<Uint8Array>;
/**
 * Local audio track whose source is an audio file or an AudioBuffer; wraps an
 * {@link AudioBufferSource} and exposes buffer-playback control.
 */
export declare class BufferSourceAudioTrack extends LocalAudioTrack implements IBufferSourceAudioTrack {
    /** The original source: an online-file URL, a local File, an AudioBuffer, or null. */
    source: string | File | AudioBuffer | null;
    private _bufferSource;
    get __className__(): string;
    constructor(source: string | File | AudioBuffer, bufferSource: AudioBufferSource, encodingConfig?: AudioEncoderConfiguration, trackId?: string);
    /** Current processing state of the audio buffer. */
    get currentState(): AudioSourceState;
    /** Duration of the audio buffer, in seconds. */
    get duration(): number;
    get playbackSpeed(): number;
    getCurrentTime(): number;
    startProcessAudioBuffer(options?: AudioSourceOptions): void;
    pauseProcessAudioBuffer(): void;
    seekAudioBuffer(time: number): void;
    resumeProcessAudioBuffer(): void;
    stopProcessAudioBuffer(): void;
    close(): void;
    setAudioBufferPlaybackSpeed(speed: number): void;
}
/** @en
 * Configurations for the audio track from an audio file or `AudioBuffer` object. Set these configurations when calling [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}.
 */
export declare interface BufferSourceAudioTrackInitConfig {
    /** @en
     * The type of the audio source:
     * - `File`: A [File](https://developer.mozilla.org/en-US/docs/Web/API/File) object, representing a local audio file.
     * - `string`: The online audio file retrieved from an HTTPS address. Ensure the address supports HTTPS and CORS.
     * - `AudioBuffer`: An [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object, representing the raw data in PCM format.
     */
    source: File | string | AudioBuffer;
    /** @en
     * Whether to cache the online file:
     * - `true`: Cache the online file.
     * - `false`: (default) Do not cache the online file.
     */
    cacheOnlineFile?: boolean;
    /** @en
     * The audio encoder configurations.
     *
     * You can set the audio encoder configurations in either of the following ways:
     * - Pass the preset audio encoder configurations by using [[AudioEncoderConfigurationPreset]].
     * - Pass your customized audio encoder configurations by using [[AudioEncoderConfiguration]].
     *
     * > Firefox does not support setting the audio encoding rate.
     */
    encoderConfig?: AudioEncoderConfiguration | AudioEncoderConfigurationPreset;
}
/**
 * Local video track captured from a camera.
 */
export declare class CameraVideoTrack extends LocalVideoTrack implements ICameraVideoTrack {
    private _config;
    private _originalConstraints;
    private _constraints;
    _enabled: boolean;
    _deviceName: string;
    get __className__(): string;
    constructor(track: MediaStreamTrack, config: CameraVideoTrackInitConfig, constraints: MediaTrackConstraints, scalabilityConfig?: SVCConfiguration, optimizationMode?: OptimizationMode | "balanced", trackId?: string);
    /** Switches the capture device, by device ID or by facing mode. */
    setDevice(deviceId: string | RequiredOnlyOneOf<{
        facingMode: VideoFacingModeEnum;
        deviceId: string;
    }>): Promise<void>;
    private _setDeviceById;
    /**
     * Switches the capture device by facingMode.
     * @param facingMode See https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings/facingMode
     */
    private _setDeviceByFacingModel;
    setEnabled(enabled: boolean, skipChangeState?: boolean): Promise<void>;
    setEncoderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, doNotRenegoation?: boolean): Promise<void>;
    protected _getDefaultPlayerConfig(): Partial<PlayerConfig>;
    protected onTrackEnded(): void;
    /** Re-acquires the camera MediaStreamTrack, optionally with new constraints. */
    renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
    /** Workaround to resume video in WeChat on iOS 15/16. */
    tryResumeVideoForIOS15_16WeChat: () => Promise<void>;
    close(): void;
    clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): CameraVideoTrack;
    bindProcessorContextEvents(): void;
}
/** @en
 * Configurations for the video track from the video captured by a camera. Set these configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}.
 */
export declare interface CameraVideoTrackInitConfig {
    /** @en
     * The video encoder configurations.
     *
     * You can set the video encoder configurations in either of the following ways:
     * - Pass the preset video encoder configurations by using [[VideoEncoderConfigurationPreset]].
     * - Pass your customized video encoder configurations by using [[VideoEncoderConfiguration]].
     * - Leave this property empty to use the SDK's default value, `"480p_1"` (resolution: 640 × 480, frame rate: 15 fps, bitrate: 500 Kbps).
     */
    encoderConfig?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset;
    /** @en
     * Whether to use the front camera or the rear camera.
     *
     * You can use this parameter to choose between the front camera and the rear camera on a mobile device:
     * - `"user"`: The front camera.
     * - `"environment"`: The rear camera.
     */
    facingMode?: VideoFacingModeEnum;
    /** @en
     * Specifies the camera ID.
     *
     * You can get a list of the available cameras by calling [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}.
     */
    cameraId?: string;
    /** @en
     * @since
     * <br>&emsp;&emsp;&emsp;*4.2.0*
     *
     * Sets the video transmission optimization mode.
     *
     * You can call this method during a video call, a live streaming or screen sharing to dynamically change the optimization mode. For example, during the screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
     *
     * > Note: This method supports Chrome only.
     *
     * @param mode The video transmission optimization mode:
     * - `"detail"`: Prioritizes video quality.
     *   - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
     *   - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
     * - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
     *   - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
     *   - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
     */
    optimizationMode?: OptimizationMode;
    /** @en
     * @ignore
     *
     * @since
     * <br>&emsp;&emsp;&emsp;*4.18.0*
     *
     * Configurations for Scalable Video Coding (SVC).
     *
     * You can set the configurations using one of the following options:
     * - Use the preset SVC configurations provided by the SDK through {@link SVCConfigurationPreset}.
     * - Use your custom SVC configurations through {@link SVCConfiguration}.
     *
     * NOTE(review): the property name is misspelled ("scalabilty"), but it is
     * part of the shipped public API and must not be renamed.
     */
    scalabiltyMode?: SVCConfiguration | SVCConfigurationPreset;
}
/** Resolves to the `[width, height]` of the given video track. */
export declare function checkMediaStreamTrackResolution(track: MediaStreamTrack): Promise<[number, number]>;
/** Method decorator guarding calls based on track state (original comment lost in extraction — TODO confirm exact behavior). */
export declare function checkTrackState(): (target: any, propertyKey: any, descriptor: PropertyDescriptor) => TypedPropertyDescriptor<any> | undefined;
/** @en
 * The visibility of the `<video>` tag.
 *
 * Get the visibility of the `<video>` tag through the following methods and events:
 * - [localVideoTrack.on("video-element-visible-status")]{@link ILocalVideoTrack.event_video_element_visible_status}
 * - [localVideoTrack.getVideoElementVisibleStatus]{@link ILocalVideoTrack.getVideoElementVisibleStatus}
 * - [remoteVideoTrack.on("video-element-visible-status")]{@link IRemoteVideoTrack.event_video_element_visible_status_2}
 * - [remoteVideoTrack.getVideoElementVisibleStatus]{@link IRemoteVideoTrack.getVideoElementVisibleStatus}
 *
 * This object contains the following parameters:
 * - `visible`: Boolean, whether the `<video>` tag is visible or not.
 * - `reason`: This parameter is only valid when `visible` is `false`, which indicates the reason:
 *   - `"SIZE"`: The size of `<video>` is too small to see.
 *   - `"STYLE"`: The CSS styles of `<video>` or its ancestor element cause `<video>` to be invisible.
 *   - `"POSITION"`: The `<video>` tag or its ancestor element are positioned outside the viewport.
 *   - `"COVERED"`: The `<video>` tag is covered by other elements.
 */
export declare type CheckVideoVisibleResult = CheckVisibleResult;
/** @en
 * Specifies a constraint for a property, such as the resolution or bitrate for video capture in [[VideoEncoderConfiguration]].
 */
export declare interface ConstrainLong {
    /** @en
     * The lower limit of the property.
     */
    min?: number;
    /** @en
     * The upper limit of the property.
     */
    max?: number;
    /** @en
     * An ideal value of a property. If the video capture device cannot output this value, it outputs the closest value instead.
     */
    ideal?: number;
    /** @en
     * A required value of a property. If the video capture device cannot output this value, the video capture fails.
     */
    exact?: number;
}
/** Creates an audio track from an audio file or AudioBuffer source. */
export declare function createBufferSourceAudioTrack(config: BufferSourceAudioTrackInitConfig): Promise<BufferSourceAudioTrack>;
/** Creates a video track captured from a camera. */
export declare function createCameraVideoTrack(config?: CameraVideoTrackInitConfig): Promise<CameraVideoTrack>;
/** Creates a local audio track from a caller-supplied MediaStreamTrack. */
export declare function createCustomAudioTrack(config: CustomAudioTrackInitConfig): LocalAudioTrack;
/** Creates a local video track from a caller-supplied MediaStreamTrack. */
export declare function createCustomVideoTrack(config: CustomVideoTrackInitConfig): LocalVideoTrack;
/** Creates a microphone audio track and a camera video track in one call. */
export declare function createMicrophoneAndCameraTracks(audioConfig?: MicrophoneAudioTrackInitConfig, videoConfig?: CameraVideoTrackInitConfig): Promise<[MicrophoneAudioTrack, CameraVideoTrack]>;
/** Creates an audio track captured from a microphone. */
export declare function createMicrophoneAudioTrack(config?: MicrophoneAudioTrackInitConfig): Promise<MicrophoneAudioTrack>;
/** Creates a single track mixing the given local audio tracks. */
export declare function createMixingAudioTrack(trackList: LocalAudioTrack[]): MixingAudioTrack;
/** Creates a screen-sharing video track; with `withAudio: "enable"` also returns the shared audio track. */
export declare function createScreenVideoTrack(config: ScreenVideoTrackInitConfig, withAudio: "enable"): Promise<[LocalVideoTrack, LocalAudioTrack]>;
/** Creates a screen-sharing video track without audio. */
export declare function createScreenVideoTrack(config?: ScreenVideoTrackInitConfig, withAudio?: "disable"): Promise<LocalVideoTrack>;
/** Creates a screen-sharing video track; audio is included only when available (`withAudio: "auto"`). */
export declare function createScreenVideoTrack(config: ScreenVideoTrackInitConfig, withAudio: "auto"): Promise<[LocalVideoTrack, LocalAudioTrack] | LocalVideoTrack>;
/** @en
 * Configurations for the custom audio track. Set these configurations when calling [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}.
 */
export declare interface CustomAudioTrackInitConfig {
    /** @en
     * Your [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
     */
    mediaStreamTrack: MediaStreamTrack;
    /** @en
     * The audio encoder configurations.
     *
     * You can set the audio encoder configurations in either of the following ways:
     * - Pass the preset audio encoder configurations by using [[AudioEncoderConfigurationPreset]].
     * - Pass your customized audio encoder configurations by using [[AudioEncoderConfiguration]].
     *
     * > Firefox does not support setting the audio encoding rate.
     */
    encoderConfig?: AudioEncoderConfiguration | AudioEncoderConfigurationPreset;
}
/**
 * Configurations for a custom video track. Set these configurations when calling
 * [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}.
 */
export declare interface CustomVideoTrackInitConfig {
    /**
     * Your [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
     */
    mediaStreamTrack: MediaStreamTrack;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.17.1*
     *
     * Width of the video.
     *
     * You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
     *
     * For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    width?: number | ConstrainLong;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.17.1*
     *
     * Height of the video.
     *
     * You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
     *
     * For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    height?: number | ConstrainLong;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.17.1*
     *
     * Frame rate of the video (fps).
     *
     * You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
     *
     * For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    frameRate?: number | ConstrainLong;
    /**
     * The minimum bitrate of sending the video track (Kbps).
     */
    bitrateMin?: number;
    /**
     * The maximum bitrate of sending the video track (Kbps).
     */
    bitrateMax?: number;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.2.0*
     *
     * Sets the video transmission optimization mode.
     *
     * You can call this method during a video call, a live streaming or screen sharing to dynamically change the optimization mode. For example, during screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
     *
     * > Note: This method supports Chrome only.
     *
     * @param mode The video transmission optimization mode:
     * - `"balanced"`: Uses the default optimization mode.
     *   - For a screen-sharing video track, the default transmission optimization strategy prioritizes clarity.
     *   - For the other types of video tracks, the SDK may reduce the frame rate or the sending resolution in poor network conditions.
     * - `"detail"`: Prioritizes video quality.
     *   - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
     *   - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
     * - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
     *   - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
     *   - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
     */
    optimizationMode?: "motion" | "detail";
    /**
     * @ignore
     *
     * @since
     * <br>&emsp;&emsp;&emsp;*4.18.0*
     *
     * Configurations for Scalable Video Coding (SVC).
     *
     * You can set `scalabiltyMode` using one of the following options:
     * - Use the preset SVC configurations provided by the SDK through {@link SVCConfigurationPreset}.
     * - Use your custom SVC configurations through {@link SVCConfiguration}.
     *
     * NOTE(review): the property name misspells "scalability"; it is part of the
     * published API surface and must stay as-is for compatibility.
     */
    scalabiltyMode?: SVCConfiguration | SVCConfigurationPreset;
}
/**
 * Abstract wrapper around a browser `RTCDataChannel` that adds Agora packet
 * framing (via an {@link IDataStream} serializer) and allows the underlying
 * channel to be replaced at runtime.
 */
export declare abstract class DataChannel extends EventEmitter implements IDataChannel {
    readonly trackMediaType: TrackMediaType.DATA;
    private _version;
    private _type;
    _config: IDataChannelConfig;
    /** The underlying (original) browser RTCDataChannel object. */
    _originDataChannel?: RTCDataChannel;
    /** Pre-built header prepended to outgoing data-stream packets. */
    protected _dataStreamPacketHeader: ArrayBuffer;
    /** Serializer/deserializer applied to packets; installed via {@link useDataStream}. */
    protected _dataStreamPacketHandler: IDataStream;
    private _datachannelEventMap;
    constructor(config: IDataChannelConfig, datachannel?: RTCDataChannel);
    /** Installs the packet serializer/deserializer used by this channel. */
    useDataStream(dataStream: IDataStream): void;
    get id(): number;
    get ordered(): boolean;
    get maxRetransmits(): number;
    get metadata(): string;
    get readyState(): RTCDataChannelState;
    get _originDataChannelId(): number | null;
    getChannelId(): number;
    getConfig(): IDataChannelConfig;
    _close(): void;
    /** Resolves once the underlying channel reaches the "open" state. */
    _waitTillOpen(): Promise<void>;
    /** Swaps in a new underlying RTCDataChannel (presumably after reconnection — confirm in implementation). */
    _updateOriginDataChannel(datachannel: RTCDataChannel): void;
    private _initPacketHeader;
    // NOTE(review): "_band..." looks like a typo for "_bindDataChannelEvents";
    // the declaration must match the shipped implementation, so it is left unchanged.
    private _bandDataChannelEvents;
    private _unbindDataChannelEvents;
}
/** Event names emitted on a {@link DataChannel}, mirroring the DOM RTCDataChannel events. */
export declare enum DataChannelEvents {
    OPEN = "open",
    MESSAGE = "message",
    CLOSE = "close",
    CLOSING = "closing",
    ERROR = "error"
}
/** Decodes an encoded audio file buffer into a Web Audio `AudioBuffer`. */
export declare function decodeAudioData(buffer: ArrayBuffer): Promise<AudioBuffer>;
/** Default statistics placeholders, one per stats shape (presumably used before real measurements arrive — confirm in implementation). */
export declare const DEFAULT_LOCAL_AUDIO_TRACK_STATS: LocalAudioTrackStats;
export declare const DEFAULT_LOCAL_VIDEO_TRACK_STATS: LocalVideoTrackStats;
export declare const DEFAULT_NETWORK_QUALITY_STATS: NetworkQuality;
export declare const DEFAULT_REMOTE_AUDIO_TRACK_STATS: RemoteAudioTrackStats;
export declare const DEFAULT_REMOTE_VIDEO_TRACK_STATS: RemoteVideoTrackStats;
/**
 * Statistics reported by the denoiser (see {@link ExternalMethods.getDenoiserStats}).
 * NOTE(review): the abbreviated field names are not documented here; their
 * semantics are presumably denoiser-internal metrics — confirm against the
 * denoiser extension's documentation.
 */
export declare interface DenoiserStats {
    ns: number;
    ebn: number;
    ean: number;
    vl: number;
}
/**
 * Detects whether the HTMLAudioElement `setSinkId` API is supported.
 * Firefox supports it starting from version 116; desktop Chrome / Edge support it.
 */
export declare function detectSupportAudioElementSetSinkId(): boolean;
/**
 * Information of the media input device.
 *
 * - You can get the audio sampling device information through [onMicrophoneChanged]{@link onMicrophoneChanged}.
 * - You can get the video capture device information through [onCameraChanged]{@link onCameraChanged}.
 * - You can get the audio playback device information through [onPlaybackDeviceChanged]{@link onPlaybackDeviceChanged}.
 * @public
 */
export declare interface DeviceInfo {
    /**
     * The latest time when the state of the media input device was updated.
     *
     * A Unix timestamp in milliseconds.
     */
    updateAt: number;
    /**
     * The time when the SDK first detects the media input device.
     *
     * A Unix timestamp in milliseconds.
     */
    initAt: number;
    /**
     * The state of the capture device.
     */
    state: DeviceState;
    /**
     * Device information of the media input device. See [MediaDeviceInfo](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo) for details.
     */
    device: MediaDeviceInfo;
}
/**
 * Tracks the media devices available to the browser (microphones, cameras,
 * speakers), caches their info, and emits change events ({@link DeviceManagerEvent}).
 */
export declare class DeviceManager extends EventEmitter {
    private _state;
    get state(): DeviceManagerState;
    set state(state: DeviceManagerState);
    private isAccessMicrophonePermission;
    private isAccessCameraPermission;
    private lastAccessMicrophonePermission;
    private lastAccessCameraPermission;
    // NOTE(review): lower-case "d" breaks camelCase ("checkdeviceMatched"), but the
    // declaration must match the implementation, so the name is left unchanged.
    private checkdeviceMatched;
    /**
     * Cache of known devices and their latest state/update timestamps.
     */
    private deviceInfoMap;
    constructor();
    /** Enumerates devices of the requested kinds; `skipPermissionCheck` presumably avoids a permission prompt — confirm in implementation. */
    enumerateDevices(audio: boolean, video: boolean, skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
    getRecordingDevices(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
    getCamerasDevices(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
    getSpeakers(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
    /** Looks up a deviceId by device name/label; returns null when no device matches. */
    searchDeviceIdByName(deviceName: string): string | null;
    getDeviceById(deviceId: string): Promise<MediaDeviceInfo>;
    private init;
    private updateDevicesInfo;
    /** A MediaDeviceInfo is considered usable only when both deviceId and label are non-empty. */
    private checkMediaDeviceInfoIsOk;
}
/** Shared singleton instance of {@link DeviceManager}. */
export declare const deviceManager: DeviceManager;
/** Events emitted by {@link DeviceManager} when its state or a device set changes. */
export declare enum DeviceManagerEvent {
    STATE_CHANGE = "state_change",
    RECORDING_DEVICE_CHANGED = "recordingDeviceChanged",
    PLAYOUT_DEVICE_CHANGED = "playoutDeviceChanged",
    CAMERA_DEVICE_CHANGED = "cameraDeviceChanged"
}
/** Lifecycle states of {@link DeviceManager}: idle, initializing, and initialization finished. */
export declare enum DeviceManagerState {
    IDLE = "IDLE",
    INITING = "INITING",
    INITEND = "INITEND"
}
/**
 * The state of the media input device.
 * - `"ACTIVE"`: The device is plugged in.
 * - `"INACTIVE"`: The device is unplugged.
 * @public
 */
export declare type DeviceState = "ACTIVE" | "INACTIVE";
/** Produces typed image data for an empty image of the given MIME type (presumably for placeholder frames — confirm in implementation). */
export declare const emptyImage2TypedArray: (type: string) => Promise<ImageTypedData>;
/**
 * @ignore
 *
 * Occurs when the device is overloaded after you call [setBeautyEffect]{@link ILocalVideoTrack.setBeautyEffect} to enable image enhancement.
 *
 * You can listen for this event to notify users of the device overload and disable image enhancement.
 *
 * ```javascript
 * localVideoTrack.on("beauty-effect-overload", () => {
 *   console.log("beauty effect overload, disable beauty effect");
 *   localVideoTrack.setBeautyEffect(false);
 * });
 * ```
 * @event
 * @asMemberOf ILocalVideoTrack
 */
declare function event_beauty_effect_overload(): void;
/**
 * Occurs when the first remote audio or video frame is decoded.
 *
 * @event
 * @asMemberOf IRemoteTrack
 */
declare function event_first_frame_decoded(): void;
/**
 * @since
 * <br>&emsp;&emsp;&emsp;*4.20.1*
 *
 * Triggered when SEI data is parsed from the H.264 video stream; reports the SEI payload.
 *
 * @param sei SEI data as a Uint8Array.
 * @asMemberOf ILocalVideoTrack
 * @event
 */
declare function event_sei_received(sei: Uint8Array): void;
/**
 * Occurs when the state of processing the audio buffer in [BufferSourceAudioTrack]{@link IBufferSourceAudioTrack} changes.
 *
 * @param currentState The state of processing the audio buffer:
 * - `"stopped"`: The SDK stops processing the audio buffer. Reasons may include:
 *   - The SDK finishes processing the audio buffer.
 *   - The user manually stops the processing of the audio buffer.
 * - `"paused"`: The SDK pauses the processing of the audio buffer.
 * - `"playing"`: The SDK is processing the audio buffer.
 *
 * @event
 * @asMemberOf IBufferSourceAudioTrack
 */
declare function event_source_state_change(currentState: AudioSourceState): void;
/**
 * Occurs when an audio or video track ends.
 *
 * Reasons may include:
 * - Camera is unplugged.
 * - Microphone is unplugged.
 * - The local user stops screen sharing.
 * - The local user closes the underlying `MediaStreamTrack`.
 * - A local media device malfunctions.
 * - The device permission is revoked.
 *
 * @event
 * @asMemberOf ILocalTrack
 */
declare function event_track_ended(): void;
/**
 * Triggers when a media track is updated.
 * @param track The media stream track. See [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack).
 * @event
 */
export declare function event_track_updated(track: MediaStreamTrack): void;
/**
 * Occurs when the `RTCRtpTransceiver` instance of the current track is updated.
 *
 * @param transceiver The new `RTCRtpTransceiver` instance.
 * @param type The type of the video stream to which the current track belongs. See {@link StreamType}.
 *
 * @event
 * @asMemberOf ILocalTrack
 */
declare function event_transceiver_updated(transceiver: RTCRtpTransceiver, type?: StreamType): void;
/**
 * Occurs when the `RTCRtpTransceiver` instance of the current track is updated.
 *
 * @param transceiver The new `RTCRtpTransceiver` instance.
 * @event
 * @asMemberOf IRemoteTrack
 */
declare function event_transceiver_updated_2(transceiver: RTCRtpTransceiver): void;
/**
 * @ignore
 * Internal: fired when a user data channel closes.
 */
declare function event_user_datachannel_close(): void;
/**
 * @ignore
 * Internal: fired when a user data channel errors; carries the DOM error event.
 */
declare function event_user_datachannel_error(ev: Event): void;
/**
 * @ignore
 * Internal: fired when a message arrives on a user data channel.
 */
declare function event_user_datachannel_message(data: ArrayBuffer): void;
/**
 * @ignore
 * Internal: fired when a user data channel opens.
 */
declare function event_user_datachannel_open(): void;
/**
 * @since
 * <br>&emsp;&emsp;&emsp;*4.8.0*
 *
 * Indicates the visibility of the `<video>` HTML tag.
 *
 * The SDK triggers this event every 30 seconds.
 *
 * After you call `localVideoTrack.play`, the SDK creates an [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `localVideoTrack.isPlaying` is `true` but you cannot see any video, this event helps you check whether the `<video>` tag is visible or not and learn the reason when the `<video>` tag is invisible.
 *
 * @param data The visibility of the `<video>` tag.
 * @asMemberOf ILocalVideoTrack
 * @event
 */
declare function event_video_element_visible_status(data?: CheckVideoVisibleResult): void;
/**
 * @since
 * <br>&emsp;&emsp;&emsp;*4.8.0*
 *
 * Indicates the visibility of the `<video>` HTML tag.
 *
 * The SDK triggers this event every 30 seconds.
 *
 * After you call `remoteVideoTrack.play`, the SDK creates an [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `remoteVideoTrack.isPlaying` is `true` but you cannot see any video, this event helps you check whether the `<video>` tag is visible or not and learn the reason when the `<video>` tag is invisible.
 *
 * @param data The visibility of the `<video>` tag.
 * @asMemberOf IRemoteVideoTrack
 * @event
 */
declare function event_video_element_visible_status_2(data?: CheckVideoVisibleResult): void;
/**
 * Occurs when the video state changes.
 *
 * @event
 * @asMemberOf IRemoteTrack
 */
declare function event_video_state_changed(videoState: VideoState): void;
/** VideoFrameCallbackMetadata extended with the callback's high-resolution timestamp. */
export declare interface ExtendedVideoFrameCallbackMetadata extends VideoFrameCallbackMetadata {
    timestamp: DOMHighResTimeStamp;
}
/** Optional methods injected by extensions, e.g. denoiser statistics reporting. */
export declare interface ExternalMethods {
    getDenoiserStats?: () => DenoiserStats | undefined;
}
/** Stand-in for a Web Audio node whose connect/disconnect are no-ops (used where a real AudioNode is unavailable). */
declare class FakeAudioNode {
    disconnect(): void;
    connect(): void;
}
/**
 * Inert track-source implementation that mirrors the real audio track source
 * interface but carries no audio pipeline (presumably used when Web Audio is
 * unavailable or unnecessary — confirm in implementation).
 */
declare class FakeTrackSource extends EventEmitter {
    context: any;
    processSourceNode: undefined;
    outputTrack: undefined;
    processedNode: undefined;
    clonedTrack: undefined;
    outputNode: FakeAudioNode;
    get isPlayed(): boolean;
    get isFreeze(): boolean;
    constructor();
    setVolume(): void;
    createOutputTrack(): MediaStreamTrack;
    getOriginVolumeLevel(): number;
    getAccurateVolumeLevel(): number;
    stopGetAudioBuffer(): void;
    startGetAudioBuffer(): void;
    play(): void;
    stop(): void;
    destroy(): void;
    updateTrack(): void;
    updateOriginTrack(): void;
    createMediaStreamSourceNode(): undefined;
}
/** Converts captured frame data into an encrypted buffer bound to the given appid and channel name. */
export declare const frameData2CryptoBuffer: (imageData: ImageTypedData, appid: string, channelName: string) => Promise<Uint8Array>;
/** Returns the shared AudioContext, creating it if needed (presumably lazy singleton — confirm in implementation). */
export declare function getAudioContext(): AudioContext;
/** Resolves a preset name or explicit config into a concrete AudioEncoderConfiguration. */
export declare function getAudioEncoderConfiguration(profile: AudioEncoderConfigurationPreset | AudioEncoderConfiguration): AudioEncoderConfiguration;
/** Computes the allowed bitrate range for the given resolution/frame rate, optionally clamped by explicit min/max. */
export declare function getBitrateConstrainRange(width: number, height: number, frameRate: number, bitrateMin?: number, bitrateMax?: number): DoubleRange;
/**
 * Gets a bitrate range from resolution and frame rate; uses STANDARD_BITRATE to
 * guarantee a response.
 * @param width Video width in pixels.
 * @param height Video height in pixels.
 * @param fps Frame rate in frames per second.
 */
export declare function getBitrateFromResAndFps(width: number, height: number, fps: number): Required<DoubleRange>;
/** Returns the browser capability flags (see AgoraRTCCompatibility). */
export declare function getCompatibility(): AgoraRTCCompatibility;
/** Translates a camera track config into getUserMedia MediaTrackConstraints. */
export declare function getConstraintsFromCameraConfig(config: CameraVideoTrackInitConfig): MediaTrackConstraints;
/** Translates a microphone track config into getUserMedia MediaTrackConstraints. */
export declare function getConstraintsFromMicrophoneConfig(config: MicrophoneAudioTrackInitConfig): MediaTrackConstraints;
/** Translates a screen-share config into constraints, including Electron-specific fields. */
export declare function getConstraintsFromScreenConfig(config: ScreenVideoTrackInitConfig): ScreenConstraintsWithElectron;
/** Lists the capturable screen/window sources in an Electron environment. */
export declare function getElectronScreenSources(type?: ScreenSourceType): Promise<ElectronDesktopCapturerSource[]>;
/** Captures a screen MediaStream for a specific Electron source id. */
export declare function getElectronScreenStream(sourceId: string, config: ScreenConstraintsWithElectron, captureAudio?: boolean): Promise<MediaStream>;
/** Captures a screen MediaStream in Electron, letting the user pick the source. */
export declare function getElectronScreenStreamByUserSelect(config: ScreenConstraintsWithElectron, captureAudio?: boolean): Promise<MediaStream>;
/** Acquires a local MediaStream (camera/microphone/screen) per the given config. */
export declare function getLocalStream(config: GetUserMediaConfig, id: string): Promise<MediaStream>;
/**
 * Derives the original sender parameters for a local video track.
 * NOTE(review): the original comment (partially garbled) indicates precedence
 * rules of roughly "track.getMediaStreamTrackSettings > encoder config" for
 * resolution and frame rate — confirm exact precedence in the implementation.
 */
export declare const getOriginSenderConfig: (track: LocalVideoTrack) => {
    frameRate: number;
    bitrateMax: number;
    bitrateMin: number;
    scaleResolutionDownBy: number;
    scale: number;
};
/** Resolves a preset name or explicit config into a concrete SVC configuration. */
export declare function getScalabilityConfiguration(profile: SVCConfiguration | SVCConfigurationPreset): SVCConfiguration;
/** Resolves a screen-share encoder preset or explicit config into a VideoEncoderConfiguration. */
export declare function getScreenEncoderConfiguration(profile: VideoEncoderConfiguration | ScreenEncoderConfigurationPreset | string): VideoEncoderConfiguration;
/** Returns a silent audio MediaStreamTrack (produces no audible samples). */
export declare function getSilenceAudioTrack(): MediaStreamTrack;
/**
 * When PCM is decoded through WebAudio, the decoder may pad the buffer with
 * zero-valued samples; returns the duration of that silent padding so it can
 * be accounted for.
 */
export declare function getSilenceSamplesDuration(buffer: AudioBuffer): number;
/**
 * Produces a static (frozen-frame) track from the given track.
 * NOTE(review): the original comment mentions a 1 s interval; the `interval`
 * parameter presumably controls frame refresh — confirm in implementation.
 */
export declare function getStaticTrackStream(track: MediaStreamTrack, interval?: number): MediaStreamTrack | void;
/** Aggregated capture request passed to {@link getLocalStream}: constraints and/or pre-existing source tracks. */
export declare interface GetUserMediaConfig {
    video?: MediaTrackConstraints;
    audio?: MediaTrackConstraints;
    screen?: ScreenConstraintsWithElectron;
    videoSource?: MediaStreamTrack;
    audioSource?: MediaStreamTrack;
    /** Whether to capture the audio of the shared screen. */
    screenAudio?: boolean;
}
/** Resolves a video encoder preset or explicit config into a concrete VideoEncoderConfiguration. */
export declare function getVideoEncoderConfiguration(profile: VideoEncoderConfigurationPreset | VideoEncoderConfiguration | undefined): VideoEncoderConfiguration;
/**
 * Converts a getUserMedia DOMException (by name/message) into the SDK's AgoraRTCError.
 */
export declare function handleGetUserMediaError(errorName: string, message?: string): AgoraRTCError;
/** Presumably record whether getUserMedia has been invoked for audio / video — confirm in implementation. */
export declare let HAS_GUM_AUDIO: boolean;
export declare let HAS_GUM_VIDEO: boolean;
/** Whether an AudioContext is available in this environment. */
export declare function hasAudioContext(): boolean;
/**
 * Inherited from [LocalAudioTrack]{@link ILocalAudioTrack}, `BufferSourceAudioTrack` is an interface for the audio from a local audio file and adds several functions for controlling the processing of the audio buffer, such as starting processing, stopping processing, and seeking a specified time location.
 *
 * You can create an audio track from an audio file by calling [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}.
 */
export declare interface IBufferSourceAudioTrack extends ILocalAudioTrack {
    /**
     * The [source]{@link BufferSourceAudioTrackInitConfig.source} specified when creating an audio track.
     */
    source: string | File | AudioBuffer | null;
    /**
     * The current state of audio processing, such as start, pause, or stop.
     */
    currentState: AudioSourceState;
    /**
     * The total duration of the audio (seconds).
     */
    duration: number;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.18.0*
     *
     * The playback speed of the current audio file. Valid range is [50, 400], where:
     * - `50`: Half the original speed.
     * - `100`: (Default) The original speed.
     * - `400`: Four times the original speed.
     */
    playbackSpeed: number;
    /**
     * @param event The event name.
     * @param listener See [source-state-change]{@link event_source_state_change}.
     */
    on(event: "source-state-change", listener: typeof event_source_state_change): void;
    /**
     * When the specified event happens, the SDK triggers the callback that you pass.
     *
     * @param event The event name.
     * @param listener The callback function.
     */
    on(event: string, listener: Function): void;
    /**
     * Gets the progress (seconds) of the audio buffer processing.
     *
     * @returns The progress (seconds) of the audio buffer processing.
     */
    getCurrentTime(): number;
    /**
     * Starts processing the audio buffer.
     *
     * > Starting processing the audio buffer means that the processing unit in the SDK has received the audio data. If the audio track has been published, the remote user can hear the audio.
     * > Whether the local user can hear the audio depends on whether the SDK calls the [[play]] method and sends the audio data to the sound card.
     *
     * @param options Options for processing the audio buffer. See [[AudioSourceOptions]].
     */
    startProcessAudioBuffer(options?: AudioSourceOptions): void;
    /**
     * Pauses processing the audio buffer.
     */
    pauseProcessAudioBuffer(): void;
    /**
     * Jumps to a specified time point.
     *
     * @param time The specified time point (seconds).
     */
    seekAudioBuffer(time: number): void;
    /**
     * Resumes processing the audio buffer.
     */
    resumeProcessAudioBuffer(): void;
    /**
     * Stops processing the audio buffer.
     */
    stopProcessAudioBuffer(): void;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.18.0*
     *
     * Sets the playback speed for the current audio file.
     *
     * You can call this method before or after joining a channel.
     *
     * @param speed The playback speed. Valid range is [50, 400], where:
     * - `50`: Half the original speed.
     * - `100`: (Default) The original speed.
     * - `400`: Four times the original speed.
     */
    setAudioBufferPlaybackSpeed(speed: number): void;
}
/**
 * Inherited from [LocalVideoTrack]{@link ILocalVideoTrack}, `CameraVideoTrack` is an interface for the video captured by a local camera and adds functions such as switching devices and adjusting video encoder configurations.
 *
 * You can create a local camera video track by calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}.
 */
export declare interface ICameraVideoTrack extends ILocalVideoTrack {
    /**
     * Sets the device for capturing video.
     *
     * > You can call this method either before or after publishing the video track.
     *
     * @param deviceId Device ID, which can be passed in using the following ways:
     * - Pass a string: Pass the `deviceId` obtained using [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}.
     * - Pass an object: Starting from version 4.19.0, you can pass an object
     *   containing `facingMode` or `deviceId`, but only one of these properties
     *   can be specified. `deviceId` can be obtained through [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}, and `facingMode` supports the following values:
     *   - `"environment"`: Use the rear camera.
     *   - `"user"`: Use the front camera.
     */
    setDevice(deviceId: string | RequiredOnlyOneOf<{
        facingMode: VideoFacingModeEnum;
        deviceId: string;
    }>): Promise<void>;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.0.0*
     *
     * Enables/Disables the track.
     *
     * After a track is disabled, the SDK stops playing and publishing the track.
     *
     * > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
     * > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
     * > - Do not call `setEnabled` and `setMuted` together.
     *
     * @param enabled Whether to enable the track:
     * - `true`: Enable the track.
     * - `false`: Disable the track.
     */
    setEnabled(enabled: boolean): Promise<void>;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.19.0*
     *
     * Clones the current video track to create a new video track.
     *
     * In scenarios such as video conferencing and online education, you can use this method to display the same video stream with two sets of display parameters, including resolution, frame rate, and bitrate. For example, you can have one display set to high-definition and the other to low-definition.
     *
     * @param config The encoding configuration for the new video track. You can pass in the SDK's built-in encoding configuration through [[VideoEncoderConfigurationPreset]], or customize the video encoding configuration by passing in a [[VideoEncoderConfiguration]].
     * @param cloneTrack Whether to clone the current track. Default is `true`.
     * @returns The newly generated video track.
     */
    clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): ICameraVideoTrack;
}
/**
 * @ignore
 * Read-only view of a data channel: identity, delivery guarantees, state, and config.
 */
export declare interface IDataChannel extends EventEmitter {
    readonly id: number;
    readonly maxRetransmits: number | null;
    readonly ordered: boolean;
    readonly readyState: RTCDataChannelState;
    readonly metadata: string;
    getChannelId(): number;
    getConfig(): IDataChannelConfig;
}
/**
 * @ignore
 * Creation-time configuration for a data channel.
 */
export declare interface IDataChannelConfig {
    id: number;
    /**
     * Whether the RTCDataChannel delivers messages in order (true) or allows
     * out-of-order delivery (false). Default is true.
     */
    ordered: boolean;
    /**
     * Custom metadata string; the original comment constrains its length to 0-512
     * (presumably characters — confirm in implementation).
     */
    metadata: string;
}
/** Packet serializer/deserializer pair applied to data-channel payloads. */
declare interface IDataStream {
    serialize: (payload: ArrayBuffer) => ArrayBuffer;
    deserialize: (packet: ArrayBuffer) => ArrayBuffer;
}
/**
 * `LocalAudioTrack` is the basic interface for local audio tracks, providing main methods of local audio tracks.
 *
 * You can create a local audio track by calling [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}.
 *
 * The following interfaces are inherited from `LocalAudioTrack`:
 * - [MicrophoneAudioTrack]{@link IMicrophoneAudioTrack}, the interface for the audio sampled by a local microphone, which adds several microphone-related functions.
 * - [BufferSourceAudioTrack]{@link IBufferSourceAudioTrack}, the interface for the audio from a local audio file, which adds several audio-file-related functions.
 */
export declare interface ILocalAudioTrack extends ILocalTrack {
    /**
     * Sets the volume of a local audio track.
     *
     * @param volume The volume. The value ranges from 0 (mute) to 1000 (maximum). A value of 100 is the original volume. The volume change may not be obvious to the human ear. If the local track has been published, setting the volume affects the volume heard by remote users.
     */
    setVolume(volume: number): void;
    /**
     * Gets the audio level of a local audio track.
     *
     * @returns The audio level. The value range is [0,1]. 1 is the highest audio level.
     * Usually a user with an audio level above 0.6 is a speaking user.
     */
    getVolumeLevel(): number;
    /**
     * Sets the callback for getting raw audio data in PCM format.
     *
     * After you successfully set the callback, the SDK constantly returns the audio frames of a local audio track in this callback by using [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer).
     *
     * > You can set the `frameSize` parameter to determine the frame size in each callback, which affects the interval between the callbacks. The larger the frame size, the longer the interval between them.
     *
     * ```js
     * track.setAudioFrameCallback((buffer) => {
     *   for (let channel = 0; channel < buffer.numberOfChannels; channel += 1) {
     *     // Float32Array with PCM data
     *     const currentChannelData = buffer.getChannelData(channel);
     *     console.log("PCM data in channel", channel, currentChannelData);
     *   }
     * }, 2048);
     *
     * // ....
     * // Stop getting the raw audio data
     * track.setAudioFrameCallback(null);
     * ```
     *
     * @param audioFrameCallback The callback function for receiving the [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object. If you set `audioFrameCallback` as `null`, the SDK stops getting raw audio data.
     * @param frameSize The number of samples of each audio channel that an `AudioBuffer` object contains. You can set `frameSize` as 256, 512, 1024, 2048, 4096, 8192, or 16384. The default value is 4096.
     */
    setAudioFrameCallback(audioFrameCallback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
    /**
     * Plays a local audio track.
     *
     * > When playing an audio track, you do not need to pass any DOM element.
     */
    play(): void;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.1.0*
     *
     * > Note:
     * > - As of v4.7.0, this method no longer takes effect. Use [IRemoteAudioTrack.setPlaybackDevice]{@link IRemoteAudioTrack.setPlaybackDevice} instead.
     *
     * Sets the playback device (speaker) for the remote audio stream.
     *
     * @param deviceId The device ID, which can be retrieved by calling [[getPlaybackDevices]].
     */
    setPlaybackDevice(deviceId: string): Promise<void>;
    /**
     * Gets the statistics of a local audio track.
     *
     * @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
     */
    getStats(): LocalAudioTrackStats;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.10.0*
     *
     * Inserts a `Processor` to the local audio track.
     *
     * @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
     *
     * @returns The `Processor` instance.
     */
    pipe(processor: IAudioProcessor): IAudioProcessor;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.10.0*
     *
     * Removes the `Processor` inserted to the local audio track.
     */
    unpipe(): void;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.10.0*
     *
     * The destination of the current processing pipeline on the local audio track.
     */
    processorDestination: IAudioProcessor;
}
/**
 * @ignore
 * Sending side of a data channel; extends the read-only {@link IDataChannel}
 * with `send` and lifecycle event listeners.
 */
export declare interface ILocalDataChannel extends IDataChannel {
    send(data: ArrayBuffer): void;
    /** @internal */
    on(event: "open", listener: typeof event_user_datachannel_open): void;
    /** @internal */
    on(event: "close", listener: typeof event_user_datachannel_close): void;
    /** @internal */
    on(event: "error", listener: typeof event_user_datachannel_error): void;
}
/**
 * `LocalTrack` is the base interface for local tracks, providing the methods shared by [LocalAudioTrack]{@link ILocalAudioTrack} and [LocalVideoTrack]{@link ILocalVideoTrack}.
 */
/** @en
 * `LocalTrack` is the basic interface for local tracks, providing public methods for [LocalAudioTrack]{@link ILocalAudioTrack} and [LocalVideoTrack]{@link ILocalVideoTrack}.
 */
export declare interface ILocalTrack extends ITrack {
/**
 * @param event The event name.
 * @param listener See [track-updated]{@link event_track_updated}.
 */
/** @en
 * @param event The event name.
 * @param listener See [track-updated]{@link event_track_updated}.
 */
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
 * @param event The event name.
 * @param listener See [track-ended]{@link event_track_ended}.
 */
/** @en
 * @param event The event name.
 * @param listener See [track-ended]{@link event_track_ended}.
 */
on(event: "track-ended", listener: typeof event_track_ended): void;
/**
 * Adds an event listener.
 * @param event The event name.
 * @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
 */
/** @en
 * Adds an event listener.
 * @param event The event name.
 * @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
 */
on(event: "transceiver-updated", listener: typeof event_transceiver_updated): void;
/**
 * @since 4.0.0
 *
 * Enables or disables the track.
 *
 * After a track is disabled, the SDK stops playing and publishing it.
 *
 * > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
 * > - If the track is published, disabling it triggers [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} on the remote client, and re-enabling it triggers [user-published]{@link IAgoraRTCClient.event_user_published}.
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param enabled Whether to enable the track:
 * - `true`: Enable the track.
 * - `false`: Disable the track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.0.0*
 *
 * Enables/Disables the track.
 *
 * After a track is disabled, the SDK stops playing and publishing the track.
 *
 * > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
 * > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param enabled Whether to enable the track:
 * - `true`: Enable the track.
 * - `false`: Disable the track.
 */
setEnabled(enabled: boolean): Promise<void>;
/**
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
 *
 * Gets the statistics of a local track.
 *
 * > Note: When getting the statistics of a local video track, the `encodeDelay` property is unavailable on iOS.
 */
/** @en
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
 *
 * Gets the statistics of a local track.
 *
 * > Note: When getting the statistics of a local video track, you cannot get the `encodeDelay` property on iOS.
 */
getStats(): LocalVideoTrackStats | LocalAudioTrackStats;
/**
 * Gets the label of a local track.
 *
 * @return The label may come from:
 * - The [MediaDeviceInfo.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/label) property, for tracks created by `createMicrophoneAudioTrack` or `createCameraVideoTrack`.
 * - The `sourceId` property, for tracks created by `createScreenVideoTrack`.
 * - The [MediaStreamTrack.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/label) property, for tracks created by `createCustomAudioTrack` or `createCustomVideoTrack`.
 */
/** @en
 * Gets the label of a local track.
 *
 * @return The label that the SDK returns may include:
 * - The [MediaDeviceInfo.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/label) property, if the track is created by calling `createMicrophoneAudioTrack` or `createCameraVideoTrack`.
 * - The `sourceId` property, if the track is created by calling `createScreenVideoTrack`.
 * - The [MediaStreamTrack.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/label) property, if the track is created by calling `createCustomAudioTrack` or `createCustomVideoTrack`.
 */
getTrackLabel(): string;
/**
 * @since 4.6.0
 *
 * Sends or stops sending the media data of the track.
 *
 * If the track is published, a successful `setMuted(true)` triggers [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} on the remote client, and a successful `setMuted(false)` triggers [user-published]{@link IAgoraRTCClient.event_user_published}.
 *
 * > - Unlike {@link setEnabled}, `setMuted(true)` does not stop capturing and takes effect faster.
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param muted Whether to stop sending the media data of the track:
 * - `true`: Stop sending.
 * - `false`: Resume sending.
 */
/** @en
 * Sends or stops sending the media data of the track.
 *
 * @since
 * <br>&emsp;&emsp;&emsp;*4.6.0*
 *
 * If the track is published, a successful call of `setMuted(true)` triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and a successful call of `setMuted(false)` triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
 *
 * > - Calling `setMuted(true)` does not stop capturing audio or video and takes shorter time to take effect than [[setEnabled]]. For details, see [What are the differences between setEnabled and setMuted?](https://docs.agora.io/en/interactive-live-streaming/develop/product-workflow?platform=web#setenabled-and-setmuted).
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param muted Whether to stop sending the media data of the track:
 * - `true`: Stop sending the media data of the track.
 * - `false`: Resume sending the media data of the track.
 */
setMuted(muted: boolean): Promise<void>;
/**
 * Closes a local track and releases the audio and video resources that it occupies.
 *
 * Once closed, a local track can no longer be reused.
 */
/** @en
 * Closes a local track and releases the audio and video resources that it occupies.
 *
 * Once you close a local track, you can no longer reuse it.
 */
close(): void;
/**
 * Whether the media data of the track is currently muted (see {@link setMuted}).
 */
muted: boolean;
/**
 * Whether the track is currently enabled (see {@link setEnabled}).
 */
enabled: boolean;
}
/**
 * `LocalVideoTrack` is the base interface for local video tracks.
 *
 * Create one by calling [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
 *
 * [CameraVideoTrack]{@link ICameraVideoTrack} inherits this interface and adds camera-related methods.
 */
/** @en
 * `LocalVideoTrack` is the basic interface for local video tracks, providing the main methods for local video tracks.
 *
 * You can create a local video track by calling the [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack} method.
 *
 * Inherited from `LocalVideoTrack`, [CameraVideoTrack]{@link ICameraVideoTrack} is an interface for the video captured by a local camera and adds several camera-related functions.
 */
export declare interface ILocalVideoTrack extends ILocalTrack {
/**
 * @param event The event name.
 * @param listener See [track-updated]{@link event_track_updated}.
 */
/** @en
 * @param event The event name.
 * @param listener See [track-updated]{@link event_track_updated}.
 */
on(event: "track-updated", listener: typeof event_track_updated): void;
/** @internal */
on(event: "beauty-effect-overload", listener: typeof event_beauty_effect_overload): void;
/**
 * @param event The event name.
 * @param listener See [track-ended]{@link event_track_ended}.
 */
/** @en
 * @param event The event name.
 * @param listener See [track-ended]{@link event_track_ended}.
 */
on(event: "track-ended", listener: typeof event_track_ended): void;
/**
 * @param event The event name.
 * @param listener See [video-element-visible-status]{@link event_video_element_visible_status}.
 */
/** @en
 * @param event The event name.
 * @param listener See [video-element-visible-status]{@link event_video_element_visible_status}.
 */
on(event: "video-element-visible-status", listener: typeof event_video_element_visible_status): void;
/**
 * Adds an event listener.
 * @param event The event name.
 * @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
 */
/** @en
 * Adds an event listener.
 * @param event The event name.
 * @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
 */
on(event: "transceiver-updated", listener: typeof event_transceiver_updated): void;
/**
 * Plays a local video track on the web page.
 *
 * @param element The DOM element under which the SDK creates a `<video>` tag to play the video track. Either of:
 * - `string`: The ID of the DOM element.
 * - `HTMLElement`: The DOM object.
 * @param config Playback configurations, such as display mode and mirror mode. See [[VideoPlayerConfig]].
 */
/** @en
 * Plays a local video track on the web page.
 *
 * @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
 * - `string`: Specify the ID of the DOM element.
 * - `HTMLElement`: Pass a DOM object.
 * @param config Sets the playback configurations, such as display mode and mirror mode. See [[VideoPlayerConfig]]. By default, the SDK enables mirror mode for a local video track.
 */
play(element: string | HTMLElement, config?: VideoPlayerConfig): void;
/**
 * Gets the statistics of a local video track.
 *
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
 */
/** @en
 * Gets the statistics of a local video track.
 *
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
 */
getStats(): LocalVideoTrackStats;
/**
 * @since 4.19.0
 *
 * Clones the current video track to create a new video track, for example to publish the same content with a second set of resolution/frame-rate/bitrate parameters.
 *
 * @param config The encoder configuration for the new track: a [[VideoEncoderConfiguration]] object or a [[VideoEncoderConfigurationPreset]].
 * @param cloneTrack Whether to clone the underlying media track. Defaults to `true`.
 * @returns The newly generated video track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.19.0*
 *
 * Clones the current video track to create a new video track.
 *
 * In scenarios such as video conferencing and online education, you can use this method to display the same video stream with two sets of display parameters, including resolution, frame rate, and bitrate. For example, you can have one display set to high-definition and the other to low-definition.
 *
 * @param config The encoding configuration for the new video track. You can pass in the SDK's built-in encoding configuration through [[VideoEncoderConfiguration]], or customize the video encoding configuration by passing in a [[VideoEncoderConfigurationPreset]].
 * @param cloneTrack Whether to clone the current track. Default is `true`.
 * @returns The newly generated video track.
 */
clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): ILocalVideoTrack;
/**
 * @ignore
 *
 * Enables/disables image enhancement and sets the options.
 *
 * > - Supported on Safari 12+, Chrome 65+, and Firefox 70.0.1+; not supported on mobile devices.
 * > - With dual-stream mode enabled, the options apply to the high-quality stream only.
 *
 * @param enabled Whether to enable image enhancement:
 * - `true`: Enable image enhancement.
 * - `false`: Disable image enhancement.
 * @param options Image enhancement options. See [[BeautyEffectOptions]].
 */
/** @en
 * @ignore
 *
 * Enables/Disables image enhancement and sets the options.
 *
 * > Notes:
 * > - Agora is planning to sunset this built-in image enhancement feature.
 * You can use the upgraded Image Enhancement Extension instead. For details, see
 * [Use the Image Enhancement Extension](https://docs.agora.io/en/Video/beauty_effect_web_ng?platform=Web).
 * > - This method supports the following browsers:
 * > - Safari 12 or later.
 * > - Chrome 65 or later.
 * > - Firefox 70.0.1 or later.
 * > - This function is not supported on mobile devices.
 * > - If you enable dual-stream mode, the image enhancement options only apply to the high-quality video stream.
 *
 * @param enabled Whether to enable image enhancement:
 * - `true`: Enable image enhancement.
 * - `false`: Disable image enhancement.
 * @param options Sets image enhancement options. See [[BeautyEffectOptions]].
 */
setBeautyEffect(enabled: boolean, options?: BeautyEffectOptions): Promise<void>;
/**
 * @since 4.1.0
 *
 * Gets the data of the video frame being rendered.
 *
 * > Call this method after calling [[play]]; otherwise the method returns null.
 *
 * @returns An `ImageData` object containing RGBA data. See [ImageData](https://developer.mozilla.org/en-US/docs/Web/API/ImageData).
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.1.0*
 *
 * Gets the data of the video frame being rendered.
 *
 * > You should call this method after calling [[play]]. Otherwise, the method call returns null.
 *
 * @returns An `ImageData` object that stores RGBA data. `ImageData` is a web API supported by the browser. For details, see [ImageData](https://developer.mozilla.org/en-US/docs/Web/API/ImageData).
 */
getCurrentFrameData(): ImageData;
/**
 * @ignore
 */
/** @en
 *
 * @ignore
 */
getCurrentFrameImage(imageType: string, quality: number): Promise<ImageTypedData>;
/**
 * @since 4.2.0
 *
 * Sets the video transmission optimization mode.
 *
 * You can call this method during a call to change the mode dynamically, for example switching from `"detail"` to `"motion"` before sharing video content.
 *
 * > Note: This method supports Chrome only.
 *
 * @param mode The optimization mode: `"balanced"`, `"detail"`, or `"motion"`. See the English doc in this declaration's companion comment for the exact strategy of each mode.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.2.0*
 *
 * Sets the video transmission optimization mode.
 *
 * You can call this method during a video call, a live streaming or screen sharing to dynamically change the optimization mode. For example, during the screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
 *
 * > Note: This method supports Chrome only.
 *
 * @param mode The video transmission optimization mode:
 * - `"balanced"`: Uses the default optimization mode.
 * - For a screen-sharing video track, the default transmission optimization strategy is to prioritizes clarity.
 * - For the other types of video tracks, the SDK may reduce the frame rate or the sending resolution in poor network conditions.
 * - `"detail"`: Prioritizes video quality.
 * - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
 * - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
 * - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
 * - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
 * - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
 */
setOptimizationMode(mode: "balanced" | "motion" | "detail"): Promise<void>;
/**
 * @since 4.8.0
 *
 * Gets the visibility of the `<video>` HTML tag created by `localVideoTrack.play`; useful when `localVideoTrack.isPlaying` is `true` but no image is visible.
 *
 * @returns A [[CheckVideoVisibleResult]] object. Returns `undefined` when:
 * - `localVideoTrack.isPlaying` is `false`.
 * - The `<video>` tag does not exist.
 * - The `<video>` tag was not created by the `play` method.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.8.0*
 *
 * Gets the visibility of the `<video>` HTML tag.
 *
 * After you call `localVideoTrack.play`, the SDK creates an [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `localVideoTrack.isPlaying` is `true` but you cannot see any video, call this method to check whether the `<video>` tag is visible or not and learn the reason when the `<video>` tag is invisible.
 *
 * @returns The [[CheckVideoVisibleResult]] object. If this method returns `undefined`, it may be due to the following reasons:
 * - `localVideoTrack.isPlaying` is `false`.
 * - The `<video>` tag does not exist.
 * - The `<video>` tag is not created by calling the `play` method.
 */
getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
/**
 * @since 4.10.0
 *
 * Inserts a `Processor` into the local video track.
 * @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
 *
 * @returns The `Processor` instance.
 */
/** @en
 * Inserts a `Processor` to the local video track.
 *
 * @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
 *
 * @returns The `Processor` instance.
 */
pipe(processor: IBaseProcessor): IBaseProcessor;
/**
 * @since 4.10.0
 *
 * Removes the `Processor` inserted into the local video track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.10.0*
 *
 * Removes the `Processor` inserted to the local video track.
 */
unpipe(): void;
/**
 * @since 4.10.0
 *
 * The destination of the current processing pipeline on the local video track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.10.0*
 *
 * The destination of the current processing pipeline on the local video track.
 */
processorDestination: IBaseProcessor;
/**
 * @since 4.17.0
 *
 * Replaces the local video track. You can call this method before or after publishing:
 * - Before publishing: the new track is played locally.
 * - After publishing: the new track is received by the remote user.
 *
 * The new track can be obtained via {@link ILocalVideoTrack.getMediaStreamTrack} or `mediaStream.getVideoTracks`; you can choose to stop or retain the replaced track.
 *
 * @param track The new video track, a [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
 * @param stopOldTrack Whether to stop the old video track:
 * - `true`: Stop the old track.
 * - `false`: Retain the old track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.17.0*
 *
 * Replaces the local video track.
 *
 * You can call this method before or after publishing the local video stream:
 * - If you call this method before publishing, the new video track is played locally.
 * - If you call this method after publishing, the new video track is received by the remote user.
 *
 * The new video track can be retrieved by the {@link ILocalVideoTrack.getMediaStreamTrack} or `mediaStream.getVideoTracks` method. You can choose to either stop or retain the replaced track.
 *
 * > Notes:
 * > - This method supports Chrome 65+, Safari, and the latest Firefox.
 * > - This method might not take effect on some mobile devices.
 * > - Agora recommends switching between video tracks that are of the same type and have the same encoder configurations for the following reasons:
 * > - If the video track types are different, such as replacing a `CameraVideoTrack` object with a `ScreenVideoTrack` object, the video is flipped horizontally due to the mirror effect enabled by default on `CameraVideoTrack` (see {@link VideoPlayerConfig.mirror} for details).
 * > - If the encoder configurations (`encoderConfig`) are different, the actual sending resolution or frame rate might be different from what you set.
 * > - The subscriber will not be notified if the track gets replaced.
 * > - To switch the media input devices, Agora recommends using {@link ICameraVideoTrack.setDevice}.
 *
 * **Example**
 * ```javascript
 * // Current video track
 * const localVideoTrack = await AgoraRTC.createCameraVideoTrack();
 * // Gets the new video track (option one)
 * const newTrack = localVideoTrack.getMediaStreamTrack();
 * // Gets the new video track (option two)
 * const newTrack = await navigator.mediaDevices.getUserMedia({audio: true, video: true}).then(mediaStream => mediaStream.getVideoTracks()[0]);
 * // Replaces and stops the current video track
 * await localVideoTrack.replaceTrack(newTrack, true);
 * ```
 * @param track The new video track, which is a [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
 * @param stopOldTrack Whether to stop the old video track:
 * - true: Stops the old video track.
 * - false: Retains the old video track.
 */
replaceTrack(track: MediaStreamTrack, stopOldTrack: boolean): Promise<void>;
/**
 * Sets the video encoder configurations, such as resolution, frame rate, and bitrate.
 *
 * @param config Either a built-in preset ([[VideoEncoderConfigurationPreset]]) or a custom [[VideoEncoderConfiguration]] object.
 */
/** @en
 * Sets the video encoder configurations, such as resolution, frame rate, and bitrate.
 *
 * @param config The video encoder configurations. You can pass either [[VideoEncoderConfigurationPreset]] or a customized [[VideoEncoderConfiguration]] object.
 */
setEncoderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset): Promise<void>;
/**
 * Adds SEI data to the H.264 video stream.
 *
 * @param sei The SEI data.
 */
/** @en
 * Add the SEI data to the H.264 video stream.
 *
 * @param sei The SEI data.
 */
sendSeiData(sei: Uint8Array): void;
}
/** Image data of a video frame, returned by {@link ILocalVideoTrack.getCurrentFrameImage}. */
export declare interface ImageTypedData {
/** Image bytes — presumably encoded according to the `imageType` passed to `getCurrentFrameImage`; confirm in the implementation. */
buffer: Uint8Array;
/** Width of the image in pixels. */
width: number;
/** Height of the image in pixels. */
height: number;
}
/**
 * Inherited from [LocalAudioTrack]{@link ILocalAudioTrack}, `MicrophoneAudioTrack` represents the audio sampled by a local microphone and adds device-related methods such as switching devices.
 *
 * Create one by calling [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}.
 */
/** @en
 * Inherited from [LocalAudioTrack]{@link ILocalAudioTrack}, `MicrophoneAudioTrack` is an interface for the audio sampled by a local microphone and adds several functions such as switching devices.
 *
 * You can create a local microphone audio track by calling [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}.
 */
export declare interface IMicrophoneAudioTrack extends ILocalAudioTrack {
/**
 * Sets the device for sampling audio.
 *
 * > You can call the method either before or after publishing an audio track.
 *
 * @param deviceId The device ID, which can be retrieved by calling [AgoraRTC.getMicrophones]{@link IAgoraRTC.getMicrophones}.
 */
/** @en
 * Sets the device for sampling audio.
 *
 * > You can call the method either before or after publishing an audio track.
 *
 * @param deviceId The ID of the specified device. You can get the `deviceId` by calling [AgoraRTC.getMicrophones]{@link IAgoraRTC.getMicrophones}.
 */
setDevice(deviceId: string): Promise<void>;
/**
 * @since 4.0.0
 *
 * Enables or disables the track.
 *
 * After a track is disabled, the SDK stops playing and publishing it.
 *
 * > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
 * > - If the track is published, disabling it triggers [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} on the remote client, and re-enabling it triggers [user-published]{@link IAgoraRTCClient.event_user_published}.
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param enabled Whether to enable the track:
 * - `true`: Enable the track.
 * - `false`: Disable the track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.0.0*
 *
 * Enables/Disables the track.
 *
 * After a track is disabled, the SDK stops playing and publishing the track.
 *
 * > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
 * > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param enabled Whether to enable the track:
 * - `true`: Enable the track.
 * - `false`: Disable the track.
 */
setEnabled(enabled: boolean): Promise<void>;
}
/** @internal Intercepts the outgoing audio frames of the given RTCRtpSender (exact processing not visible here — see implementation). */
export declare function interceptLocalAudioFrame(sender: RTCRtpSender): Promise<void>;
/** @internal Intercepts the outgoing video frames of the given RTCRtpSender for the specified local video track. */
export declare function interceptLocalVideoFrame(sender: RTCRtpSender, localVideoTrack: LocalVideoTrack): Promise<void>;
/** @internal Intercepts the incoming audio frames of the given RTCRtpReceiver. */
export declare function interceptRemoteAudioFrame(receiver: RTCRtpReceiver): Promise<void>;
/** @internal Intercepts the incoming video frames of the given RTCRtpReceiver; `options.onSei` receives SEI payloads found in the received stream. */
export declare function interceptRemoteVideoFrame(receiver: RTCRtpReceiver, options?: InterceptVideoFrameOptions): Promise<void>;
/** Options for {@link interceptRemoteVideoFrame}. */
declare interface InterceptVideoFrameOptions {
/** Invoked with each SEI payload extracted from the received video frames. */
onSei?: (sei: Uint8Array) => void;
}
/**
 * `RemoteAudioTrack` is the basic interface for remote audio tracks.
 *
 * Obtain one from the [AgoraRTCRemoteUser.audioTrack]{@link IAgoraRTCRemoteUser.audioTrack} object after calling [subscribe]{@link IAgoraRTCClient.subscribe}.
 */
/** @en
 * `RemoteAudioTrack` is the basic interface for the remote audio track.
 *
 * You can get a remote audio track from the [AgoraRTCRemoteUser.audioTrack]{@link IAgoraRTCRemoteUser.audioTrack} object after calling [subscribe]{@link IAgoraRTCClient.subscribe}.
 */
export declare interface IRemoteAudioTrack extends IRemoteTrack {
/**
 * Gets the statistics of a remote audio track.
 *
 * > **Recommendation**: use [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats} instead.
 *
 * @return An [[RemoteAudioTrackStats]] object.
 */
/** @en
 * Gets the statistics of a remote audio track.
 *
 * @return An [[RemoteAudioTrackStats]] object.
 */
getStats(): RemoteAudioTrackStats;
/**
 * Plays a remote audio track.
 *
 * > The SDK does not create any DOM element when playing an audio track, so you do not need to pass a DOM element.
 */
/** @en
 * Plays a remote audio track.
 *
 * > When playing the audio track, you do not need to pass any DOM element.
 */
play(): void;
/**
 * @since 4.1.0
 *
 * Sets the audio playback device, for example, the speaker.
 *
 * > This method supports Chrome and Edge on desktop devices only; other browsers throw a `NOT_SUPPORTED` error.
 * @param deviceId The device ID, which can be retrieved by calling [[getPlaybackDevices]].
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.1.0*
 *
 * Sets the audio playback device, for example, the speaker.
 *
 * > This method supports Chrome and Edge on desktop devices only. Other browsers throw a `NOT_SUPPORTED` error when calling this method.
 * @param deviceId Device ID, which can be retrieved by calling [[getPlaybackDevices]].
 */
setPlaybackDevice(deviceId: string): Promise<void>;
/**
 * Sets the callback for getting raw audio data in PCM format.
 *
 * After a successful call, the SDK keeps returning the audio frames of the remote audio track through the callback as [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) objects.
 *
 * > `frameSize` determines the number of samples per callback; a larger frame size means a longer interval between callbacks.
 *
 * @param audioFrameCallback The callback receiving the [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object. Pass `null` to stop receiving raw audio data.
 * @param frameSize The number of samples per audio channel in each `AudioBuffer`: 256, 512, 1024, 2048, 4096, 8192, or 16384. Defaults to 4096.
 */
/** @en
 * Sets the callback for getting raw audio data in PCM format.
 *
 * After you successfully set the callback, the SDK constantly returns the audio frames of a remote audio track in this callback by using [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer).
 *
 * > You can set the `frameSize` parameter to determine the frame size in each callback, which affects the interval between the callbacks. The larger the frame size, the longer the interval between them.
 *
 * ```js
 * track.setAudioFrameCallback((buffer) => {
 * for (let channel = 0; channel < buffer.numberOfChannels; channel += 1) {
 * // Float32Array with PCM data
 * const currentChannelData = buffer.getChannelData(channel);
 * console.log("PCM data in channel", channel, currentChannelData);
 * }
 * }, 2048);
 *
 * // ....
 * // Stop getting the raw audio data
 * track.setAudioFrameCallback(null);
 * ```
 *
 * @param audioFrameCallback The callback function for receiving the [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object. If you set `audioBufferCallback` as `null`, the SDK stops getting raw audio data.
 * @param frameSize The number of samples of each audio channel that an `AudioBuffer` object contains. You can set `frameSize` as 256, 512, 1024, 2048, 4096, 8192, or 16384. The default value is 4096.
 */
setAudioFrameCallback(audioFrameCallback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
/**
 * Sets the volume of a remote audio track.
 * @param volume The volume, in the range [0, 100]; 0 means mute and 100 means the original volume.
 */
/** @en
 * Sets the volume of a remote audio track.
 *
 * @param volume The volume. The value ranges from 0 (mute) to 100 (maximum). A value of 100 is the current volume.
 */
setVolume(volume: number): void;
/**
 * Gets the audio level of a remote audio track.
 *
 * @returns The audio level, in the range [0, 1]; 1 is the highest level. A user with a level above 0.6 is usually speaking.
 */
/** @en
 * Gets the audio level of a remote audio track.
 *
 * @returns The audio level. The value range is [0,1]. 1 is the highest audio level.
 * Usually a user with audio level above 0.6 is a speaking user.
 */
getVolumeLevel(): number;
/**
 * @since 4.13.0
 *
 * Inserts a `Processor` into the remote audio track.
 *
 * @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
 *
 * @returns The `Processor` instance.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.13.0*
 *
 * Inserts a `Processor` to the remote audio track.
 *
 * @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
 *
 * @returns The `Processor` instance.
 */
pipe(processor: IAudioProcessor): IAudioProcessor;
/**
 * @since 4.13.0
 *
 * Removes the `Processor` inserted into the remote audio track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.13.0*
 *
 * Removes the `Processor` inserted to the remote audio track.
 */
unpipe(): void;
/**
 * @since 4.13.0
 *
 * The destination of the current processing pipeline on the remote audio track.
 */
/** @en
 * @since
 * <br>&emsp;&emsp;&emsp;*4.13.0*
 *
 * The destination of the current processing pipeline on the remote audio track.
 */
processorDestination: IAudioProcessor;
}
/** @ignore */
/** @en
 * @ignore
 */
export declare interface IRemoteDataChannel extends IDataChannel {
/** @internal */
on(event: "open", listener: typeof event_user_datachannel_open): void;
/** @internal */
on(event: "close", listener: typeof event_user_datachannel_close): void;
/** @internal */
on(event: "error", listener: typeof event_user_datachannel_error): void;
/** @internal */
on(event: "message", listener: typeof event_user_datachannel_message): void;
}
/**
 * `RemoteTrack` is the base interface for remote tracks, providing the methods shared by [RemoteAudioTrack]{@link IRemoteAudioTrack} and [RemoteVideoTrack]{@link IRemoteVideoTrack}.
 */
/** @en
 * `RemoteTrack` is the basic interface for remote tracks, providing public methods for [RemoteAudioTrack]{@link IRemoteAudioTrack} and [RemoteVideoTrack]{@link IRemoteVideoTrack}.
 */
export declare interface IRemoteTrack extends ITrack {
/**
 * @param event The event name.
 * @param listener See [track-updated]{@link event_track_updated}.
 */
/** @en
 * @param event The event name.
 * @param listener See [track-updated]{@link event_track_updated}.
 */
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
 * @param event The event name.
 * @param listener See [first-frame-decoded]{@link event_first_frame_decoded}.
 */
/** @en
 * @param event The event name.
 * @param listener See [first-frame-decoded]{@link event_first_frame_decoded}.
 */
on(event: "first-frame-decoded", listener: typeof event_first_frame_decoded): void;
/**
 * Adds an event listener.
 * @param event The event name.
 * @param listener See [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}.
 */
/** @en
 * Adds an event listener.
 * @param event The event name.
 * @param listener See [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}.
 */
on(event: "transceiver-updated", listener: typeof event_transceiver_updated_2): void;
/**
 * Gets the ID of the remote user who publishes the remote track.
 *
 * @return The `uid` of the remote user.
 */
/** @en
 * Gets the `uid` of the remote user who publishes the remote track.
 *
 * @return The `uid` of the remote user.
 */
getUserId(): UID;
/**
 * Gets the statistics of a remote track.
 *
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats} and [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats} instead.
 * @return An [[RemoteAudioTrackStats]] or [[RemoteVideoTrackStats]] object.
 */
/** @en
 * Gets the statistics of a remote track.
 *
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats} and [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats} instead.
 * @return An [[RemoteAudioTrackStats]] or [[RemoteVideoTrackStats]] object.
 */
getStats(): RemoteAudioTrackStats | RemoteVideoTrackStats;
}
/**
 * `RemoteVideoTrack` is the basic interface for the remote video track.
 *
 * You can get a remote video track from the [AgoraRTCRemoteUser.videoTrack]{@link IAgoraRTCRemoteUser.videoTrack} object after calling [subscribe]{@link IAgoraRTCClient.subscribe}.
 */
export declare interface IRemoteVideoTrack extends IRemoteTrack {
    /**
     * Adds an event listener.
     * @param event The event name.
     * @param listener See [track-updated]{@link event_track_updated}.
     */
    on(event: "track-updated", listener: typeof event_track_updated): void;
    /**
     * Adds an event listener.
     * @param event The event name.
     * @param listener See [video-state-changed]{@link event_video_state_changed}.
     */
    on(event: "video-state-changed", listener: typeof event_video_state_changed): void;
    /**
     * Adds an event listener.
     * @param event The event name.
     * @param listener See [first-frame-decoded]{@link event_first_frame_decoded}.
     */
    on(event: "first-frame-decoded", listener: typeof event_first_frame_decoded): void;
    /**
     * Adds an event listener.
     * @param event The event name.
     * @param listener See [video-element-visible-status]{@link event_video_element_visible_status_2}.
     */
    on(event: "video-element-visible-status", listener: typeof event_video_element_visible_status_2): void;
    /**
     * Adds an event listener.
     * @param event The event name.
     * @param listener See [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}.
     */
    on(event: "transceiver-updated", listener: typeof event_transceiver_updated_2): void;
    /**
     * Adds an event listener.
     * @param event The event name.
     * @param listener See [ILocalTrack.sei-received]{@link event_sei_received}.
     */
    on(event: "sei-received", listener: typeof event_sei_received): void;
    /**
     * Gets the statistics of a remote video track.
     *
     * > Note: it is recommended to use [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats} instead.
     *
     * @return A [[RemoteVideoTrackStats]] object.
     */
    getStats(): RemoteVideoTrackStats;
    /**
     * Plays a remote video track on the web page.
     *
     * @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
     * - `string`: Specify the ID of the DOM element.
     * - `HTMLElement`: Pass a DOM object.
     * @param config Sets the playback configurations, such as display mode and mirror mode. See [[VideoPlayerConfig]].
     */
    play(element: string | HTMLElement, config?: VideoPlayerConfig): void;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.1.0*
     *
     * Gets the data of the video frame being rendered.
     *
     * > You should call this method after calling [[play]]. Otherwise, the method call returns null.
     *
     * @returns An `ImageData` object that stores RGBA data. `ImageData` is a web API supported by the browser. For details, see [ImageData](https://developer.mozilla.org/en-US/docs/Web/API/ImageData).
     */
    getCurrentFrameData(): ImageData;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.8.0*
     *
     * Gets the visibility of the `<video>` HTML tag.
     *
     * After you call `remoteVideoTrack.play`, the SDK creates a [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `remoteVideoTrack.isPlaying` is `true` but you cannot see any video, call this method to check whether the `<video>` tag is visible or not and learn the reason when the `<video>` tag is invisible.
     *
     * @returns The [[CheckVideoVisibleResult]] object. If this method returns `undefined`, it may be due to the following reasons:
     * - `remoteVideoTrack.isPlaying` is `false`.
     * - The `<video>` tag does not exist.
     * - The `<video>` tag was not created by calling the `play` method.
     */
    getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.13.0*
     *
     * Inserts a `Processor` into the remote video track.
     *
     * @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
     *
     * @returns The `Processor` instance.
     */
    pipe(processor: IBaseProcessor): IBaseProcessor;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.13.0*
     *
     * Removes the `Processor` inserted into the remote video track.
     */
    unpipe(): void;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.13.0*
     *
     * The destination of the current processing pipeline on the remote video track.
     */
    processorDestination: IBaseProcessor;
}
/**
 * Type guard: checks whether `config` is a well-formed [[AudioEncoderConfiguration]].
 */
export declare function isAudioEncoderConfiguration(config: AudioEncoderConfiguration): config is AudioEncoderConfiguration;
/**
 * Type guard: checks whether `profile` is an [[AudioEncoderConfigurationPreset]] or an [[AudioEncoderConfiguration]].
 */
export declare function isAudioEncoderConfigurationOrPreset(profile: any): profile is AudioEncoderConfigurationPreset | AudioEncoderConfiguration;
/**
 * Type guard: checks whether `options` is a well-formed [[BeautyEffectOptions]].
 */
export declare function isBeautyEffectOptions(options: BeautyEffectOptions): options is BeautyEffectOptions;
export { isElectron }
/**
 * Type guard: checks whether `streamParameter` is a well-formed [[LowStreamParameter]].
 */
export declare function isLowStreamParameter(streamParameter: any): streamParameter is LowStreamParameter;
/**
 * Whether the browser uses the legacy plan-B SDP semantics (as opposed to unified-plan).
 */
export declare function isPlanB(): boolean;
/**
 * Type guard: checks whether `mediaSource` is a valid [[ScreenSourceType]].
 */
export declare function isScreenSourceType(mediaSource: any): mediaSource is ScreenSourceType;
/**
 * Type guard: checks whether `config` is a well-formed [[VideoEncoderConfiguration]].
 */
export declare function isVideoEncoderConfiguration(config: VideoEncoderConfiguration): config is VideoEncoderConfiguration;
/**
 * Type guard: checks whether `profile` is a [[VideoEncoderConfigurationPreset]] or a [[VideoEncoderConfiguration]].
 */
export declare function isVideoEncoderConfigurationOrPreset(profile: any): profile is VideoEncoderConfigurationPreset | VideoEncoderConfiguration;
/**
 * @ignore
 */
export declare interface ITrack extends EventEmitter {
    /**
     * The type of a media track:
     * - `"audio"`: Audio track.
     * - `"video"`: Video track.
     */
    trackMediaType: "audio" | "video";
    /**
     * Whether a media track is playing on the webpage:
     * - `true`: The media track is playing on the webpage.
     * - `false`: The media track is not playing on the webpage.
     */
    isPlaying: boolean;
    /**
     * Gets the ID of a media track, a unique identifier generated by the SDK.
     *
     * @return The media track ID.
     */
    getTrackId(): string;
    /**
     * Gets the [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
     *
     * @return A [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
     */
    getMediaStreamTrack(): MediaStreamTrack;
    /**
     * Gets the [RTCRtpTransceiver](https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver) instance of the current track.
     *
     * This method is currently mainly used for end-to-end encryption of video streams (Beta).
     *
     * > If the SDK experiences a reconnection, the `RTCRtpTransceiver` instance corresponding to the current track might change. You can obtain the new `RTCRtpTransceiver` instance through the following callbacks:
     * > - For a local track: [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}
     * > - For a remote track: [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}
     *
     * @param type The type of the video stream. See {@link StreamType}.
     * @returns The `RTCRtpTransceiver` instance of the current track.
     */
    getRTCRtpTransceiver(type?: StreamType): RTCRtpTransceiver | undefined;
    /**
     * Plays a media track on the webpage.
     *
     * @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
     * - `string`: Specify the ID of the DOM element.
     * - `HTMLElement`: Pass a DOM object.
     */
    play(element?: string | HTMLElement): void;
    /**
     * Stops playing the media track.
     */
    stop(): void;
}
/**
 * Implementation of {@link ILocalAudioTrack}: a local audio track that can be
 * played through an audio element or WebAudio and piped through audio processors.
 */
export declare class LocalAudioTrack extends LocalTrack implements ILocalAudioTrack {
    readonly trackMediaType: "audio" | "video";
    _encoderConfig?: AudioEncoderConfiguration;
    _trackSource: AudioTrackSource | FakeTrackSource;
    /**
     * Accessing this getter initializes WebAudio. If you are sure you do not
     * want to use WebAudio, read `_trackSource` directly instead.
     */
    get _source(): AudioTrackSource;
    set _source(source: AudioTrackSource | FakeTrackSource);
    _enabled: boolean;
    private _volume;
    _useAudioElement: boolean;
    _bypassWebAudio: boolean;
    protected processor?: IAudioProcessor;
    protected _processorContext: AudioProcessorContext | undefined;
    protected get processorContext(): AudioProcessorContext;
    _processorDestination: AudioProcessorDestination | undefined;
    get processorDestination(): AudioProcessorDestination;
    protected _getOriginVolumeLevel: boolean;
    get isPlaying(): boolean;
    get __className__(): string;
    constructor(track: MediaStreamTrack, encoderConfig?: AudioEncoderConfiguration, trackId?: string, getOriginVolumeLevel?: boolean);
    setVolume(volume: number): void;
    getVolumeLevel(): number;
    setPlaybackDevice(deviceId: string): Promise<void>;
    setEnabled(enabled: boolean, _?: any, skipChangeState?: boolean): Promise<void>;
    protected _setEnabled(enabled: boolean, _?: any, skipChangeState?: boolean): Promise<void>;
    setMuted(muted: boolean): Promise<void>;
    getStats(): LocalAudioTrackStats;
    setAudioFrameCallback(callback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
    play(): void;
    stop(): void;
    close(): void;
    protected _updatePlayerSource(updateWebAudioSource?: boolean): void;
    protected _updateOriginMediaStreamTrack(track: MediaStreamTrack, stopOldTrack: boolean): Promise<void>;
    renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
    pipe(processor: IAudioProcessor): IAudioProcessor;
    unpipe(): void;
    private bindProcessorDestinationEvents;
    private unbindProcessorDestinationEvents;
    protected bindProcessorContextEvents(processorContext: AudioProcessorContext): void;
    protected unbindProcessorContextEvents(processorContext: AudioProcessorContext): void;
    private initWebAudio;
    private initProcessor;
}
/**
 * Information of the local audio track, which can be retrieved by calling [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats}.
 * @public
 */
export declare interface LocalAudioTrackStats {
    /**
     * The audio codec.
     *
     * - `"opus"`: The audio codec is OPUS.
     * - `"aac"`: The audio codec is AAC.
     * - `"pcmu"`: Reserved for future use.
     * - `"pcma"`: Reserved for future use.
     * - `"g722"`: Reserved for future use.
     *
     * > Firefox does not support this property.
     */
    codecType?: "opus" | "aac" | "PCMU" | "PCMA" | "G722";
    /**
     * The energy level of the sent audio.
     *
     * The value range is [0,32767].
     *
     * > This value is retrieved from WebRTC-Stats and may not be up-to-date. To get the real-time sound volume, call [LocalAudioTrack.getVolumeLevel]{@link ILocalAudioTrack.getVolumeLevel}.
     */
    sendVolumeLevel: number;
    /**
     * The bitrate (bps) of the sent audio.
     */
    sendBitrate: number;
    /**
     * The total bytes of the sent audio.
     */
    sendBytes: number;
    /**
     * The total packets of the sent audio.
     */
    sendPackets: number;
    /**
     * The total number of lost audio packets that were sent.
     *
     * > You can not get this property on Safari.
     */
    sendPacketsLost: number;
    /**
     * Jitter (ms) of the audio packets that were sent.
     */
    sendJitterMs: number;
    /**
     * Round-trip time delay (ms) of the audio packets that were sent.
     */
    sendRttMs: number;
    /**
     * The packet loss rate of the sent audio in the last 400 ms.
     */
    currentPacketLossRate: number;
}
/**
 * A local data channel ({@link ILocalDataChannel}) that can send raw binary data to remote users.
 */
export declare class LocalDataChannel extends DataChannel implements ILocalDataChannel {
    send(data: ArrayBuffer): void;
}
/**
 * Abstract base class for local tracks ({@link ILocalTrack}), providing the
 * shared enabled/muted state machine, processor pipeline hooks, and lifecycle management.
 */
export declare abstract class LocalTrack extends Track implements ILocalTrack {
    _enabled: boolean;
    _muted: boolean;
    /**
     * Used to determine whether the user has switched the underlying media stream track.
     */
    _isExternalTrack: boolean;
    get isExternalTrack(): boolean;
    get muted(): boolean;
    get enabled(): boolean;
    _isClosed: boolean;
    protected _enabledMutex: PromiseMutex;
    protected processor?: IBaseProcessor;
    protected abstract _processorContext: IProcessorContext | undefined;
    protected get processorContext(): IProcessorContext | undefined;
    abstract get processorDestination(): IBaseProcessor;
    constructor(track: MediaStreamTrack, trackId?: string);
    abstract getStats(): LocalVideoTrackStats | LocalAudioTrackStats;
    abstract setMuted(enabled: boolean): Promise<void>;
    abstract setEnabled(enabled: boolean): Promise<void>;
    getTrackLabel(): string;
    close(): void;
    protected _updateOriginMediaStreamTrack(track: MediaStreamTrack, stopOldTrack: boolean, isExternalTrack?: boolean): Promise<void>;
    protected abstract _updatePlayerSource(): void;
    protected _getDefaultPlayerConfig(): Partial<PlayerConfig>;
    protected _handleTrackEnded: () => void;
    protected onTrackEnded(): void;
    protected stateCheck(stateName: "enabled" | "muted", state: boolean): void;
    abstract renewMediaStreamTrack(): Promise<void>;
    getProcessorStats(): ProcessorStats[];
    getProcessorUsage(): Promise<UsageWithDirection[]>;
}
/**
 * Internal event names emitted by local tracks.
 */
export declare enum LocalTrackEvents {
    SOURCE_STATE_CHANGE = "source-state-change",
    TRACK_ENDED = "track-ended",
    /** @ignore */
    BEAUTY_EFFECT_OVERLOAD = "beauty-effect-overload",
    VIDEO_ELEMENT_VISIBLE_STATUS = "video-element-visible-status",
    CLOSED = "closed"
}
/**
 * Implementation of {@link ILocalVideoTrack}: a local video track with encoder
 * configuration, SVC/optimization-mode settings, beauty effect, and a video
 * processor pipeline.
 */
export declare class LocalVideoTrack extends LocalTrack implements ILocalVideoTrack {
    readonly trackMediaType: "audio" | "video";
    _player?: AgoraRTCPlayer | VideoPlayer;
    isUseScaleResolutionDownBy: boolean;
    private _videoVisibleTimer;
    private _previousVideoVisibleStatus;
    private _clearPreviousVideoVisibleStatus;
    _encoderConfig?: Partial<VideoEncoderConfiguration>;
    _scalabilityMode?: SVCConfiguration;
    _optimizationMode?: OptimizationMode | "balanced";
    private _videoHeight?;
    private _videoWidth?;
    get videoHeight(): number | undefined;
    get videoWidth(): number | undefined;
    _forceBitrateLimit?: {
        max_bitrate: number;
        min_bitrate: number;
    };
    _enabled: boolean;
    get isPlaying(): boolean;
    protected _processorDestination: VideoProcessorDestination;
    get processorDestination(): VideoProcessorDestination;
    protected _processorContext: VideoProcessorContext;
    protected get processorContext(): VideoProcessorContext;
    protected set processorContext(ctx: VideoProcessorContext);
    get __className__(): string;
    constructor(track: MediaStreamTrack, encoderConfig?: Partial<VideoEncoderConfiguration>, scalabilityConfig?: SVCConfiguration, optimizationMode?: OptimizationMode | "balanced", trackId?: string, hints?: TrackHint[]);
    play(element: HTMLElement | HTMLVideoElement | string, config?: VideoPlayerConfig): void;
    stop(): void;
    setEnabled(enabled: boolean, skipChangeState?: boolean): Promise<void>;
    setMuted(muted: boolean): Promise<void>;
    setEncoderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, doNotRenegoation?: boolean): Promise<void>;
    getStats(): LocalVideoTrackStats;
    setBeautyEffect(enabled: boolean, options?: BeautyEffectOptions): Promise<void>;
    getCurrentFrameData(): ImageData;
    getCurrentFrameImage(imageType: string, quality?: number): Promise<ImageTypedData>;
    setBitrateLimit(bitrateLimit: {
        max_bitrate: number;
        min_bitrate: number;
    }): Promise<void>;
    setOptimizationMode(mode: OptimizationMode | "balanced"): Promise<void>;
    setScalabiltyMode(mode: SVCConfiguration): void;
    updateMediaStreamTrackResolution(): void;
    protected _updatePlayerSource(): void;
    protected _getDefaultPlayerConfig(): Partial<VideoPlayerConfig>;
    protected setSenderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset): Promise<void>;
    updateBitrateFromProfile(): void;
    getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
    renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
    pipe(processor: IBaseProcessor): IBaseProcessor;
    unpipe(): void;
    close(): void;
    clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): LocalVideoTrack;
    replaceTrack(track: MediaStreamTrack, stopOldTrack: boolean): Promise<void>;
    sendSeiData(sei: Uint8Array): never;
    private bindProcessorDestinationEvents;
    private unbindProcessorDestinationEvents;
    private unbindProcessorContextEvents;
}
/**
 * Information of the local video track, which can be retrieved by calling [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats}.
 * @public
 */
export declare interface LocalVideoTrackStats {
    /**
     * The video codec.
     *
     * - `"H264"`: The video codec is H.264.
     * - `"H265"`: The video codec is H.265.
     * - `"VP8"`: The video codec is VP8.
     * - `"VP9"`: The video codec is VP9.
     * - `"AV1X"`: Reserved for future use.
     * - `"AV1"`: The video codec is AV1.
     *
     * > You can not get this property on Firefox.
     */
    codecType?: "H264" | "H265" | "VP8" | "VP9" | "AV1X" | "AV1";
    /**
     * The total bytes of the sent video.
     */
    sendBytes: number;
    /**
     * The frame rate (fps) of the sent video.
     *
     * > You can not get this property on Firefox.
     */
    sendFrameRate?: number;
    /**
     * The frame rate (fps) of the captured video.
     *
     * > You can not get this property on Safari and Firefox.
     */
    captureFrameRate?: number;
    /**
     * The total packets of the sent video.
     */
    sendPackets: number;
    /**
     * The total number of lost video packets that were sent.
     *
     * > - You can not get this property on Safari.
     * > - This property is inaccurate on Firefox.
     */
    sendPacketsLost: number;
    /**
     * Jitter (ms) of the video packets that were sent.
     */
    sendJitterMs: number;
    /**
     * Round-trip time delay (ms) of the video packets that were sent.
     */
    sendRttMs: number;
    /**
     * The resolution height (pixel) of the sent video.
     */
    sendResolutionHeight: number;
    /**
     * The resolution width (pixel) of the sent video.
     */
    sendResolutionWidth: number;
    /**
     * The resolution height (pixel) of the captured video.
     */
    captureResolutionHeight: number;
    /**
     * The resolution width (pixel) of the captured video.
     */
    captureResolutionWidth: number;
    /**
     * The time (ms) required for encoding the captured video.
     */
    encodeDelay?: number;
    /**
     * The bitrate (bps) of the sent video.
     */
    sendBitrate: number;
    /**
     * The target bitrate (bps) of the sent video, namely the bitrate set in {@link VideoEncoderConfiguration}.
     */
    targetSendBitrate: number;
    /**
     * The total duration of the sent video in seconds.
     */
    totalDuration: number;
    /**
     * The total freeze time of the encoded video in seconds.
     */
    totalFreezeTime: number;
    /**
     * The packet loss rate of the sent video in the last 400 ms.
     */
    currentPacketLossRate: number;
}
/**
 * The video profile of the low-quality video stream. Set the video profile of the low-quality video stream when calling [setLowStreamParameter]{@link IAgoraRTCClient.setLowStreamParameter}.
 * @public
 */
export declare interface LowStreamParameter {
    /**
     * Width of the video.
     *
     * You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
     *
     * For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    width: ConstrainULong;
    /**
     * Height of the video.
     *
     * You can pass a `number`, or a constraint such as `{ max: 720, min: 480 }`.
     *
     * For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    height: ConstrainULong;
    /**
     * Frame rate of the video (fps).
     *
     * You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
     *
     * For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    framerate?: ConstrainULong;
    /**
     * Bitrate of the video (Kbps).
     */
    bitrate?: number;
}
/**
 * Numeric code for each [[MediaElementStatus]] value (plus an extra `"uninit"` state).
 */
export declare const MediaElementNumStatus: {
    [n in MediaElementStatus | "uninit"]: number;
};
/**
 * Playback states of the underlying HTML media element, mirroring the
 * media-element event names (`canplay`, `playing`, `stalled`, ...).
 */
export declare enum MediaElementStatus {
    NONE = "none",
    INIT = "init",
    CANPLAY = "canplay",
    PLAYING = "playing",
    PAUSED = "paused",
    SUSPEND = "suspend",
    STALLED = "stalled",
    WAITING = "waiting",
    ERROR = "error",
    DESTROYED = "destroyed",
    ABORT = "abort",
    ENDED = "ended",
    EMPTIED = "emptied",
    LOADEDDATA = "loadeddata"
}
/**
 * Basic media statistics: the current video resolution.
 */
export declare interface MediaStats {
    resolution: Resolution;
}
/**
 * Implementation of {@link IMicrophoneAudioTrack}: a local audio track captured
 * from a microphone, with device switching and constraint renewal.
 */
export declare class MicrophoneAudioTrack extends LocalAudioTrack implements IMicrophoneAudioTrack {
    _config: MicrophoneAudioTrackInitConfig;
    _deviceName: string;
    private _constraints;
    private readonly _originalConstraints;
    _enabled: boolean;
    get __className__(): string;
    constructor(track: MediaStreamTrack, config: MicrophoneAudioTrackInitConfig, constraints: MediaTrackConstraints, trackId?: string);
    setDevice(deviceId: string): Promise<void>;
    setEnabled(enabled: boolean, notCloseDevice?: boolean, skipChangeState?: boolean): Promise<void>;
    close(): void;
    protected onTrackEnded(): void;
    renewMediaStreamTrack(newConstraints?: MediaTrackConstraints): Promise<void>;
    protected bindProcessorContextEvents(processorContext: AudioProcessorContext): void;
    protected unbindProcessorContextEvents(processorContext: AudioProcessorContext): void;
}
/**
 * Configurations for the audio track from the audio captured by a microphone. Set these configurations when calling [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}.
 */
export declare interface MicrophoneAudioTrackInitConfig {
    /**
     * The audio encoder configurations.
     *
     * You can set the audio encoder configurations in either of the following ways:
     * - Pass the preset audio encoder configurations by using [[AudioEncoderConfigurationPreset]].
     * - Pass your customized audio encoder configurations by using [[AudioEncoderConfiguration]].
     *
     * > Firefox does not support setting the audio encoding rate.
     */
    encoderConfig?: AudioEncoderConfiguration | AudioEncoderConfigurationPreset;
    /**
     * Whether to enable acoustic echo cancellation:
     * - `true`: Enable acoustic echo cancellation.
     * - `false`: Do not enable acoustic echo cancellation.
     */
    AEC?: boolean;
    /**
     * Whether to enable audio gain control:
     * - `true`: Enable audio gain control.
     * - `false`: Do not enable audio gain control.
     */
    AGC?: boolean;
    /**
     * Whether to enable automatic noise suppression:
     * - `true`: Enable automatic noise suppression.
     * - `false`: Do not enable automatic noise suppression.
     */
    ANS?: boolean;
    /**
     * @ignore
     */
    DTX?: boolean;
    /**
     * Specifies the microphone ID.
     *
     * You can get a list of the available microphones by calling [AgoraRTC.getMicrophones]{@link IAgoraRTC.getMicrophones}.
     */
    microphoneId?: string;
    /**
     * @ignore
     * Specifies whether the audio track bypasses WebAudio.
     */
    bypassWebAudio?: boolean;
}
/**
 * A local audio track that mixes several [[LocalAudioTrack]]s into a single
 * track via a WebAudio destination node.
 */
export declare class MixingAudioTrack extends LocalAudioTrack {
    get __className__(): string;
    trackList: LocalAudioTrack[];
    private destNode;
    get isActive(): boolean;
    constructor();
    hasAudioTrack(track: LocalAudioTrack): boolean;
    addAudioTrack(track: LocalAudioTrack): void;
    removeAudioTrack(track: LocalAudioTrack): void;
    private updateEncoderConfig;
    _updateRtpTransceiver(transceiver: RTCRtpTransceiver): void;
}
/** Video transmission optimization mode: prioritize smoothness (`"motion"`) or sharpness (`"detail"`). */
export declare type OptimizationMode = "motion" | "detail";
/**
 * @internal
 * Player configuration extended with the track ID and the container element.
 */
export declare interface PlayerConfig extends VideoPlayerConfig {
    trackId: string;
    element: HTMLElement;
}
/**
 * Works around Safari's `AudioNode.disconnect`, which does not support
 * disconnecting a specific destination node: the polyfill records connected
 * nodes in `_inputNodes` via a wrapped `connect`, so that `disconnect` can
 * disconnect everything and then reconnect the remaining nodes.
 */
export declare function polyfillAudioNode(node: AudioNode): void;
/**
 * Internal event names emitted by a processor context.
 */
export declare enum PROCESSOR_CONTEXT_EVENTS {
    REQUEST_UPDATE_CONSTRAINTS = "request_update_constraints",
    REQUEST_CONSTRAINTS = "request_constraints"
}
/**
 * Internal event names emitted by a processor destination.
 */
export declare enum PROCESSOR_DESTINATION_EVENTS {
    ON_TRACK = "on_track",
    ON_NODE = "on_node"
}
/**
 * Usage statistics reported for a single processor.
 */
export declare interface ProcessorUsageStats {
    /** Identifier of the processor. */
    id: string;
    /** Reported usage value. */
    value: number;
    /** Reported usage level. */
    level: number;
    /** Accumulated usage time (presumably in milliseconds — TODO confirm). */
    totalTs: number;
}
/**
 * Implementation of {@link IRemoteAudioTrack}: a subscribed remote audio track
 * that can be played via an audio element or WebAudio and piped through audio processors.
 */
export declare class RemoteAudioTrack extends RemoteTrack implements IRemoteAudioTrack {
    readonly trackMediaType: TrackMediaType.AUDIO;
    _source: AudioTrackSource | FakeTrackSource;
    _useAudioElement: boolean;
    private _volume;
    protected processorContext: AudioProcessorContext;
    processorDestination: AudioProcessorDestination;
    private _played;
    private _bypassWebAudio;
    get isPlaying(): boolean;
    get __className__(): string;
    constructor(track: MediaStreamTrack, userId: UID, uintId: number, store: SDKStore);
    setAudioFrameCallback(callback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
    setVolume(volume: number): void;
    setPlaybackDevice(deviceId: string): Promise<void>;
    getVolumeLevel(): number;
    getStats(): RemoteAudioTrackStats;
    play(): void;
    stop(): void;
    _destroy(): void;
    _isFreeze(): boolean;
    protected _updatePlayerSource(updateWebAudioSource?: boolean): void;
    pipe(processor: IAudioProcessor): IAudioProcessor;
    unpipe(): void;
    private bindProcessorDestinationEvents;
    private unbindProcessorDestinationEvents;
}
/**
 * Statistics of the remote audio track, such as connection and transmission statistics, which can be retrieved by calling [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats}.
 * @public
 */
export declare interface RemoteAudioTrackStats {
    /**
     * Transmission delay (ms).
     *
     * The delay (ms) between a remote client sending the audio and the local client receiving the audio.
     */
    transportDelay: number;
    /**
     * The audio codec.
     *
     * - `"opus"`: The audio codec is OPUS.
     * - `"aac"`: The audio codec is AAC.
     * - `"pcmu"`: Reserved for future use.
     * - `"pcma"`: Reserved for future use.
     * - `"g722"`: Reserved for future use.
     *
     * > Firefox does not support this property.
     */
    codecType?: "opus" | "aac" | "PCMU" | "PCMA" | "G722";
    /**
     * End-to-end delay (ms).
     *
     * The delay (ms) between a remote client sampling the audio and the local client playing the audio.
     * This delay does not include the time spent in encoding at the remote client and the time spent in decoding at the local client.
     */
    end2EndDelay: number;
    /**
     * The bitrate (bps) of the received audio.
     */
    receiveBitrate: number;
    /**
     * The energy level of the received audio.
     *
     * The value range is [0,32767].
     *
     * > This value is retrieved from WebRTC-Stats and may not be up-to-date. To get the real-time sound volume, call [RemoteAudioTrack.getVolumeLevel]{@link IRemoteAudioTrack.getVolumeLevel}.
     */
    receiveLevel: number;
    /**
     * The total bytes of the received audio.
     */
    receiveBytes: number;
    /**
     * The delay (ms) between a remote client sending the audio and the local client playing the audio.
     *
     * > This property is inaccurate on Safari and Firefox.
     */
    receiveDelay: number;
    /**
     * The total packets of the received audio.
     */
    receivePackets: number;
    /**
     * The total number of lost audio packets that should have been received.
     */
    receivePacketsLost: number;
    /**
     * The number of packets discarded by the jitter buffer due to early or late arrival.
     */
    receivePacketsDiscarded: number;
    /**
     * The packet loss rate of the received audio.
     */
    packetLossRate: number;
    /**
     * The packet loss rate of the received audio in the last 400 ms.
     */
    currentPacketLossRate: number;
    /**
     * The total duration of the received audio in seconds.
     */
    totalDuration: number;
    /**
     * The total freeze time of the received audio in seconds.
     */
    totalFreezeTime: number;
    /**
     * The freeze rate of the received audio.
     */
    freezeRate: number;
    /**
     * The publish duration of the remote audio track.
     * A value of `-1` presumably indicates the duration is unavailable — TODO confirm against SDK documentation.
     */
    publishDuration: number;
}
/**
 * A remote data channel ({@link IRemoteDataChannel}) wrapping a subscribed `RTCDataChannel`.
 */
export declare class RemoteDataChannel extends DataChannel implements IRemoteDataChannel {
    private _messageListener;
    constructor(config: IDataChannelConfig);
    _updateOriginDataChannel(datachannel: RTCDataChannel): void;
    _close(): void;
    private _bandRemoteDataChannelEvents;
}
/**
 * The stream fallback option. Set the stream fallback option when calling [setStreamFallbackOption]{@link IAgoraRTCClient.setStreamFallbackOption}.
 * @public
 */
export declare enum RemoteStreamFallbackType {
    /** @en
     * 0: Disable the fallback. The SDK does not downgrade audio/video streams under poor network conditions.
     */
    DISABLE = 0,
    /** @en
     * 1: Automatically subscribe to the low-quality video stream under poor network conditions.
     */
    LOW_STREAM = 1,
    /** @en
     * 2: Subscribe to the low-quality video stream when the network conditions worsen, and subscribe to audio only when the conditions become too poor to support video transmission.
     */
    AUDIO_ONLY = 2,
    /**
     * 3-8: Fall back to a specific high-quality stream layer — presumably used
     * in ABR (adaptive bitrate) scenarios. TODO confirm against SDK documentation.
     */
    HIGH_STREAM_LAYER1 = 3,
    HIGH_STREAM_LAYER2 = 4,
    HIGH_STREAM_LAYER3 = 5,
    HIGH_STREAM_LAYER4 = 6,
    HIGH_STREAM_LAYER5 = 7,
    HIGH_STREAM_LAYER6 = 8
}
/**
 * The video type of the remote stream. Set the video type of the remote stream when calling [setRemoteVideoStreamType]{@link IAgoraRTCClient.setRemoteVideoStreamType}.
 * @public
 */
export declare enum RemoteStreamType {
    /** @en
     * 0: High-quality video stream (high-bitrate, high-resolution).
     */
    HIGH_STREAM = 0,
    /** @en
     * 1: Low-quality video stream (low-bitrate, low-resolution).
     */
    LOW_STREAM = 1,
    /**
     * 4-9: A specific high-quality stream layer — presumably used in ABR
     * (adaptive bitrate) scenarios. TODO confirm against SDK documentation.
     */
    HIGH_STREAM_LAYER1 = 4,
    HIGH_STREAM_LAYER2 = 5,
    HIGH_STREAM_LAYER3 = 6,
    HIGH_STREAM_LAYER4 = 7,
    HIGH_STREAM_LAYER5 = 8,
    HIGH_STREAM_LAYER6 = 9
}
/** Base class for remote (subscribed) audio and video tracks. */
export declare abstract class RemoteTrack extends Track implements IRemoteTrack {
    private _userId;
    /** @internal */
    _uintId: number;
    _isDestroyed: boolean;
    protected store: SDKStore;
    protected processor?: IBaseProcessor;
    protected abstract processorContext: IProcessorContext;
    /** Returns the ID of the remote user publishing this track. */
    getUserId(): UID;
    /** Returns the transmission statistics of this remote track. */
    abstract getStats(): RemoteAudioTrackStats | RemoteVideoTrackStats;
    constructor(track: MediaStreamTrack, userId: UID, uintId: number, store: SDKStore);
    /** Replaces the underlying `MediaStreamTrack` received from the remote side. */
    _updateOriginMediaStreamTrack(track: MediaStreamTrack): void;
    _destroy(): void;
    /** Re-binds the player to the current media stream track. */
    protected abstract _updatePlayerSource(): void;
    /** Collects statistics reported by attached processors. */
    getProcessorStats(): ProcessorStats[];
    /** Collects usage reports from attached processors. */
    getProcessorUsage(): Promise<UsageWithDirection[]>;
}
/** Events emitted by remote tracks. */
export declare enum RemoteTrackEvents {
    FIRST_FRAME_DECODED = "first-frame-decoded",
    VIDEO_ELEMENT_VISIBLE_STATUS = "video-element-visible-status",
    VIDEO_STATE_CHANGED = "video-state-changed"
}
/** A remote (subscribed) video track. */
export declare class RemoteVideoTrack extends RemoteTrack implements IRemoteVideoTrack {
    private _videoVisibleTimer;
    private _previousVideoVisibleStatus;
    private _clearPreviousVideoVisibleStatus;
    readonly trackMediaType: TrackMediaType.VIDEO;
    _videoWidth?: number;
    _videoHeight?: number;
    _player?: AgoraRTCPlayer | VideoPlayer;
    processorDestination: VideoProcessorDestination;
    protected processorContext: VideoProcessorContext;
    /** Whether the track is currently being played on the page. */
    get isPlaying(): boolean;
    get __className__(): string;
    constructor(track: MediaStreamTrack, userId: UID, uintId: number, store: SDKStore);
    /** Returns the transmission statistics of the remote video. */
    getStats(): RemoteVideoTrackStats;
    /** Plays the video on the given element (or the element with the given DOM id). */
    play(element: string | HTMLElement | HTMLVideoElement, config?: VideoPlayerConfig): void;
    /** Stops playback and removes the player from the page. */
    stop(): void;
    /** Captures the currently rendered video frame as `ImageData`. */
    getCurrentFrameData(): ImageData;
    updateMediaStreamTrackResolution(): void;
    protected _updatePlayerSource(): void;
    /** Returns the visibility status of the playing video element, if available. */
    getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
    /** Attaches a video processor to this track's processing pipeline. */
    pipe(processor: IBaseProcessor): IBaseProcessor;
    /** Detaches the piped processor(s) from the pipeline. */
    unpipe(): void;
    private bindProcessorDestinationEvents;
    private unbindProcessorDestinationEvents;
    _destroy(): void;
    /** Invoked when an SEI (supplemental enhancement information) payload is received. */
    _onSei(sei: Uint8Array): void;
}
/**
 * Statistics of the remote video track, such as connection and transmission statistics, which can be retrieved by calling [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats}.
 * @public
 */
export declare interface RemoteVideoTrackStats {
    /**
     * Transmission delay (ms).
     *
     * The delay (ms) between a remote client sending the video and the local client receiving the video.
     */
    transportDelay: number;
    /**
     * The video codec.
     *
     * - `"H264"`: The video codec is H.264.
     * - `"H265"`: The video codec is H.265.
     * - `"VP8"`: The video codec is VP8.
     * - `"VP9"`: The video codec is VP9.
     * - `"AV1X"`: Reserved for future use.
     * - `"AV1"`: The video codec is AV1.
     *
     * > You can not get this property on Firefox.
     */
    codecType?: "H264" | "H265" | "VP8" | "VP9" | "AV1X" | "AV1";
    /**
     * End-to-end delay (ms).
     *
     * The delay (ms) between a remote client capturing the video and the local client playing the video.
     * This delay does not include the time spent in encoding at the remote client and the time spent in decoding at the local client.
     */
    end2EndDelay: number;
    /**
     * The bitrate (bps) of the received video.
     */
    receiveBitrate: number;
    /**
     * The delay (ms) between a remote client sending the video and the local client playing the video.
     *
     * > This property is inaccurate on Safari and Firefox.
     */
    receiveDelay: number;
    /**
     * The total bytes of the received video.
     */
    receiveBytes: number;
    /**
     * The frame rate (fps) of the decoded video.
     */
    decodeFrameRate?: number;
    /**
     * The frame rate (fps) of the received video.
     */
    receiveFrameRate?: number;
    /**
     * The rendering frame rate (fps) of the decoded video.
     */
    renderFrameRate?: number;
    /**
     * The total number of the received video packets.
     */
    receivePackets: number;
    /**
     * The total number of lost video packets that should be received.
     */
    receivePacketsLost: number;
    /**
     * The packet loss rate of the received video.
     */
    packetLossRate: number;
    /**
     * The packet loss rate of the received video within the last 400 ms.
     */
    currentPacketLossRate: number;
    /**
     * The resolution height (pixel) of the received video.
     */
    receiveResolutionHeight: number;
    /**
     * The resolution width (pixel) of the received video.
     */
    receiveResolutionWidth: number;
    /**
     * The total duration of the received video in seconds.
     */
    totalDuration: number;
    /**
     * The total freeze time of the received video in seconds.
     */
    totalFreezeTime: number;
    /**
     * The freeze rate of the received video.
     */
    freezeRate: number;
    /**
     * Publish duration of the remote video track; the original (garbled) note indicates `-1` means no valid value.
     * NOTE(review): units (presumably ms) are not stated in SOURCE — confirm against Agora stats documentation.
     */
    publishDuration: number;
}
/** Removes a track from the module-level `__TRACK_LIST__` registry. */
export declare function removeTrack(track: Track): void;
/** Requests a user gesture to unlock autoplay-blocked media — presumably shows an interaction prompt; confirm in implementation. */
export declare function requestAutoplayGesture(): void;
/** A width/height pair, in pixels. */
export declare interface Resolution {
    width: number;
    height: number;
}
/**
 * Global mutex serializing getUserMedia (GUM) calls.
 *
 * On iOS Safari, concurrent GUM calls can leave previously acquired device
 * tracks muted (see https://bugs.webkit.org/show_bug.cgi?id=179363), so the
 * SDK acquires this lock around device capture.
 */
export declare const SAFARI_GLOBAL_GUM_LOCK: PromiseMutex;
/** Screen-capture constraints, extended with Electron and Chrome-extension specific fields. */
export declare interface ScreenConstraintsWithElectron extends MediaTrackConstraints {
    /**
     * The `sourceId` of the screen source when sharing through Electron.
     */
    sourceId?: string;
    /**
     * The id of the Chrome screen-sharing extension.
     */
    extensionId?: string;
    /**
     * Options (mandatory constraints) passed to the Chrome extension.
     */
    mandatory?: any;
    /**
     * The capture source type; Firefox only.
     */
    mediaSource?: ScreenSourceType;
    width?: ConstrainULong;
    height?: ConstrainULong;
    /**
     * "browser" for tabs.
     * "window" for windows.
     * "monitor" for screens.
     */
    displaySurface?: "browser" | "window" | "monitor";
    /** Whether to hide the current tab in the picker. Defaults to `"include"` (unlike Chrome 107's default `"exclude"`, to stay consistent with earlier browser versions). Chrome 107+, Opera 93+. */
    selfBrowserSurface?: "include" | "exclude";
    /** Whether to allow switching the shared tab dynamically; defaults to `"include"`. Chrome 107+, Opera 93+. */
    surfaceSwitching?: "include" | "exclude";
    /** Whether to capture system audio; defaults to `"include"`. Chrome 105+, Opera 91+. */
    systemAudio?: "include" | "exclude";
}
/**
 * The preset video encoder configurations for screen sharing.
 *
 * You can pass the preset video encoder configurations when calling [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
 *
 * The following table lists all the preset video profiles for screen sharing.
 *
 * | Video Profile | Resolution (Width×Height) | Frame Rate (fps) |
 * | -------- | --------------- | ----------- |
 * | "480p" | 640 × 480 | 5 |
 * | "480p_1" | 640 × 480 | 5 |
 * | "480p_2" | 640 × 480 | 30 |
 * | "480p_3" | 640 × 480 | 15 |
 * | "720p" | 1280 × 720 | 5 |
 * | "720p_1" | 1280 × 720 | 5 |
 * | "720p_2" | 1280 × 720 | 30 |
 * | "720p_3" | 1280 × 720 | 15 |
 * | "720p_auto" <sup>†</sup> | 1280 × 720 | 30 |
 * | "1080p" | 1920 × 1080 | 5 |
 * | "1080p_1" | 1920 × 1080 | 5 |
 * | "1080p_2" | 1920 × 1080 | 30 |
 * | "1080p_3" | 1920 × 1080 | 15 |
 *
 * > <sup>†</sup> `"720p_auto"` is only recommended to be set on Safari to ensure dynamic adjustment of the encoding resolution. For details, see the release notes.
 */
export declare type ScreenEncoderConfigurationPreset = keyof typeof SUPPORT_SCREEN_ENCODER_CONFIG_LIST;
/**
 * The type of the source for screen sharing.
 * - `"screen"`: Sharing the whole screen.
 * - `"application"`: Sharing all windows of an app.
 * - `"window"`: Sharing a window of an app.
 * @public
 */
export declare type ScreenSourceType = "screen" | "window" | "application";
/**
 * Configurations for the video track for screen sharing. Set these configurations when calling [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
 */
export declare interface ScreenVideoTrackInitConfig {
    /**
     * The video encoder configurations for screen sharing.
     *
     * You can set the video encoder configurations in either of the following ways:
     * - Pass the preset video encoder configurations by using [[ScreenEncoderConfigurationPreset]].
     * - Pass your customized video encoder configurations by using [[VideoEncoderConfiguration]].
     * - Leave this property empty to use the SDK's default value, `"1080p_2"` (resolution: 1920 × 1080, frame rate: 30 fps, bitrate: 3000 Kbps).
     */
    encoderConfig?: VideoEncoderConfiguration | ScreenEncoderConfigurationPreset;
    /**
     * The `sourceId` when you share the screen through Electron.
     */
    electronScreenSourceId?: string;
    /**
     * The `extensionId` when you share the screen with a Chrome extension.
     */
    extensionId?: string;
    /**
     * @deprecated from v4.17.1. Use {@link displaySurface} instead.
     *
     * The type of the source for screen sharing.
     */
    screenSourceType?: ScreenSourceType;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.2.0*
     *
     * Sets the video transmission optimization mode.
     *
     * You can call this method during a video call, a live streaming or screen sharing to dynamically change the optimization mode. For example, during the screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
     *
     * > Note: This method supports Chrome only.
     *
     * @param mode The video transmission optimization mode:
     * - `"detail"`: Prioritizes video quality.
     *   - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
     *   - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
     * - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
     *   - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
     *   - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
     */
    optimizationMode?: "motion" | "detail";
    /**
     * @ignore
     *
     * @since
     * <br>&emsp;&emsp;&emsp;*4.18.0*
     *
     * Configurations for Scalable Video Coding (SVC).
     *
     * You can set the configurations using one of the following options:
     * - Use the preset SVC configurations provided by the SDK through {@link SVCConfigurationPreset}.
     * - Use your custom SVC configurations through {@link SVCConfiguration}.
     *
     * NOTE(review): the property name is misspelled upstream ("scalabilty"); it is kept as-is for API compatibility.
     */
    scalabiltyMode?: SVCConfiguration | SVCConfigurationPreset;
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.17.1*
     *
     * The pre-selected pane in the media picker. See [displaySurface](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#displaySurface) for details.
     *
     * > Note: This property is supported on Chrome 107 and later, as well as Edge 107 and later.
     */
    displaySurface?: "browser" | "window" | "monitor";
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.17.1*
     *
     * Whether to allow the user to share the current tab:
     * - `"include"`: (Default) Allows the user to share the current tab.
     * - `"exclude"`: Prevents the user from sharing the current tab.
     *
     * See [selfBrowserSurface](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#selfBrowserSurface) for details.
     *
     * > Note:
     * > - This property defaults to `"exclude"` on Chrome 107. For better compatibility with earlier versions, the SDK changes the default value to `"include"`, which ensures that users can still share the current tab after upgrading to Chrome 107.
     * > - This property is supported on Chrome 107 and later, as well as Edge 107 and later.
     */
    selfBrowserSurface?: "include" | "exclude";
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.17.1*
     *
     * Whether to allow the user to dynamically switch between shared tabs:
     * - `"include"`: (Default) The user can dynamically switch between shared tabs.
     * - `"exclude"`: The user cannot dynamically switch between shared tabs.
     *
     * See [surfaceSwitching](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#surfaceSwitching) for details.
     *
     * > Note: This property is supported on Chrome 107 and later, as well as Edge 107 and later.
     */
    surfaceSwitching?: "include" | "exclude";
    /**
     * @since
     * <br>&emsp;&emsp;&emsp;*4.17.1*
     *
     * Whether to capture system audio:
     * - `"include"`: (Default) Captures system audio.
     * - `"exclude"`: Avoids capturing system audio.
     *
     * See [systemAudio](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#systemAudio) for details.
     *
     * > Note: This property is supported on Chrome 105 and later (Windows only), as well as Edge 105 and later (Windows only).
     */
    systemAudio?: "include" | "exclude";
}
/** Per-sender encoding parameters (bitrate bounds and downscale factor) — field names mirror `RTCRtpEncodingParameters`; confirm where they are applied. */
export declare type SenderConfig = {
    bitrateMax: number;
    bitrateMin: number;
    scaleResolutionDownBy: number;
};
/**
 * ScriptProcessor handler that fills the output with silence for each
 * `AudioProcessingEvent` and returns the resulting `AudioBuffer`.
 */
export declare function silenceScriptProcessHandler(e: AudioProcessingEvent): AudioBuffer;
/** Registry entry binding a processor to a stats-collection callback. */
export declare interface StatsRegistry {
    processorID: string;
    processorName: string;
    type: string;
    cb: Function;
}
/** The video stream type (high- or low-quality). */
export declare enum StreamType {
    /** 0: High-quality video stream (high-bitrate, high-resolution). */
    HIGH_STREAM = 0,
    /** 1: Low-quality video stream (low-bitrate, low-resolution). */
    LOW_STREAM = 1
}
/** Encoder configuration candidates — presumably backing the `"720p_auto"` preset; confirm in implementation. */
export declare const SUPPORT_720P_AUTO_CONFIG_LIST: Required<{
    scaleResolutionDownBy: number;
    width: number;
    height: number;
    frameRate: number;
    bitrateMin: number;
    bitrateMax: number;
}>[];
/**
 * @ignore
 * Preset screen-sharing encoder configurations, keyed by preset name.
 */
export declare const SUPPORT_SCREEN_ENCODER_CONFIG_LIST: Record<string, VideoEncoderConfiguration>;
/**
 * @ignore
 * Preset SVC configurations, keyed by preset name.
 */
export declare const SUPPORT_SVC_CONFIG_LIST: Record<string, SVCConfiguration>;
/**
 * @ignore
 * Preset camera video encoder configurations, keyed by preset name.
 */
export declare const SUPPORT_VIDEO_ENCODER_CONFIG_LIST: Record<string, VideoEncoderConfiguration>;
/**
 * @ignore
 * @since
 * <br>&emsp;&emsp;&emsp;*4.18.0*
 *
 * Custom SVC (Scalable Video Coding) encoding configurations.
 *
 * You can control the SVC configurations for local video by passing `SVCConfiguration` in the following methods:
 * - [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}
 * - [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}
 * - [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}
 */
export declare interface SVCConfiguration {
    /**
     * Specifies the number of spatial layers in SVC.
     */
    numSpatialLayers: 1 | 2 | 3;
    /**
     * Specifies the number of temporal layers in SVC.
     */
    numTemporalLayers: 1 | 3;
}
/**
 * @ignore
 *
 * @since
 * <br>&emsp;&emsp;&emsp;*4.18.0*
 *
 * The preset SVC configurations provided by the SDK.
 * - `"1SL1TL"`: 1 spatial layer, 1 temporal layer.
 * - `"3SL3TL"`: 3 spatial layers, 3 temporal layers.
 * - `"2SL3TL"`: 2 spatial layers, 3 temporal layers.
 *
 * You can control the SVC configurations for local video by passing these preset values in the following methods:
 * - [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}
 * - [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}
 * - [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}
 */
export declare type SVCConfigurationPreset = keyof typeof SUPPORT_SVC_CONFIG_LIST;
/** Base class for all local and remote media tracks. */
export declare abstract class Track extends EventEmitter implements ITrack {
    abstract readonly trackMediaType: "audio" | "video";
    private _ID;
    protected _rtpTransceiver?: RTCRtpTransceiver;
    protected _lowRtpTransceiver?: RTCRtpTransceiver;
    abstract get isPlaying(): boolean;
    /** Extra information describing this track (see {@link TrackHint}). */
    _hints: TrackHint[];
    _isClosed: boolean;
    /** The original track object, coming from capture / user input / subscription. */
    _originMediaStreamTrack: MediaStreamTrack;
    /** The track used for playback and publishing; may be modified by various modules and is generally equal to `_originMediaStreamTrack`. */
    mediaStreamTrack: MediaStreamTrack;
    set _mediaStreamTrack(track: MediaStreamTrack);
    get _mediaStreamTrack(): MediaStreamTrack;
    /** Exposed to external plugins for data synchronization. */
    _external: ExternalMethods;
    constructor(track: MediaStreamTrack, trackId?: string);
    toString(): string;
    getTrackId(): string;
    getMediaStreamTrack(fromInternal?: boolean): MediaStreamTrack;
    /** Returns the RTP transceiver for the given stream type (high by default). */
    getRTCRtpTransceiver(type?: StreamType): RTCRtpTransceiver | undefined;
    getMediaStreamTrackSettings(): MediaTrackSettings;
    protected close(): void;
    abstract play(element?: HTMLElement | string): void;
    abstract stop(): void;
    _updateRtpTransceiver(transceiver?: RTCRtpTransceiver, type?: StreamType): void;
}
/** Events emitted by tracks. */
export declare enum TrackEvents {
    TRANSCEIVER_UPDATED = "transceiver-updated",
    SEI_TO_SEND = "sei-to-send",
    SEI_RECEIVED = "sei-received",
    TRACK_UPDATED = "track-updated"
}
/**
 * Hints describing how a track was created (e.g. via `track.clone`).
 */
export declare enum TrackHint {
    /** This track was created by cloning a screen-sharing video track. */
    SCREEN_TRACK = "screen_track",
    /** This track was created by cloning a custom video track. (Note: the string value's "custome" spelling is upstream and must be preserved.) */
    CUSTOM_TRACK = "custome_track",
    /** This track was created by cloning a low-quality (low-stream) video track. */
    LOW_STREAM = "low_stream",
    /** Marks whether this track comes from the low-quality stream of a screen share. */
    SCREEN_LOW_TRACK = "screen_low_track"
}
/** Internal events a track emits toward the client/connection layer. */
export declare enum TrackInternalEvent {
    NEED_RENEGOTIATE = "@need_renegotiate",//deprecated
    NEED_REPLACE_TRACK = "@need_replace_track",
    NEED_REPLACE_MIXING_TRACK = "@need_replace_mixing_track",
    NEED_CLOSE = "@need_close",
    NEED_ENABLE_TRACK = "@need_enable_track",
    NEED_DISABLE_TRACK = "@need_disable_track",
    NEED_SESSION_ID = "@need_sid",
    SET_OPTIMIZATION_MODE = "@set_optimization_mode",
    GET_STATS = "@get_stats",
    GET_RTC_STATS = "@get_rtc_stats",
    GET_LOW_VIDEO_TRACK = "@get_low_video_track",
    NEED_RESET_REMOTE_SDP = "@need_reset_remote_sdp",//deprecated
    NEED_UPDATE_VIDEO_ENCODER = "@need_update_video_encoder",
    NEED_UPDATE_VIDEO_SEND_PARAMETERS = "@need_update_video_send_parameters",
    NEED_MUTE_TRACK = "@need_mute_track",
    NEED_UNMUTE_TRACK = "@need_unmute_track"
}
/** The media type of a track. */
export declare enum TrackMediaType {
    AUDIO = "audio",
    VIDEO = "video",
    DATA = "data"
}
/** Re-detects browser capabilities and refreshes the cached {@link AgoraRTCCompatibility} flags. */
export declare function updateAgoraRTCCompatibility(): void;
/** Registry entry binding a processor to a usage-reporting callback. */
export declare interface UsageRegistry {
    processorID: string;
    processorName: string;
    cb: () => Usage | Promise<Usage>;
}
/**
 * `VideoEncoderConfiguration` is the interface that defines the video encoder configurations.
 *
 * You can customize the video encoder configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
 *
 * The SDK provides the preset video encoder configurations. For more information, see [[VideoEncoderConfigurationPreset]].
 *
 * > The actual bitrate may differ slightly from the value you set due to the limitations of the operation system or the web browser. Agora recommends setting the bitrate between 100 Kbps and 5000 Kbps.
 * @public
 */
export declare interface VideoEncoderConfiguration {
    /**
     * Width of the video.
     *
     * You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
     *
     * For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    width?: number | ConstrainLong;
    /**
     * Height of the video.
     *
     * You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
     *
     * For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    height?: number | ConstrainLong;
    /**
     * Frame rate of the video (fps).
     *
     * You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
     *
     * For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
     */
    frameRate?: number | ConstrainLong;
    /**
     * The minimum bitrate of the video (Kbps).
     */
    bitrateMin?: number;
    /**
     * The maximum bitrate of the video (Kbps).
     */
    bitrateMax?: number;
    /**
     * @ignore
     */
    scaleResolutionDownBy?: number;
}
/**
 * The preset video encoder configurations.
 *
 * You can pass the preset video encoder configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
 *
 * The following table lists all the preset video profiles. The SDK uses `"480p_1"` by default.
 *
 * | Video Profile | Resolution (Width×Height) | Frame Rate (fps) | Chrome | Firefox | Safari |
 * | -------- | --------------- | ----------- | ------ | ------- | ------ |
 * | 120p | 160 × 120 | 15 | | | |
 * | 120p_1 | 160 × 120 | 15 | | | |
 * | 120p_3 | 120 × 120 | 15 | | | |
 * | 180p | 320 × 180 | 15 | | | |
 * | 180p_1 | 320 × 180 | 15 | | | |
 * | 180p_3 | 180 × 180 | 15 | | | |
 * | 180p_4 | 240 × 180 | 15 | | | |
 * | 240p | 320 × 240 | 15 | | | |
 * | 240p_1 | 320 × 240 | 15 | | | |
 * | 240p_3 | 240 × 240 | 15 | | | |
 * | 240p_4 | 424 × 240 | 15 | | | |
 * | 360p | 640 × 360 | 15 | | | |
 * | 360p_1 | 640 × 360 | 15 | | | |
 * | 360p_3 | 360 × 360 | 15 | | | |
 * | 360p_4 | 640 × 360 | 30 | | | |
 * | 360p_6 | 360 × 360 | 30 | | | |
 * | 360p_7 | 480 × 360 | 15 | | | |
 * | 360p_8 | 480 × 360 | 30 | | | |
 * | 360p_9 | 640 × 360 | 15 | | | |
 * | 360p_10 | 640 × 360 | 24 | | | |
 * | 360p_11 | 640 × 360 | 24 | | | |
 * | 480p | 640 × 480 | 15 | | | |
 * | 480p_1 | 640 × 480 | 15 | | | |
 * | 480p_2 | 640 × 480 | 30 | | | |
 * | 480p_3 | 480 × 480 | 15 | | | |
 * | 480p_4 | 640 × 480 | 30 | | | |
 * | 480p_6 | 480 × 480 | 30 | | | |
 * | 480p_8 | 848 × 480 | 15 | | | |
 * | 480p_9 | 848 × 480 | 30 | | | |
 * | 480p_10 | 640 × 480 | 10 | | | |
 * | 720p | 1280 × 720 | 15 | | | |
 * | 720p_1 | 1280 × 720 | 15 | | | |
 * | 720p_2 | 1280 × 720 | 30 | | | |
 * | 720p_3 | 1280 × 720 | 30 | | | |
 * | 720p_auto <sup>†</sup> | 1280 × 720 | 30 | | | |
 * | 720p_5 | 960 × 720 | 15 | | | |
 * | 720p_6 | 960 × 720 | 30 | | | |
 * | 1080p | 1920 × 1080 | 15 | | | |
 * | 1080p_1 | 1920 × 1080 | 15 | | | |
 * | 1080p_2 | 1920 × 1080 | 30 | | | |
 * | 1080p_3 | 1920 × 1080 | 30 | | | |
 * | 1080p_5 | 1920 × 1080 | 60 | | | |
 *
 * > <sup>†</sup> `"720p_auto"` is only recommended to be set on Safari to ensure dynamic adjustment of the encoding resolution. For details, see the release notes.
 * @public
 */
export declare type VideoEncoderConfigurationPreset = keyof typeof SUPPORT_VIDEO_ENCODER_CONFIG_LIST;
/**
 * @internal
 * Internal video renderer: manages the `<video>` element lifecycle,
 * autoplay-failure recovery, and render-freeze bookkeeping for a track.
 */
declare class VideoPlayer {
    trackId: string;
    config: PlayerConfig;
    onFirstVideoFrameDecoded?: () => void;
    onVideoStateChanged?: (state: VideoState) => void;
    freezeTimeCounterList: number[];
    renderFreezeAccTime: number;
    isKeepLastFrame: boolean;
    /** Used to compute freeze time over 500 ms windows. */
    private timeUpdatedCount;
    private freezeTime;
    private playbackTime;
    private lastTimeUpdatedTime;
    /** Records the initial autoplay failure; used to handle abnormal states on Safari. */
    private autoplayFailed;
    protected videoTrack?: MediaStreamTrack;
    protected videoElement: HTMLVideoElement;
    protected cacheVideoElement?: HTMLVideoElement;
    private renderStats?;
    get rendFrameRate(): number;
    get videoElementStatus(): MediaElementStatus;
    set videoElementStatus(status: MediaElementStatus);
    get videoState(): VideoState;
    set videoState(state: VideoState);
    private _videoState;
    private videoElementCheckInterval?;
    private videoElementFreezeTimeout?;
    private _videoElementStatus;
    private isGettingVideoDimensions;
    constructor(config: PlayerConfig);
    getVideoElement(): HTMLVideoElement | undefined;
    getContainerElement(): HTMLDivElement | undefined;
    updateConfig(config: PlayerConfig): void;
    updateVideoTrack(track?: MediaStreamTrack): void;
    play(sessionId?: string): void;
    /** Captures the current frame as `ImageData`. */
    getCurrentFrame(): ImageData;
    /** Captures the current frame and encodes it to the given image MIME type. */
    getCurrentFrameToUint8Array(type: string, quality?: number): Promise<ImageTypedData>;
    destroy(): void;
    protected initVideoElement(): void;
    protected resetVideoElement(): void;
    private startGetVideoDimensions;
    private handleAutoPlayFailed;
    private autoResumeAfterInterruption;
    private handleVideoEvents;
    private autoResumeAfterInterruptionOnIOS15_16;
}
/**
 * Playback configurations for a video track. Set the playback configurations for a video track when calling [ILocalVideoTrack.play]{@link ILocalVideoTrack.play}.
 */
export declare interface VideoPlayerConfig {
    /**
     * Sets whether to enable mirror mode:
     * - `true`: Enable mirror mode.
     * - `false`: Disable mirror mode.
     *
     * > Notes:
     * > - The SDK enables mirror mode for the local video track by default.
     * > - The SDK disables mirror mode for the remote video track by default.
     */
    mirror?: boolean;
    /**
     * Sets video display mode:
     * - `"cover"`: The image fills the height and width of the box, while maintaining its aspect ratio but often cropping the image in the process. For more information, see the `cover` option of `object-fit` in CSS.
     * - `"contain"`: The size of the image increases or decreases to fill the box while preserving its aspect-ratio. Areas that are not filled due to the disparity in the aspect ratio are filled with black. For more information, see the `contain` option of `object-fit` in CSS.
     * - `"fill"`: The image stretches to fit the box, regardless of its aspect-ratio. For more information, see the `fill` option of `object-fit` in CSS.
     *
     * > Notes:
     * > - When playing the local camera video track, the SDK uses cover mode by default; when playing the local video track of screen sharing, the SDK uses contain mode by default.
     * > - When playing the remote video track, the SDK uses cover mode by default.
     */
    fit?: "cover" | "contain" | "fill";
}
/** Pipeline context shared with video processors: constraint requests plus stats/usage registration. */
export declare class VideoProcessorContext extends EventEmitter implements IProcessorContext {
    private constraintsMap;
    private statsRegistry;
    private usageRegistry;
    private readonly trackId;
    private readonly direction;
    private _chained;
    /** Whether a processor chain is currently attached. */
    set chained(chained: boolean);
    get chained(): boolean;
    constructor(trackId: string, direction: "local" | "remote");
    /** Returns the current media track constraints. */
    getConstraints(): Promise<MediaTrackConstraints>;
    /** Applies constraints to the track on behalf of the given processor. */
    requestApplyConstraints(constraints: MediaTrackConstraints, processor: IBaseProcessor): Promise<void>;
    /** Reverts constraints previously requested by the given processor. */
    requestRevertConstraints(processor: IBaseProcessor): Promise<void>;
    registerStats(processor: IBaseProcessor, type: string, cb: () => any): void;
    unregisterStats(processor: IBaseProcessor, type: string): void;
    /** Collects stats from every registered processor callback. */
    gatherStats(): ProcessorStats[];
    registerUsage(processor: IBaseProcessor, cb: () => Usage): void;
    unregisterUsage(processor: IBaseProcessor): void;
    /** Collects usage reports from every registered processor callback. */
    gatherUsage(): Promise<UsageWithDirection[]>;
    getDirection(): "local" | "remote";
}
/** Terminal node of a video processor pipeline; receives the processed output for the track. */
export declare class VideoProcessorDestination extends EventEmitter implements IBaseProcessor {
    name: string;
    ID: string;
    _source?: IBaseProcessor;
    private readonly videoContext;
    constructor(videoContext: VideoProcessorContext);
    private inputTrack?;
    get kind(): Kind;
    get enabled(): boolean;
    pipe(): IBaseProcessor;
    unpipe(): void;
    enable(): void;
    disable(): void;
    /** Receives the processed track/node from the upstream processor. */
    updateInput(inputOptions: {
        track?: MediaStreamTrack;
        node?: AudioNode;
        context: IProcessorContext;
    }): void;
    reset(): void;
}
/**
 * The state of the video stream.
 * @public
 */
export declare enum VideoState {
    /** 0: The initial state of the video. */
    VideoStateStopped = 0,
    /** 1: The local user has received the first video packet. */
    VideoStateStarting = 1,
    /** 2: The video stream is being decoded and played normally. */
    VideoStateDecoding = 2,
    /** 3: The video stream is frozen. */
    VideoStateFrozen = 3
}
/** Watches document visibility changes and records the last hide/show timestamps. */
declare class VisibilityWatcher extends EventEmitter {
    private _lastHiddenTime;
    private _lastVisibleTime;
    /** Current document visibility state (`"visible"` | `"hidden"`). */
    get visibility(): DocumentVisibilityState;
    get lastHiddenTime(): DOMHighResTimeStamp;
    get lastVisibleTime(): DOMHighResTimeStamp;
    constructor();
}
/** Shared module-level visibility watcher instance. */
export declare const visibilityWatcher: VisibilityWatcher;
/** WebAudio-based volume meter: routes a source node through an `AnalyserNode` to report the current volume level. */
declare class VolumeLevelAnalyser {
    private readonly context;
    private analyserNode;
    private sourceNode?;
    constructor();
    /** Connects the given audio source for analysis (disconnects when omitted). */
    updateSource(sourceNode?: AudioNode): void;
    /** Returns the current volume level of the source. */
    getVolumeLevel(): number;
    getAnalyserNode(): AnalyserNode;
    /** Recreates the analyser node. */
    rebuildAnalyser(): void;
    destroy(): void;
}
export { }