` tag.
* @asMemberOf IRemoteVideoTrack
* @event
*/
declare function event_video_element_visible_status_2(data?: CheckVideoVisibleResult): void;
/**
 * Occurs when the video state changes.
 *
 * @param videoState The new video state. See {@link VideoState}.
 * @event
 * @asMemberOf IRemoteTrack
 */
declare function event_video_state_changed(videoState: VideoState): void;
/**
 * Reports all the speaking local and remote users and their volumes.
 *
 * It is disabled by default. You can enable this callback by calling {@link enableAudioVolumeIndicator}.
 * If enabled, it reports the users' volumes every two seconds regardless of whether there are users speaking.
 *
 * The volume is an integer ranging from 0 to 100. Usually a user with volume above 60 is a speaking user.
 *
 * ```javascript
 * client.on("volume-indicator", function(result){
 *   result.forEach(function(volume, index){
 *     console.log(`${index} UID ${volume.uid} Level ${volume.level}`);
 *   });
 * });
 * ```
 *
 * @param result An array with one entry per speaking user. Each entry is an object consisting of the following properties:
 * - level: The volume of the speaking user, ranging from 0 to 100.
 * - uid: The ID of the speaking user.
 *
 * @asMemberOf IAgoraRTCClient
 * @event
 */
declare function event_volume_indicator(result: {
/**
 * The volume of the speaking user, ranging from 0 to 100.
 */
level: number;
/**
 * The ID of the speaking user.
 */
uid: UID;
}[]): void;
/**
 * Parameters for reporting customized messages. Used when calling [AgoraRTCClient.sendCustomReportMessage]{@link IAgoraRTCClient.sendCustomReportMessage}.
 */
export declare interface EventCustomReportParams {
/**
 * The ID of the message.
 */
reportId: string;
/**
 * The category of the message.
 */
category: string;
/**
 * The event name of the message.
 */
event: string;
/**
 * The label of the message.
 */
label: string;
/**
 * The value of the message. A numeric payload associated with the event.
 */
value: number;
}
/**
 * The `EventEmitter` class provides a way to define, emit, and handle events.
 */
declare class EventEmitter {
// Internal listener storage — presumably a map of event name to listener list; implementation not visible here.
private _events;
/**
 * Gets all the listeners for a specified event.
 *
 * @param event The event name.
 * @returns The listener functions currently registered for `event`.
 */
getListeners(event: string): Function[];
/**
 * Listens for a specified event.
 *
 * When the specified event happens, the SDK triggers the callback that you pass.
 * @param event The event name.
 * @param listener The callback to trigger.
 */
on(event: string, listener: Function): void;
/**
 * Listens for a specified event once.
 *
 * When the specified event happens, the SDK triggers the callback that you pass and then removes the listener.
 * @param event The event name.
 * @param listener The callback to trigger.
 */
once(event: string, listener: Function): void;
/**
 * Removes the listener for a specified event.
 *
 * @param event The event name.
 * @param listener The callback that corresponds to the event listener.
 */
off(event: string, listener: Function): void;
/**
 * Removes all listeners for a specified event.
 *
 * @param event The event name. If left empty, all listeners for all events are removed.
 */
removeAllListeners(event?: string): void;
// Internal helper — name suggests it locates a listener's position in an event's listener list.
private _indexOfListener;
}
/**
 * The entry point of the Agora Web SDK.
 */
export declare interface IAgoraRTC extends EventEmitter {
/**
 * The version of the Agora Web SDK.
 */
VERSION: string;
/**
 * Listens for the `"camera-changed"` event.
 *
 * @since
 * *4.18.0*
 *
 * @param event The event name.
 * @param listener See {@link event_camera_changed}.
 */
on(event: "camera-changed", listener: typeof event_camera_changed): void;
/**
 * Listens for the `"microphone-changed"` event.
 *
 * @since
 * *4.18.0*
 *
 * @param event The event name.
 * @param listener See {@link event_microphone_changed}.
 */
on(event: "microphone-changed", listener: typeof event_microphone_changed): void;
/**
 * Listens for the `"playback-device-changed"` event.
 *
 * @since
 * *4.18.0*
 *
 * @param event The event name.
 * @param listener See {@link event_playback_device_changed}.
 */
on(event: "playback-device-changed", listener: typeof event_playback_device_changed): void;
/**
 * Listens for the `"autoplay-failed"` event.
 *
 * @since
 * *4.18.0*
 *
 * @param event The event name.
 * @param listener See {@link event_autoplay_failed}.
 */
on(event: "autoplay-failed", listener: typeof event_autoplay_failed): void;
/**
 * Listens for the `"security-policy-violation"` event.
 *
 * @since
 * *4.18.0*
 *
 * @param event The event name.
 * @param listener See {@link event_security_policy_violation}.
 */
on(event: "security-policy-violation", listener: typeof event_security_policy_violation): void;
/**
 * Listens for the `"audio-context-state-changed"` event.
 *
 * @since
 * *4.20.0*
 *
 * @param event The event name.
 * @param listener See {@link event_audio_context_state_changed}.
 */
on(event: "audio-context-state-changed", listener: typeof event_audio_context_state_changed): void;
/**
 * Resumes audio and video playback.
 *
 * On some versions of iOS devices, the app call might not automatically resume after being interrupted by a WeChat call or system phone call. You can call this method to resume the app call.
 *
 * Agora recommends that you listen for the `"audio-context-state-changed"` event using {@link IAgoraRTC.on},
 * and handle the following in the callback function {@link event_audio_context_state_changed}:
 * - When the state changes to `"interrupted"`, display a pop-up to notify the user that the app call is interrupted and needs to be resumed by clicking a button. After the user clicks the button, call `resumeAudioContext`.
 * - When the state changes to `"running"`, close the pop-up.
 */
resumeAudioContext(): void;
/**
 * Gets the codecs that the browser supports.
 *
 * This method gets a list of the codecs supported by the SDK and the web browser. The Agora Web SDK supports video codecs VP8 and H.264, and audio codec OPUS.
 *
 * > Note:
 * > - The method works with all major browsers. It gets an empty list if it does not recognize the browser or the browser does not support WebRTC.
 * > - The returned codec list is based on the [SDP](https://tools.ietf.org/html/rfc4566) used by the web browser and is for reference only.
 * > - Some Android phones claim to support H.264 but have problems in communicating with other platforms using this codec, in which case we recommend VP8 instead.
 *
 * ```javascript
 * AgoraRTC.getSupportedCodec().then(result => {
 *   console.log(`Supported video codec: ${result.video.join(",")}`);
 *   console.log(`Supported audio codec: ${result.audio.join(",")}`);
 * });
 * ```
 * @returns A `Promise` object. In the `.then(function(result){})` callback, `result` has the following properties:
 * - `video`: array, the supported video codecs. The array may include `"H264"`, `"VP8"`, or be empty.
 * - `audio`: array, the supported audio codecs. The array may include `"OPUS"`, or be empty.
 */
getSupportedCodec(): Promise<{
video: string[];
audio: string[];
}>;
/**
 * Checks the compatibility of the current browser.
 *
 * Use this method before calling {@link createClient} to check if the SDK is compatible with the web browser.
 *
 * @returns
 * - `true`: The SDK is compatible with the current web browser.
 * - `false`: The SDK is incompatible with the current web browser.
 */
checkSystemRequirements(): boolean;
/**
 * Preload channels using `appid`, `channel`, `token`, and `uid`.
 *
 * Calling this method reduces the time it takes to join a channel when the viewer switches channels frequently, thus shortening the time it takes for the viewer to hear the first frame of the host's audio as well as to see the first frame of the screen, and improving the video experience on the viewer's end.
 *
 * If the current channel has been preloaded successfully and the viewer needs to join the channel again after joining or leaving the channel, there is no need to re-preload the channel as long as the token passed in during preloading is still valid.
 *
 * > Note:
 * > - Preload is only valid for two minutes.
 * > - In order to protect page performance, this method adopts a one-time best-effort strategy and cannot guarantee success. However, a failed preload will not affect the viewer's ability to join the channel normally, nor will it increase the time taken to join the channel.
 * > - The system caches up to 10 latest preloading data.
 * > - Currently this method does not support forwarding via proxy.
 *
 * @param appid The [App ID](https://docs.agora.io/en/Agora%20Platform/terms?platform=All%20Platforms#appid) of your Agora project.
 *
 * @param channel A string that provides a unique channel name for the call. The length must be within 64 bytes. Supported character scopes:
 * - All lowercase English letters: a to z.
 * - All uppercase English letters: A to Z.
 * - All numeric characters: 0 to 9.
 * - The space character.
 * - Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",".
 *
 * @param token The token generated at your server:
 * - For low-security requirements: You can use the temporary token generated at Console. For details, see [Get an RTC temporary token](https://docs.agora.io/en/Agora%20Platform/get_appid_token?platform=All%20Platforms#generate-an-rtc-temporary-token).
 * - For high-security requirements: Set it as the token generated at your server. For details, see [Authenticate Your Users with Tokens](https://docs.agora.io/en/Video/token_server?platform=Web).
 *
 * @param uid The user ID, an integer or a string, ASCII characters only. Ensure this ID is unique. If you set the `uid` to `null`, the Agora server assigns an integer uid.
 * - If you use a number as the user ID, it should be a 32-bit unsigned integer with a value ranging from 0 to (2^32 - 1).
 * - If you use a string as the user ID, the maximum length is 255 characters.
 *
 * To ensure a better end-user experience, Agora recommends using a number as the user ID.
 *
 * > Note:
 * > - All users in the same channel should have the same type (number or string) of `uid`.
 * > - You can use string UIDs to interoperate with the Native SDK 2.8 or later. Ensure that the Native SDK uses the User Account to join the channel. See [Use String User Accounts](https://docs.agora.io/en/faq/string).
 * > - To ensure the data accuracy in Agora Analytics, Agora recommends that you specify `uid` for each user and ensure it is unique.
 *
 * @returns A `Promise` that resolves when the preload request completes.
 */
preload(appid: string, channel: string, token: string | null, uid?: UID | null): Promise<void>;
/**
 * Creates a local client object for managing a call.
 *
 * This is usually the first step of using the Agora Web SDK.
 * @param config The configurations for the client object, including channel profile and codec. The default codec is `vp8` and default channel profile is `rtc`. See {@link ClientConfig} for details.
 * @returns The created client object.
 * @category Agora Core
 */
createClient(config: ClientConfig): IAgoraRTCClient;
/**
 * Creates a customized audio track.
 *
 * This method creates a customized audio track from a [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
 *
 * @param config Configurations for the customized audio track, including the source `MediaStreamTrack`. See {@link CustomAudioTrackInitConfig}.
 * @returns The created local audio track object.
 * @category Local Track
 */
createCustomAudioTrack(config: CustomAudioTrackInitConfig): ILocalAudioTrack;
/**
 * Creates an audio track from the audio sampled by a microphone.
 *
 * @param config Configurations for the sampled audio, such as the capture device and the encoder configuration. See {@link MicrophoneAudioTrackInitConfig}.
 * @returns A `Promise` that resolves with the created microphone audio track.
 * @category Local Track
 */
createMicrophoneAudioTrack(config?: MicrophoneAudioTrackInitConfig): Promise<IMicrophoneAudioTrack>;
/**
 * Creates an audio track from an audio file or [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object.
 *
 * This method works with both the local and online audio files, supporting the following formats:
 * - MP3.
 * - AAC.
 * - Other audio formats supported by the browser.
 * @param config Configurations such as the file path, caching strategies, and encoder configuration.
 * @returns A `Promise` that resolves with the created track. Unlike other audio track objects, this audio track object adds the methods for audio playback control, such as playing, pausing, seeking and playback status querying.
 * @category Local Track
 */
createBufferSourceAudioTrack(config: BufferSourceAudioTrackInitConfig): Promise<IBufferSourceAudioTrack>;
/**
 * Creates a customized video track.
 *
 * This method creates a customized video track from a [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
 * @param config Configurations for the customized video track. See {@link CustomVideoTrackInitConfig}.
 * > As of v4.17.1, you can set the resolution and frame rate (in addition to the sending bitrate) for a customized video track by [config]{@link CustomVideoTrackInitConfig}.
 * @returns The created local video track object.
 * @category Local Track
 */
createCustomVideoTrack(config: CustomVideoTrackInitConfig): ILocalVideoTrack;
/**
 * Creates a video track from the video captured by a camera.
 *
 * @param config Configurations for the captured video, such as the capture device and the encoder configuration. See {@link CameraVideoTrackInitConfig}.
 * @returns A `Promise` that resolves with the created camera video track.
 * @category Local Track
 */
createCameraVideoTrack(config?: CameraVideoTrackInitConfig): Promise<ICameraVideoTrack>;
/**
 * Creates an audio track and a video track.
 *
 * Creates an audio track from the audio sampled by a microphone and a video track from the video captured by a camera.
 *
 * > Calling this method differs from calling {@link createMicrophoneAudioTrack} and {@link createCameraVideoTrack} separately:
 * > - This method call requires access to the microphone and the camera at the same time. In this case, users only need to do authorization once.
 * > - Calling {@link createMicrophoneAudioTrack} and {@link createCameraVideoTrack} requires access to the microphone and the camera separately. In this case, users need to do authorization twice.
 * @param audioConfig Configurations for the sampled audio, such as the capture device and the encoder configurations.
 * @param videoConfig Configurations for the captured video, such as the capture device and the encoder configurations.
 * @returns A `Promise` that resolves with a two-element tuple: the microphone audio track followed by the camera video track.
 */
createMicrophoneAndCameraTracks(audioConfig?: MicrophoneAudioTrackInitConfig, videoConfig?: CameraVideoTrackInitConfig): Promise<[IMicrophoneAudioTrack, ICameraVideoTrack]>;
/**
 * Creates a video track for screen sharing.
 *
 * @param config Configurations for the screen-sharing video, such as encoder configuration and capture configuration.
 * @param withAudio Whether to share the audio of the **screen sharing input source** when sharing the screen.
 * - `enable`: Share the audio.
 * - `disable`: (Default) Do not share the audio.
 * - `auto`: Share the audio, dependent on whether the browser supports this function.
 * > Note:
 * > - This function is only available for desktop browsers that support the Web SDK instead of mobile devices. For the specific list of supported browsers, see [Supported platforms](https://docs.agora.io/en/video-calling/overview/supported-platforms?platform=web).
 * > - Additional information on browser versions and feature support across different operating systems:
 * >   - On macOS, Chrome 74 or later supports audio and video sharing, only when sharing Chrome tabs. Firefox and Safari 14 or later support window and screen sharing, but do not support audio sharing.
 * >   - On Windows, Chrome 74 or later and Edge support audio sharing when sharing the screen and browser tabs, but not when sharing application windows. Firefox supports window and screen sharing, but does not support audio sharing.
 * >   - On ChromeOS, Chrome supports audio sharing when sharing the screen and browser tabs, but not when sharing application windows.
 * > - For the audio sharing to take effect, the end user must check **Share audio** in the pop-up window when sharing the screen.
 * @returns
 * - If `withAudio` is `enable`, then this method returns a list containing a video track for screen sharing and an audio track. If the end user does not check **Share audio**, the SDK throws an error.
 * - If `withAudio` is `disable`, then this method returns a video track for screen sharing.
 * - If `withAudio` is `auto`, then the SDK attempts to share the audio on browsers supporting this function.
 *   - If the end user checks **Share audio**, then this method returns a list containing a video track for screen sharing and an audio track.
 *   - If the end user does not check **Share audio**, then this method only returns a video track for screen sharing.
 * @category Local Track
 */
createScreenVideoTrack(config: ScreenVideoTrackInitConfig, withAudio: "enable"): Promise<[ILocalVideoTrack, ILocalAudioTrack]>;
/**
 * Creates a video track for screen sharing.
 *
 * @param config Configurations for the screen-sharing video, such as encoder configuration and capture configuration.
 * @param withAudio Whether to share the audio of the **screen sharing input source** when sharing the screen.
 * - `enable`: Share the audio.
 * - `disable`: (Default) Do not share the audio.
 * - `auto`: Share the audio, dependent on whether the browser supports this function.
 * > Note:
 * > - This function is only available for desktop browsers that support the Web SDK instead of mobile devices. For the specific list of supported browsers, see [Supported platforms](https://docs.agora.io/en/video-calling/overview/supported-platforms?platform=web).
 * > - Additional information on browser versions and feature support across different operating systems:
 * >   - On macOS, Chrome 74 or later supports audio and video sharing, only when sharing Chrome tabs. Firefox and Safari 14 or later support window and screen sharing, but do not support audio sharing.
 * >   - On Windows, Chrome 74 or later and Edge support audio sharing when sharing the screen and browser tabs, but not when sharing application windows. Firefox supports window and screen sharing, but does not support audio sharing.
 * >   - On ChromeOS, Chrome supports audio sharing when sharing the screen and browser tabs, but not when sharing application windows.
 * > - For the audio sharing to take effect, the end user must check **Share audio** in the pop-up window when sharing the screen.
 * @returns Since `withAudio` is `disable`, this overload returns only a video track for screen sharing.
 */
createScreenVideoTrack(config: ScreenVideoTrackInitConfig, withAudio: "disable"): Promise<ILocalVideoTrack>;
/**
 * Creates a video track for screen sharing.
 *
 * @param config Configurations for the screen-sharing video, such as encoder configuration and capture configuration.
 * @param withAudio Whether to share the audio of the **screen sharing input source** when sharing the screen.
 * - `enable`: Share the audio.
 * - `disable`: (Default) Do not share the audio.
 * - `auto`: Share the audio, dependent on whether the browser supports this function.
 * > Note:
 * > - This function is only available for desktop browsers that support the Web SDK instead of mobile devices. For the specific list of supported browsers, see [Supported platforms](https://docs.agora.io/en/video-calling/overview/supported-platforms?platform=web).
 * > - Additional information on browser versions and feature support across different operating systems:
 * >   - On macOS, Chrome 74 or later supports audio and video sharing, only when sharing Chrome tabs. Firefox and Safari 14 or later support window and screen sharing, but do not support audio sharing.
 * >   - On Windows, Chrome 74 or later and Edge support audio sharing when sharing the screen and browser tabs, but not when sharing application windows. Firefox supports window and screen sharing, but does not support audio sharing.
 * >   - On ChromeOS, Chrome supports audio sharing when sharing the screen and browser tabs, but not when sharing application windows.
 * > - For the audio sharing to take effect, the end user must check **Share audio** in the pop-up window when sharing the screen.
 * @returns
 * - If `withAudio` is `enable`, then this method returns a list containing a video track for screen sharing and an audio track. If the end user does not check **Share audio**, the SDK throws an error.
 * - If `withAudio` is `disable`, then this method returns a video track for screen sharing.
 * - If `withAudio` is `auto`, then the SDK attempts to share the audio on browsers supporting this function.
 *   - If the end user checks **Share audio**, then this method returns a list containing a video track for screen sharing and an audio track.
 *   - If the end user does not check **Share audio**, then this method only returns a video track for screen sharing.
 */
createScreenVideoTrack(config: ScreenVideoTrackInitConfig, withAudio?: "enable" | "disable" | "auto"): Promise<[ILocalVideoTrack, ILocalAudioTrack] | ILocalVideoTrack>;
/**
 * Enumerates the media input and output devices available, such as microphones, cameras, and headsets.
 *
 * If this method call succeeds, the SDK returns a list of media devices in an array of [MediaDeviceInfo](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo) objects.
 *
 * > Note:
 * > - Calling this method turns on the camera and microphone shortly for the device permission request. On browsers including Chrome 67+, Firefox 70+, and Safari 12+, the SDK cannot get accurate device information without permission for the media device.
 * > - The [MediaDeviceInfo.deviceId](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/deviceId) property of a device may change. For example, it is reset when the user clears cookies. Agora does not recommend using the `deviceId` property to implement your business logic.
 *
 * ```javascript
 * getDevices().then(devices => {
 *   console.log("first device id", devices[0].deviceId);
 * }).catch(e => {
 *   console.log("get devices error!", e);
 * });
 * ```
 * @param skipPermissionCheck Whether to skip the permission check. If you set this parameter as `true`, the SDK does not trigger the request for media device permission. In this case, the retrieved media device information may be inaccurate.
 * - `true`: Skip the permission check.
 * - `false`: (Default) Do not skip the permission check.
 * @returns A `Promise` that resolves with an array of `MediaDeviceInfo` objects.
 * @category Media Devices
 */
getDevices(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
/**
 * Enumerates the audio sampling devices available, such as microphones.
 *
 * If this method call succeeds, the SDK returns a list of audio input devices in an array of [MediaDeviceInfo](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo) objects.
 *
 * > Calling this method turns on the microphone shortly for the device permission request. On browsers including Chrome 67+, Firefox 70+, and Safari 12+, the SDK cannot get accurate device information without permission for the media device.
 *
 * @param skipPermissionCheck Whether to skip the permission check. If you set this parameter as `true`, the SDK does not trigger the request for media device permission. In this case, the retrieved media device information may be inaccurate.
 * - `true`: Skip the permission check.
 * - `false`: (Default) Do not skip the permission check.
 * @returns A `Promise` that resolves with an array of `MediaDeviceInfo` objects for audio input devices.
 * @category Media Devices
 */
getMicrophones(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
/**
 * Enumerates the video capture devices available, such as cameras.
 *
 * If this method call succeeds, the SDK returns a list of video input devices in an array of [MediaDeviceInfo](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo) objects.
 *
 * > Calling this method turns on the camera shortly for the device permission request. On browsers including Chrome 67+, Firefox 70+, and Safari 12+, the SDK cannot get accurate device information without permission for the media device.
 *
 * @param skipPermissionCheck Whether to skip the permission check. If you set this parameter as `true`, the SDK does not trigger the request for media device permission. In this case, the retrieved media device information may be inaccurate.
 * - `true`: Skip the permission check.
 * - `false`: (Default) Do not skip the permission check.
 * @returns A `Promise` that resolves with an array of `MediaDeviceInfo` objects for video input devices.
 * @category Media Devices
 */
getCameras(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
/**
 * Enumerates the audio playback devices available, such as speakers.
 *
 * @since
 * *4.1.0*
 *
 * If this method call succeeds, the SDK returns a list of audio playback devices in an array of [MediaDeviceInfo](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo) objects.
 *
 * > - This method is supported on Chrome, Firefox, and Edge, but is not supported on Safari.
 * > - Calling this method turns on the microphone briefly for the device permission request. On browsers including Chrome 67+ and Firefox 70+, the SDK cannot get accurate device information without permission for the media device.
 *
 * @param skipPermissionCheck Whether to skip the permission check. If you set this parameter as `true`, the SDK does not trigger the request for media device permission. In this case, the retrieved media device information may be inaccurate.
 * - `true`: Skip the permission check.
 * - `false`: (Default) Do not skip the permission check.
 * @returns A `Promise` that resolves with an array of `MediaDeviceInfo` objects for audio output devices.
 * @category Media Devices
 */
getPlaybackDevices(skipPermissionCheck?: boolean): Promise<MediaDeviceInfo[]>;
/**
 * Gets the sources for screen-sharing through Electron.
 *
 * > If your Electron environment has set `contextIsolation: true`, calling this function will throw an error. You need to get the screen source id with the `contextBridge.exposeInMainWorld` method by yourself.
 * ```
 * // preload.js
 *
 * const {
 *   contextBridge, desktopCapturer
 * } = require("electron");
 *
 * contextBridge.exposeInMainWorld(
 *   "electronDesktopCapturer", {
 *     getSources: async (...args) => {
 *       const sources = await desktopCapturer.getSources(...args);
 *       return sources;
 *     }
 *   }
 * );
 *
 * // renderer.js
 * (async () => {
 *   sources = await window.electronDesktopCapturer.getSources(["window", "screen"]);
 *   const source = sources[0]; // just an example; you should build a UI for the user to select the exact source.
 *   const screenVideoTrack = await AgoraRTC.createScreenVideoTrack({ electronScreenSourceId: source.id });
 * })()
 *
 * ```
 * If this method call succeeds, the SDK returns a list of screen sources in an array of {@link ElectronDesktopCapturerSource} objects.
 * @param type The type of screen sources (window/application/screen) to get. See {@link ScreenSourceType}. If it is left empty, this method gets all the available sources.
 * @returns A `Promise` that resolves with an array of `ElectronDesktopCapturerSource` objects.
 * @category Media Devices
 */
getElectronScreenSources(type?: ScreenSourceType): Promise<ElectronDesktopCapturerSource[]>;
/**
 * @ignore
 */
setAppType(type: AppType): void;
/**
 * Sets the output log level of the SDK.
 *
 * Choose a level to see the logs preceding that level. The log level follows the sequence of NONE, ERROR, WARNING, INFO, and DEBUG.
 *
 * For example, if you set the log level as `AgoraRTC.setLogLevel(1);`, then you can see logs in levels INFO, ERROR, and WARNING.
 * @param level The output log level:
 * - 0: DEBUG. Output all API logs.
 * - 1: INFO. Output logs of the INFO, WARNING and ERROR level.
 * - 2: WARNING. Output logs of the WARNING and ERROR level.
 * - 3: ERROR. Output logs of the ERROR level.
 * - 4: NONE. Do not output any log.
 * @category Logger
 */
setLogLevel(level: number): void;
/**
 * Enables log upload.
 *
 * Call this method to enable log upload to Agora’s server.
 *
 * The log-upload function is disabled by default. To enable this function, you must call this method before calling all the other methods.
 *
 * > If a user fails to join the channel, the log information (for that user) is unavailable on Agora's server.
 * @category Logger
 */
enableLogUpload(): void;
/**
 * Disables log upload.
 *
 * The log-upload function is disabled by default. If you have called {@link enableLogUpload}, then call this method when you need to stop uploading the log.
 * @category Logger
 */
disableLogUpload(): void;
/**
 * Creates an object for configuring the media stream relay.
 *
 * @returns The created configuration object for channel media relay.
 */
createChannelMediaRelayConfiguration(): IChannelMediaRelayConfiguration;
/**
 * Checks whether a video track is active.
 *
 * The SDK determines whether a video track is active by checking for image changes during the specified time frame.
 *
 * Agora recommends calling this method before starting a call to check the availability of the video capture device. You can pass the camera video track as a parameter in this method to check whether the camera works.
 *
 * > Notes:
 * > - If a video track is muted, this method returns `false`.
 * > - Do not call this method frequently as the check may affect web performance.
 *
 * ```javascript
 * const videoTrack = await AgoraRTC.createCameraVideoTrack({ cameraId });
 * AgoraRTC.checkVideoTrackIsActive(videoTrack).then(result => {
 *   console.log(`${ cameraLabel } is ${ result ? "available" : "unavailable" }`);
 * }).catch(e => {
 *   console.log("check video track error!", e);
 * });
 * ```
 *
 * @param track The local or remote video track to be checked.
 * @param timeout The time frame (ms) for checking. The default value is 5,000 ms.
 *
 * @returns A `Promise` that resolves with whether the image in the specified video track changes during the specified time frame:
 * - `true`: The image changes. For the camera video track, it means the video capture device works.
 * - `false`: The image does not change. Possible reasons:
 *   - The video capturing device does not work properly or is blocked.
 *   - The video track is muted.
 */
checkVideoTrackIsActive(track: ILocalVideoTrack | IRemoteVideoTrack, timeout?: number): Promise<boolean>;
/**
 * Checks whether an audio track is active.
 *
 * The SDK determines whether an audio track is active by checking whether the volume changes during the specified time frame.
 *
 * Agora recommends calling this method before starting a call to check the availability of the audio sampling device. You can pass the audio track from the audio sampled by a microphone as a parameter in this method to check whether the microphone works.
 *
 * > Notes:
 * > - The check may fail in a quiet environment. Agora suggests you instruct the end user to speak or make some noise when calling this method.
 * > - If an audio track is muted, this method returns `false`.
 * > - Do not call this method frequently as the check may affect web performance.
 *
 * ```javascript
 * const audioTrack = await AgoraRTC.createMicrophoneAudioTrack({ microphoneId });
 * AgoraRTC.checkAudioTrackIsActive(audioTrack).then(result => {
 *   console.log(`${ microphoneLabel } is ${ result ? "available" : "unavailable" }`);
 * }).catch(e => {
 *   console.log("check audio track error!", e);
 * });
 * ```
 *
 * @param track The local or remote audio track to be checked.
 * @param timeout The time frame (ms) for checking. The default value is 5,000 ms.
 *
 * @returns A `Promise` that resolves with whether the volume in the specified audio track changes during the specified time frame:
 * - `true`: The volume changes. For the microphone audio track, it means the audio sampling device works.
 * - `false`: The volume does not change. Possible reasons:
 *   - The audio sampling device does not work properly.
 *   - The volume in the customized audio track does not change.
 *   - The audio track is muted.
 */
checkAudioTrackIsActive(track: ILocalAudioTrack | IRemoteAudioTrack, timeout?: number): Promise<boolean>;
/**
 * Occurs when a video capture device is added or removed.
 *
 * ```javascript
 * AgoraRTC.onCameraChanged = (info) => {
 *   console.log("camera changed!", info.state, info.device);
 * };
 * ```
 * **Parameters**
 *
 * - **deviceInfo**: The information of the video capture device. See {@link DeviceInfo}.
 *
 * @category Global Callback
 */
onCameraChanged?: (deviceInfo: DeviceInfo) => void;
/**
 * Occurs when an audio sampling device is added or removed.
 *
 * ```javascript
 * AgoraRTC.onMicrophoneChanged = (info) => {
 *   console.log("microphone changed!", info.state, info.device);
 * };
 * ```
 * **Parameters**
 *
 * - **deviceInfo**: The information of the device. See {@link DeviceInfo}.
 * @category Global Callback
 */
onMicrophoneChanged?: (deviceInfo: DeviceInfo) => void;
/**
 * Occurs when an audio playback device is added or removed.
 *
 * @since
 * *4.1.0*
 *
 * ```javascript
 * AgoraRTC.onPlaybackDeviceChanged = (info) => {
 *   console.log("speaker changed!", info.state, info.device);
 * };
 * ```
 * **Parameters**
 *
 * - **deviceInfo**: The information of the device. See {@link DeviceInfo}.
 * @category Global Callback
 */
onPlaybackDeviceChanged?: (deviceInfo: DeviceInfo) => void;
/**
* Occurs when the autoplay of an audio track fails.
*
* @deprecated from v4.6.0. Use [[onAutoplayFailed]] instead.
*
* If multiple tracks call `play` and all trigger autoplay blocking, the SDK triggers `onAudioAutoplayFailed` multiple times.
*
* The autoplay failure is caused by browsers' [autoplay blocking](https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide#Autoplay_and_autoplay_blocking), which does not affect video tracks.
*
* In the Agora Web SDK, once the user has interacted with the webpage, the autoplay blocking is removed. You can deal with the issue in either of the following ways:
* - If you do not want to receive the `onAudioAutoplayFailed` callback, ensure that the user has interacted with the webpage before `RemoteAudioTrack.play` or `LocalAudioTrack.play` is called.
* - If you cannot guarantee a user interaction before the call of `RemoteAudioTrack.play` or `LocalAudioTrack.play`, you can display a button and instruct the user to click it in the `onAudioAutoplayFailed` callback.
*
* > As the number of visits on a webpage increases, the browser adds the webpage to the autoplay whitelist, but this information is not accessible by JavaScript.
*
* The following example shows how to display a button for the user to click when autoplay fails.
*
* ```javascript
* let isAudioAutoplayFailed = false;
* AgoraRTC.onAudioAutoplayFailed = () => {
* if (isAudioAutoplayFailed) return;
*
* isAudioAutoplayFailed = true;
* const btn = document.createElement("button");
* btn.innerText = "Click me to resume the audio playback";
* btn.onclick = () => {
* isAudioAutoplayFailed = false;
* btn.remove();
* };
* document.body.append(btn);
* };
* ```
* > If multiple audio tracks call `play`, the `onAudioAutoplayFailed` callback is triggered multiple times. The example uses the `isAudioAutoplayFailed` flag to avoid repeatedly creating buttons.
*
* @category Global Callback
*/
onAudioAutoplayFailed?: () => void;
/**
*
* @since
* *4.6.0*
*
* Occurs when the autoplay of an audio track or a video track fails.
*
* Different from [[onAudioAutoplayFailed]], if multiple tracks call `play` and all trigger autoplay blocking, the SDK triggers `onAutoplayFailed` only once before a user gesture for removing the autoplay blocking occurs.
*
* The autoplay failure of audible media is caused by browsers' [autoplay blocking](https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide#Autoplay_and_autoplay_blocking). On most web browsers, inaudible media are not affected by autoplay blocking. However, on iOS Safari with low power mode enabled, or other iOS in-app browsers that implement a custom autoplay policy, such as WeChat browser, the autoplay of inaudible media is blocked.
*
* In the Agora Web SDK, once the user has interacted with the webpage, the autoplay blocking is removed. You can deal with the issue in either of the following ways:
* - If you do not want to receive the `onAutoplayFailed` callback, ensure that the user has interacted with the webpage before `RemoteTrack.play` or `LocalTrack.play` is called.
* - If you cannot guarantee a user interaction before the call of `RemoteTrack.play` or `LocalTrack.play`, you can display a button and instruct the user to click it in the `onAutoplayFailed` callback.
*
* > As the number of visits on a webpage increases, the browser may add the webpage to the autoplay whitelist, but this information is not accessible by JavaScript.
*
* The following example demonstrates how to display a button for the user to click when autoplay fails.
*
* ```javascript
* AgoraRTC.onAutoplayFailed = () => {
* const btn = document.createElement("button");
* btn.innerText = "Click me to resume the audio playback";
* btn.onclick = () => {
* btn.remove();
* };
* document.body.append(btn);
* };
* ```
* > Since the SDK only triggers `onAutoplayFailed` once before a user gesture that removes the autoplay blocking occurs, you do not need to maintain the state of `isAutoPlayFailed` as you did for the `onAudioAutoplayFailed` callback.
*
* @category Global Callback
*/
onAutoplayFailed?: () => void;
/**
* @since
* *4.15.0*
*
* Occurs when Agora-related services cause CSP (Content Security Policy) violations.
*
* When Agora fails to load a resource or send a request due to CSP violations, the SDK triggers this callback.
* After receiving this callback, modify your CSP configuration to ensure that you can access Agora-related services.
*
* @category Global Callback
*/
onSecurityPolicyViolation?: (event: SecurityPolicyViolationEvent) => void;
/**
* @ignore
*
* Intended for internal use.
*
* Reports transitions of the SDK's audio context state. In addition to the
* standard `AudioContextState` values, a non-standard `"interrupted"` state
* can be reported; `prevState` is `undefined` for the first transition.
*/
onAudioContextStateChanged?: (currState: AudioContextState | "interrupted", prevState: AudioContextState | "interrupted" | undefined) => void;
/**
* @since
* *4.2.0*
*
* Sets the region for connection.
*
* This advanced feature applies to scenarios that have regional restrictions.
*
* By default, the SDK connects to nearby Agora servers. After specifying the region, the SDK connects to the Agora servers within that region.
*
* ```javascript
* // Specify the region for connection as North America.
* AgoraRTC.setArea({
* areaCode:"NORTH_AMERICA"
* })
* ```
*
* ```javascript
* // Exclude Mainland China from the regions for connection.
* AgoraRTC.setArea({
* areaCode:"GLOBAL",
* excludedArea:"CHINA"
* })
* ```
*
* @param area The region for connection. For supported regions, see {@link AREAS}. Choose either of the following ways to specify the region for connection:
* - Set the `areaCode` parameter to specify only one region for connection.
* - Set the `areaCode` parameter to specify a large region and the `excludedArea` parameter to specify a small region. The region for connection is the large region excluding the small region. You can only specify the large region as `"GLOBAL"`.
*
* As the signature shows, you can also pass an array of regions (`AREAS[]`) directly instead of the object form.
*/
setArea(area: AREAS[] | {
areaCode: AREAS[];
excludedArea?: AREAS;
}): void;
/**
* @since
* *4.5.0*
*
* Enables the AEC (Acoustic Echo Canceller) for the audio played on the local client.
* In a scenario where multiple users play a media file at the same time, such as watching a movie together, if the user A plays the media file through [HTMLMediaElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement) on Chrome with a speaker, the SDK captures the audio played by a speaker together with the voice of the user A. The other users can hear the audio sent by the user A and the audio played locally, which sounds like an echo. To deal with this echo issue, you can call `processExternalMediaAEC` and pass in the HTMLMediaElement to enable the AEC for the audio played on the local client.
*
* ```javascript
* const mediaElement = document.getElementById("my-video");
* // Enable echo cancellation for the audio played through this element.
* AgoraRTC.processExternalMediaAEC(mediaElement);
* ```
*
* > Note: If you play a cross-origin media file, you must set the `crossOrigin` property in [HTMLMediaElement](https://developer.mozilla.org/zh-CN/docs/Web/API/HTMLMediaElement) as `"anonymous"` to allow the SDK to capture the media.
*
* @param element The [HTMLMediaElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement) object to which the echo cancellation is applied.
*/
processExternalMediaAEC(element: HTMLMediaElement): void;
/**
* @since
* *4.10.0*
*
* Registers an extension.
*
* Agora supports the following extensions:
* - [Virtual Background Extension](https://docs.agora.io/en/Video/virtual_background_web_ng?platform=Web)
* - [AI Denoiser Extension](https://docs.agora.io/en/Video/noise_reduction_web_ng?platform=Web)
* - [Image Enhancement Extension](https://docs.agora.io/en/Video/beauty_effect_web_ng?platform=Web)
*
* @param extensions The extension instances to register.
*/
registerExtensions(extensions: IExtension[]): void;
}
/**
* An interface providing the local client with basic functions for a voice or video call, such as joining a channel, publishing tracks, or subscribing to tracks.
*
* An `AgoraRTCClient` object is created by the [[createClient]] method.
* @public
*/
export declare interface IAgoraRTCClient extends EventEmitter {
/**
* Connection state between the SDK and the Agora server.
*/
readonly connectionState: ConnectionState;
/**
* A list of the remote users in the channel, each of which includes the user ID and the corresponding track information.
*
* The list is empty if the local user has not joined a channel.
*/
readonly remoteUsers: IAgoraRTCRemoteUser[];
/**
* @since
* *4.0.0*
*
* The list of the local tracks that the local user is publishing.
*
* - After a successful call of [[publish]], the published track object is added to this list automatically.
* - After a successful call of [[unpublish]], the unpublished track object is removed from this list automatically.
*/
readonly localTracks: ILocalTrack[];
/**
* The ID of the local user.
*
* The value is `undefined` if the local user has not joined a channel.
*/
readonly uid?: UID;
/**
* The current channel name.
*
* The value is `undefined` if the local user has not joined a channel.
*/
readonly channelName?: string;
/**
* @ignore
*
* The list of local data channels (internal use).
*/
readonly localDataChannels: ILocalDataChannel[];
/**
* @ignore
* @since
* *4.18.1*
*
* The current channel profile.
*/
readonly mode: SDK_MODE;
/**
* @ignore
* @since
* *4.18.1*
*
* The current user role.
*/
readonly role: ClientRole;
/*
* Typed `on` overloads — one per SDK event name. Each overload registers the
* given listener for the named event; see the linked `event_*` documentation
* for the callback signature. Overloads without a description follow the same
* contract as the generic `on(event, listener)` overload at the end.
*/
/**
* @param event The event name.
* @param listener See [connection-state-change]{@link event_connection_state_change}.
*/
on(event: "connection-state-change", listener: typeof event_connection_state_change): void;
/**
* @param event The event name.
* @param listener See [user-joined]{@link event_user_joined}.
*/
on(event: "user-joined", listener: typeof event_user_joined): void;
/**
* @param event The event name.
* @param listener See [user-left]{@link event_user_left}.
*/
on(event: "user-left", listener: typeof event_user_left): void;
/**
* @param event The event name.
* @param listener See [user-published]{@link event_user_published}.
*/
on(event: "user-published", listener: typeof event_user_published): void;
/**
* @param event The event name.
* @param listener See [user-unpublished]{@link event_user_unpublished}.
*/
on(event: "user-unpublished", listener: typeof event_user_unpublished): void;
/**
* @param event The event name.
* @param listener See [user-info-updated]{@link event_user_info_updated}.
*/
on(event: "user-info-updated", listener: typeof event_user_info_updated): void;
/**
* @param event The event name.
* @param listener See [media-reconnect-start]{@link event_media_reconnect_start}.
*/
on(event: "media-reconnect-start", listener: typeof event_media_reconnect_start): void;
/**
* @param event The event name.
* @param listener See [media-reconnect-end]{@link event_media_reconnect_end}.
*/
on(event: "media-reconnect-end", listener: typeof event_media_reconnect_end): void;
/**
* @param event The event name.
* @param listener See [stream-type-changed]{@link event_stream_type_changed}.
*/
on(event: "stream-type-changed", listener: typeof event_stream_type_changed): void;
/**
* @param event The event name.
* @param listener See [stream-fallback]{@link event_stream_fallback}.
*/
on(event: "stream-fallback", listener: typeof event_stream_fallback): void;
/**
* @param event The event name.
* @param listener See [channel-media-relay-state]{@link event_channel_media_relay_state}.
*/
on(event: "channel-media-relay-state", listener: typeof event_channel_media_relay_state): void;
/**
* @param event The event name.
* @param listener See [channel-media-relay-event]{@link event_channel_media_relay_event}.
*/
on(event: "channel-media-relay-event", listener: typeof event_channel_media_relay_event): void;
/**
* @param event The event name.
* @param listener See [volume-indicator]{@link event_volume_indicator}.
*/
on(event: "volume-indicator", listener: typeof event_volume_indicator): void;
/**
* @param event The event name.
* @param listener See [crypt-error]{@link event_crypt_error}.
*/
on(event: "crypt-error", listener: typeof event_crypt_error): void;
/**
* @param event The event name.
* @param listener See [token-privilege-will-expire]{@link event_token_privilege_will_expire}.
*/
on(event: "token-privilege-will-expire", listener: typeof event_token_privilege_will_expire): void;
/**
* @param event The event name.
* @param listener See [token-privilege-did-expire]{@link event_token_privilege_did_expire}.
*/
on(event: "token-privilege-did-expire", listener: typeof event_token_privilege_did_expire): void;
/**
* @param event The event name.
* @param listener See [network-quality]{@link event_network_quality}.
*/
on(event: "network-quality", listener: typeof event_network_quality): void;
/**
* @param event The event name.
* @param listener See [live-streaming-error]{@link event_live_streaming_error}.
*/
on(event: "live-streaming-error", listener: typeof event_live_streaming_error): void;
/**
* @param event The event name.
* @param listener See [live-streaming-warning]{@link event_live_streaming_warning}.
*/
on(event: "live-streaming-warning", listener: typeof event_live_streaming_warning): void;
/**
* @param event The event name.
* @param listener See [exception]{@link event_exception}.
*/
on(event: "exception", listener: typeof event_exception): void;
/**
* @param event The event name.
* @param listener See [is-using-cloud-proxy]{@link event_is_using_cloud_proxy}.
*/
on(event: "is-using-cloud-proxy", listener: typeof event_is_using_cloud_proxy): void;
/**
* @deprecated from 4.19.0.
*
* @param event The event name.
* @param listener See [join-fallback-to-proxy]{@link event_join_fallback_to_proxy}.
*/
on(event: "join-fallback-to-proxy", listener: typeof event_join_fallback_to_proxy): void;
/**
* @param event The event name.
* @param listener See [published-user-list]{@link event_published_user_list}.
*/
on(event: "published-user-list", listener: typeof event_published_user_list): void;
/**
* @param event The event name.
* @param listener See [content-inspect-connection-state-change]{@link event_content_inspect_connection_state_change}.
*/
on(event: "content-inspect-connection-state-change", listener: typeof event_content_inspect_connection_state_change): void;
/**
* @param event The event name.
* @param listener See [content-inspect-error]{@link event_content_inspect_error}.
*/
on(event: "content-inspect-error", listener: typeof event_content_inspect_error): void;
/**
* When the specified event happens, the SDK triggers the callback that you pass.
*
* @param event The event name.
* @param listener See [image-moderation-connection-state-change]{@link event_image_moderation_connection_state_change}.
*/
on(event: "image-moderation-connection-state-change", listener: typeof event_image_moderation_connection_state_change): void;
/**
* @param event The event name.
* @param listener See [stream-message]{@link event_stream_message}.
*/
on(event: "stream-message", listener: typeof event_stream_message): void;
/**
* When the specified event happens, the SDK triggers the callback that you pass.
*
* @param event The event name.
* @param listener The callback function.
*/
on(event: string, listener: Function): void;
/**
* Allows a user to join a channel.
*
* Users in the same channel can talk to each other.
*
* When joining a channel, the [AgoraRTCClient.on("connection-state-change")]{@link event_connection_state_change} callback is triggered on the local client.
*
* After joining a channel, if the user is in the communication profile, or is a host in the Live Broadcast profile, the [AgoraRTCClient.on("user-joined")]{@link event_user_joined} callback is triggered on the remote client.
*
* @param appid The [App ID](https://docs.agora.io/en/Agora%20Platform/terms?platform=All%20Platforms#appid) of your Agora project.
* @param channel A string that provides a unique channel name for the call. The length must be within 64 bytes. Supported character scopes:
* - All lowercase English letters: a to z.
* - All uppercase English letters: A to Z.
* - All numeric characters: 0 to 9.
* - The space character.
* - Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",".
* @param token The token generated at your server:
* - For low-security requirements: You can use the temporary token generated at Console. For details, see [Get an RTC temporary token](https://docs.agora.io/en/Agora%20Platform/get_appid_token?platform=All%20Platforms#generate-an-rtc-temporary-token).
* - For high-security requirements: Set it as the token generated at your server. For details, see [Authenticate Your Users with Tokens](https://docs.agora.io/en/Video/token_server?platform=Web).
* @param uid The user ID, an integer or a string, ASCII characters only. Ensure this ID is unique. If you set the `uid` to `null`, the Agora server assigns a number uid and returns it in the Promise object.
* - If you use a number as the user ID, it should be a 32-bit unsigned integer with a value ranging from 0 to (2^32 - 1).
* - If you use a string as the user ID, the maximum length is 255 characters.
*
* To ensure a better end-user experience, Agora recommends using a number as the user ID.
*
* > Note:
* > - All users in the same channel should have the same type (number or string) of `uid`.
* > - You can use string UIDs to interoperate with the Native SDK 2.8 or later. Ensure that the Native SDK uses the User Account to join the channel. See [Use String User Accounts](https://docs.agora.io/en/faq/string).
* > - To ensure the data accuracy in Agora Analytics, Agora recommends that you specify `uid` for each user and ensure it is unique.
*
* @returns A Promise object with the user ID.
* - If you pass a number as the user ID, the SDK returns a number `uid`.
* - If you pass a string as the user ID, the SDK returns a string `uid`.
* - If you leave the `uid` parameter empty or set it as `null`, the Agora server assigns a number `uid` and returns it.
* @category Agora Core
*/
join(appid: string, channel: string, token: string | null, uid?: UID | null): Promise<UID>;
/**
* Leaves a channel.
*
* When leaving the channel, the [AgoraRTCClient.on("connection-state-change")]{@link event_connection_state_change} callback is triggered on the local client.
*
* When a user (in the communication profile) or a host (in the live-broadcast profile) leaves the channel, the [AgoraRTCClient.on("user-left")]{@link event_user_left} callback is triggered on each remote client in the channel.
* @category Agora Core
*/
leave(): Promise<void>;
/**
* Publishes local audio and/or video tracks to a channel.
*
* After publishing the local tracks, the [AgoraRTCClient.on("user-published")]{@link event_user_published} callback is triggered on the remote client.
*
* > Note:
* > - In an interactive live streaming, call {@link setClientRole} to set the user role as the host before calling this method.
* > - You can call this method multiple times to add tracks for publishing.
* > - An `AgoraRTCClient` object can publish multiple audio tracks. The SDK automatically mixes the audio tracks into one audio track. Exception: Safari does not support publishing multiple audio tracks on versions earlier than Safari 12.
* > - An `AgoraRTCClient` object can publish **only one video track**. If you want to switch the published video track, for example, from a camera video track to a screen-sharing video track, you must unpublish the published video track.
* @param tracks Local tracks created by [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack} / [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack} or other methods.
* @category Agora Core
*/
publish(tracks: ILocalTrack | ILocalTrack[]): Promise<void>;
/**
* @ignore
*/
publish(config: IDataChannelConfig): Promise<ILocalDataChannel>;
/**
* @ignore
*/
publish(params: ILocalTrack | ILocalTrack[] | IDataChannelConfig): Promise<void | ILocalDataChannel>;
/**
* Unpublishes the local audio and/or video tracks.
*
* After the local client unpublishes, the [AgoraRTCClient.on("user-unpublished")]{@link event_user_unpublished} callback is triggered on each remote client in the channel.
*
* > Note: In an interactive live streaming, after a host unpublishes the local tracks, the SDK does not automatically change the role of this host to the audience. To change the user role, you must call {@link setClientRole}.
*
* @param tracks The tracks to unpublish. If left empty, all the published tracks are unpublished.
* @category Agora Core
*/
unpublish(tracks?: ILocalTrack | ILocalTrack[]): Promise<void>;
/**
* @ignore
*/
unpublish(dataChannel?: ILocalDataChannel): Promise<void>;
/**
* @ignore
*/
unpublish(params?: ILocalTrack | ILocalTrack[] | ILocalDataChannel): Promise<void>;
/**
* Subscribes to the video track of a remote user.
*
* ```javascript
* await client.subscribe(user,"video");
* user.videoTrack.play(document.querySelector(".video-container"));
* ```
* @param user The remote user.
* @param mediaType The media type of the track to subscribe to:
* - `"video"`: Subscribe to the video track only.
*
* @returns When the subscription succeeds, the SDK adds the subscribed track to [user.videoTrack]{@link IAgoraRTCRemoteUser.videoTrack}. You can go on to call [videoTrack.play]{@link IRemoteVideoTrack.play} to play the track.
* > The `Promise` object throws the `TRACK_IS_NOT_PUBLISHED` error if the specified track does not exist.
* @category Agora Core
*/
subscribe(user: IAgoraRTCRemoteUser | UID, mediaType: "video"): Promise<IRemoteVideoTrack>;
/**
* Subscribes to the audio track of a remote user.
*
* ```javascript
* await client.subscribe(user,"audio");
* user.audioTrack.play();
* ```
* @param user The remote user.
* @param mediaType The media type of the track to subscribe to:
* - `"audio"`: Subscribe to the audio track only.
*
* @returns When the subscription succeeds, the SDK adds the subscribed track to [user.audioTrack]{@link IAgoraRTCRemoteUser.audioTrack}. You can go on to call [audioTrack.play]{@link IRemoteAudioTrack.play} to play the track.
* > The `Promise` object throws the `TRACK_IS_NOT_PUBLISHED` error if the specified track does not exist.
* @category Agora Core
*/
subscribe(user: IAgoraRTCRemoteUser | UID, mediaType: "audio"): Promise<IRemoteAudioTrack>;
/**
* @ignore
*/
subscribe(user: IAgoraRTCRemoteUser | UID, mediaType: "datachannel", channelId: number): Promise<IRemoteDataChannel>;
/**
* Subscribes to the audio and/or video tracks of a remote user.
*
* ```javascript
* await client.subscribe(user,"audio");
* user.audioTrack.play();
* ```
* @param user The remote user.
* @param mediaType The media type of the tracks to subscribe to.
* - `"video"`: Subscribe to the video track only.
* - `"audio"`: Subscribe to the audio track only.
* - `"datachannel"`: Reserved for future use.
* @param channelId Reserved for future use.
*
* @returns When the subscription succeeds, the SDK adds the subscribed tracks to [user.audioTrack]{@link IAgoraRTCRemoteUser.audioTrack} and [user.videoTrack]{@link IAgoraRTCRemoteUser.videoTrack}. You can go on to call [audioTrack.play]{@link IRemoteAudioTrack.play} or [videoTrack.play]{@link IRemoteVideoTrack.play} to play the tracks.
* > The `Promise` object throws the `TRACK_IS_NOT_PUBLISHED` error if the specified tracks do not exist.
* @category Agora Core
*/
subscribe(user: IAgoraRTCRemoteUser | UID, mediaType: "video" | "audio" | "datachannel", channelId?: number): Promise<IRemoteTrack | IRemoteDataChannel>;
/**
* Pre-subscribes to a remote user's audio track by `uid`.
* NOTE(review): undocumented in this file; description inferred from the signature — confirm against the official SDK reference.
*/
presubscribe(uid: UID, mediaType: "audio"): Promise<IRemoteAudioTrack>;
/**
* Pre-subscribes to a remote user's video track by `uid`.
* NOTE(review): undocumented in this file; description inferred from the signature — confirm against the official SDK reference.
*/
presubscribe(uid: UID, mediaType: "video"): Promise<IRemoteVideoTrack>;
/**
* Pre-subscribes to a remote user's audio or video track by `uid`.
* NOTE(review): undocumented in this file; description inferred from the signature — confirm against the official SDK reference.
*/
presubscribe(uid: UID, mediaType: "video" | "audio"): Promise<IRemoteTrack>;
/**
* @since
* *4.11.0*
*
* Subscribes to the audio and/or video tracks of multiple remote users at one time.
*
* ```javascript
* const result = await client.massSubscribe([{user:userA, mediaType:'audio'}, {user: userB, mediaType:'audio'}]);
*
* for(const {track, mediaType, error} of result) {
* if(error) {
* console.error(error);
* continue;
* }
*
* if(track) {
* if(mediaType === 'audio') {
* track.play();
* } else {
* track.play(document.querySelector('.video-container'));
* }
* }
* }
* ```
*
* @param subscribeList The list of remote users to subscribe to. Each list entry contains the following information:
* - `user`: The remote user. See [AgoraRTCRemoteUser]{@link IAgoraRTCRemoteUser}.
* - `mediaType`: The media type of the tracks to subscribe to.
* - `"video"`: Subscribe to this user's video track.
* - `"audio"`: Subscribe to this user's audio track.
*
* @returns Normally, the returned list has the same length and order as `subscribeList`, and each list entry contains the following information:
* - `user`: The remote user. See [AgoraRTCRemoteUser]{@link IAgoraRTCRemoteUser}.
* - `mediaType`: The media type of the tracks subscribed to:
* - `"video"`: The video track is subscribed to.
* - `"audio"`: The audio track is subscribed to.
* - `track`: The remote track. See [RemoteTrack]{@link IRemoteTrack}.
* - `error`: The error message. If subscription of a user's audio and/or video tracks fails, the error message is returned through this parameter.
*/
massSubscribe(subscribeList: {
user: IAgoraRTCRemoteUser;
mediaType: "audio" | "video";
}[]): Promise<{
user: IAgoraRTCRemoteUser;
mediaType: "audio" | "video";
track?: IRemoteTrack;
error?: AgoraRTCError;
}[]>;
/**
* Unsubscribes from the audio and/or video tracks of a remote user.
*
* @param user The remote user.
* @param mediaType The media type of the tracks to unsubscribe from:
* - `"video"`: Unsubscribe from the video track only.
* - `"audio"`: Unsubscribe from the audio track only.
* - `"datachannel"`: Reserved for future use.
* - empty: Unsubscribe from all the tracks published by the remote user.
* @param channelId Reserved for future use.
* @returns The `Promise` object throws the `TRACK_IS_NOT_SUBSCRIBED` error if the specified tracks do not exist.
* @category Agora Core
*/
unsubscribe(user: IAgoraRTCRemoteUser | UID, mediaType?: "video" | "audio" | "datachannel", channelId?: number): Promise<void>;
/**
* @since
* *4.11.0*
*
* Unsubscribes from the audio and/or video tracks of multiple remote users at one time.
*
* ```javascript
* client.massUnsubscribe([{user:userA}, {user: userB}]);
* ```
* @param unsubscribeList The list of remote users to unsubscribe from. Each list entry contains the following information:
* - `user`: The remote user. See [AgoraRTCRemoteUser]{@link IAgoraRTCRemoteUser}.
* - `mediaType`: The media type of the tracks to unsubscribe from.
* - `"video"`: Unsubscribe from this user's video track.
* - `"audio"`: Unsubscribe from this user's audio track.
* - empty: Unsubscribe from all the tracks published by this user.
*/
massUnsubscribe(unsubscribeList: {
user: IAgoraRTCRemoteUser;
mediaType?: "audio" | "video";
}[]): Promise<void>;
/**
* Sets the video profile of the low-quality video stream.
*
* If you have enabled the dual-stream mode by calling {@link enableDualStream}, use this method to set the low-quality video stream profile.
*
* If you do not set the low-quality video stream profile, the SDK assigns the default values based on your stream video profile.
*
* > Note:
* > - Due to different device and browser restrictions on video parameter settings, not all video parameters set with `setLowStreamParameter` will take effect:
* > - On Firefox, frame rate settings do not take effect. The browser sets the frame rate at 30 fps. Additionally, on Mac Firefox 73+, the actual frame rate that takes effect is smaller than the set value and there exist potential inaccuracies with the resolution.
* > - On Safari 14 to 17.2, frame rate settings do not take effect. The browser sets the frame rate at around 15 fps, and the resolution of the low-quality stream must be proportional to the resolution of the high-quality stream. Additionally, setting `LowStreamParameter.bitrate` does not take effect on iOS Safari.
* > - On some devices and browsers, the resolution you set may get adjusted by the browser. In this case, billings are calculated based on the actual resolution.
* @param streamParameter The video profile of the low-quality video stream.
* @category Dual Stream
*/
setLowStreamParameter(streamParameter: LowStreamParameter): void;
/**
* Enables dual-stream mode.
*
* Enables dual-stream mode for the local stream. Dual streams are a hybrid of a high-quality video stream and a low-quality video stream:
* - High-quality video stream: High bitrate, high resolution.
* - Low-quality video stream: Low bitrate, low resolution. The default video profile of the low-quality stream is: A resolution (width × height) of 160 × 120 px, a bitrate of 50 Kbps, and a frame rate of 15 fps. Call {@link setLowStreamParameter} to customize the video profile of the low-quality stream.
*
* > Note:
* > - On some Android devices, the remote user may not be able to switch to the low-quality stream after you call `enableDualStream`.
* > - On Android Chrome, the Web SDK cannot send high-quality and low-quality streams in H.264.
* > - On Safari browsers of some Mac devices using Intel chips, calling `enableDualStream` to enable the dual-stream mode with H.264 encoding may cause system lag if the resolution ratio between the small and large streams is lower than 1/4.
*
* ```javascript
* client.enableDualStream().then(() => {
* console.log("Enable Dual stream success!");
* }).catch(err => {
* console.log(err);
* })
* ```
* @category Dual Stream
*/
enableDualStream(): Promise<void>;
/**
* Disables dual-stream mode.
* @category Dual Stream
*/
disableDualStream(): Promise<void>;
/**
* Sets the user role and level in a live streaming (when [mode]{@link ClientConfig.mode} is `"live"`).
*
* - The user role determines the permissions that the SDK grants to a user, such as permission to publish local streams, subscribe to remote streams, and push streams to a CDN address. You can set the user role as `"host"` or `"audience"`. A host can publish and subscribe to streams, while an audience member can only subscribe to streams. The default role in a live streaming is `"audience"`. Before publishing tracks, you must call this method to set the user role as `"host"`.
* - The detailed options of a user, including the user level. The user level determines the level of services that a user can enjoy within the permissions of the user's role. For example, an audience can choose to receive remote streams with low latency or ultra low latency. Levels affect prices.
*
* > Note:
* > - When [mode]{@link ClientConfig.mode} is `"rtc"`, this method does not take effect and all users are `"host"` by default.
* > - If the local client switches the user role after joining a channel, the SDK triggers the [AgoraRTCClient.on("user-joined")]{@link event_user_joined} or [AgoraRTCClient.on("user-left")]{@link event_user_left} callback on the remote client.
* > - To switch the user role to `"audience"` after calling {@link publish}, call {@link unpublish} first. Otherwise the method call fails and throws an exception.
*
* @param role The role of the user.
* @param options The detailed options of a user, including user level.
*/
setClientRole(role: ClientRole, options?: ClientRoleOptions): Promise<void>;
/**
*
* Deploys your proxy server.
*
* Do not use this method and {@link startProxyServer} together. They have
* the following differences:
* - `setProxyServer`: This method allows you to use a custom proxy server
* for purposes including signaling transmission, log uploading, and event reporting. But it cannot be used for media transmission.
* - `startProxyServer`: This method provides Agora's cloud proxy service,
* which handles media and signaling transmission with simple setup. For more
* information, refer to [Using Cloud Proxy Service](https://docs.agora.io/en/video-call-4.x/cloud_proxy_web_ng?platform=Web).
*
* > Note:
* > - Call this method before {@link join}.
* > - Proxy services by different service providers may result in slow performance if you are using the Firefox browser. Therefore, Agora recommends using the same service provider for the proxy services. If you use different service providers, Agora recommends not using the Firefox browser.
* @param proxyServer Your proxy server domain name. ASCII characters only.
* @category Proxy
*/
setProxyServer(proxyServer: string): void;
/**
* @ignore
* Deploys a TURN server.
*
* You can also use cloud proxy by {@link startProxyServer}. See [Use Cloud Proxy](https://docs.agora.io/en/Interactive%20Broadcast/cloud_proxy_web?platform=Web) for details.
*
* > Call this method before {@link join}.
*
* @param turnServer The TURN server settings.
* @category Proxy
*/
setTurnServer(turnServer: TurnServerConfig): void;
/**
* Enables cloud proxy.
*
* You must call this method before joining the channel or after leaving the channel.
*
* For the extra settings required for using the cloud proxy service, see [Use Cloud Proxy](https://docs.agora.io/en/Interactive%20Broadcast/cloud_proxy_web_ng?platform=Web).
*
* @param mode Cloud proxy mode:
* - `3`: The cloud proxy for the UDP protocol, that is, the Force UDP cloud proxy mode. In this mode, the SDK always transmits data over UDP.
* - `5`: The cloud proxy for the TCP (encryption) protocol, that is, the Force TCP cloud proxy mode. In this mode, the SDK always transmits data over TLS 443.
*
* > Note:
* > As of v4.15.0, the default value of `mode` is `3`.
*
* @category Proxy
*/
startProxyServer(mode?: number): void;
/**
* Disables cloud proxy.
*
* You must call this method before joining the channel or after leaving the channel.
* @category Proxy
*/
stopProxyServer(): void;
/**
 * Sets which video stream of a remote user to subscribe to.
 *
 * If a remote user enables dual-stream mode, the user sends a hybrid of a high-quality video stream and a low-quality video stream. Use this method to set which video stream to subscribe to. The local client subscribes to the high-quality video stream by default.
 *
 * > - This method works only if the remote user has enabled the dual-stream mode ({@link enableDualStream}).
 * > - If both this method and {@link setStreamFallbackOption} are called, the actual video stream that the user subscribes to depends on your settings. The following are two cases:
 * >   - If the parameter of {@link setStreamFallbackOption} is set to `0` (DISABLE), the video stream type that the user subscribes to is determined by the setting of `setRemoteVideoStreamType`.
 * >   - If the parameter of {@link setStreamFallbackOption} is set to `1` (VIDEO_STREAM_LOW) or `2` (VIDEO_STREAM_HIGH), the video stream type that the user subscribes to is first set according to the `setRemoteVideoStreamType` setting, but is dynamically adjusted according to the network conditions. For example, if you set the `setRemoteVideoStreamType` parameter to `0` (subscribe to a high-quality video stream), but the network condition is poor, the SDK will perform a fallback operation according to the `setStreamFallbackOption` setting.
 *
 * @param uid The ID of the remote user.
 * @param streamType The remote video stream type:
 * - 0: High-bitrate, high-resolution video stream.
 * - 1: Low-bitrate, low-resolution video stream.
 * @category Dual Stream
 */
setRemoteVideoStreamType(uid: UID, streamType: RemoteStreamType): Promise<void>;
/**
 * Sets the video type of all of the remote stream.
 *
 * If a remote user enables dual-stream mode, after using this method, local client will subscribe the specified streamType by default. The local client subscribes to the high-quality video stream by default.
 *
 * > - Agora suggests calling `setRemoteDefaultVideoStreamType` before joining the channel.
 * > - The method call of {@link setRemoteVideoStreamType} to set the video stream type of a specified remote user overrides this method.
 *
 * @param streamType The remote video stream type:
 * - 0: High-bitrate, high-resolution video stream.
 * - 1: Low-bitrate, low-resolution video stream.
 * @category Dual Stream
 */
setRemoteDefaultVideoStreamType(streamType: RemoteStreamType): Promise<void>;
/**
 * @ignore
 */
pickSVCLayer(uid: UID, layerOptions: {
spatialLayer: 0 | 1 | 2 | 3;
temporalLayer: 0 | 1 | 2 | 3;
}): Promise<void>;
/**
 * @ignore
 */
setRTMConfig(config: RTMConfiguration): Promise<void>;
/**
 * Sets the stream fallback option.
 *
 * Use this method to set the fallback option for the subscribed video stream.
 * Under poor network conditions, the SDK can subscribe to the low-quality video stream or only to the audio stream.
 *
 * The SDK triggers the [AgoraRTCClient.on("stream-type-changed")]{@link event_stream_type_changed} callback when the remote stream changes from a high-quality video stream to a low-quality video stream or vice versa, and triggers the [AgoraRTCClient.on("stream-fallback")]{@link event_stream_fallback} callback when the remote stream changes from a video stream to an audio stream or vice versa.
 *
 * > - This method works only if the remote user has enabled the dual-stream mode by {@link enableDualStream}.
 * > - When the remote user enables the dual-stream mode, if `setStreamFallbackOption` is not called at the local client, the default stream fallback option is to automatically subscribes to the low-video stream under poor network conditions ({@link RemoteStreamFallbackType} is `1`).
 * @param uid The ID of the remote user.
 * @param fallbackType The fallback option. See {@link RemoteStreamFallbackType} for details.
 * @category Dual Stream
 */
setStreamFallbackOption(uid: UID, fallbackType: RemoteStreamFallbackType): Promise<void>;
/**
 * Sets the encryption configurations.
 *
 * Use this method to enable the built-in encryption before joining a channel.
 *
 * If the encryption configurations are incorrect, the SDK triggers the [AgoraRTCClient.on("crypt-error")]{@link event_crypt_error} callback when publishing tracks or subscribing to tracks.
 *
 * > Note:
 * > - All users in a channel must use the same encryption mode, secret, and salt.
 * > - You must call this method before joining a channel, otherwise the method call does not take effect and encryption is not enabled.
 * > - As of v4.7.0, after a user leaves the channel, the SDK automatically disables the built-in encryption. To re-enable the built-in encryption, call this method before the user joins the channel again.
 * > - Do not use this method for CDN live streaming.
 *
 * @param encryptionMode The encryption mode.
 * @param secret The encryption secret. ASCII characters only. When a user uses a weak secret, the SDK outputs a warning message to the Web Console and prompts the users to set a strong secret. A strong secret must contain at least eight characters and be a combination of uppercase and lowercase letters, numbers, and special characters. Due to browser encryption algorithm limitations, the secret length cannot exceed 62 characters. Agora recommends you use OpenSSL to generate the secret on your server. For details, see [Media Stream Encryption](https://docs.agora.io/en/Video/channel_encryption_web_ng?platform=Web).
 * @param salt The salt. Only valid when you set the encryption mode as `"aes-128-gcm2"` or `"aes-256-gcm2"`. Agora recommends you use OpenSSL to generate the salt on your server. For details, see [Media Stream Encryption](https://docs.agora.io/en/Video/channel_encryption_web_ng?platform=Web).
 * @param encryptDataStream Whether to encrypt the data stream. The encryption mode for the data stream must be consistent with the media stream. Currently, data stream encryption only supports "aes-128-gcm2" and "aes-256-gcm2" modes. Using other encryption modes will result in an error.
 * - `true`: Enable data stream encryption.
 * - `false`: Disable data stream encryption.
 */
setEncryptionConfig(encryptionMode: EncryptionMode, secret: string, salt?: Uint8Array, encryptDataStream?: boolean): void;
/**
 * Renews the token.
 *
 * The token expires after a set time once token is enabled. When the SDK triggers the [AgoraRTCClient.on("token-privilege-will-expire")]{@link event_token_privilege_will_expire} callback, call this method to pass a new token. Otherwise the SDK disconnects from the server.
 * @param token The new token.
 */
renewToken(token: string): Promise<void>;
/**
 * Enables the volume indicator.
 *
 * This method enables the SDK to regularly report the local and remote users who are speaking and their volumes.
 *
 * After the volume indicator is enabled, the SDK triggers the [AgoraRTCClient.on("volume-indicator")]{@link event_volume_indicator} callback to report the volumes every two seconds, regardless of whether there are active speakers in the channel.
 *
 * > If the local user leaves the channel and rejoins the channel, you need to call `enableAudioVolumeIndicator` again.
 *
 * ```javascript
 * client.enableAudioVolumeIndicator();
 * client.on("volume-indicator", volumes => {
 *   volumes.forEach((volume, index) => {
 *     console.log(`${index} UID ${volume.uid} Level ${volume.level}`);
 *   });
 * })
 * ```
 */
enableAudioVolumeIndicator(): void;
/**
 * Gets the statistics of the call.
 *
 * @returns The statistics of the call. See {@link AgoraRTCStats}.
 */
getRTCStats(): AgoraRTCStats;
/**
 * Sets the video layout and audio for CDN live streaming.
 *
 * > Ensure that you [enable the RTMP Converter service](https://docs.agora.io/en/Interactive%20Broadcast/cdn_streaming_web?platform=Web#prerequisites) before using this function.
 * @param config Configurations for live transcoding. See {@link LiveStreamingTranscodingConfig} for details.
 * @category Live Streaming
 */
setLiveTranscoding(config: LiveStreamingTranscodingConfig): Promise<void>;
/**
 * Publishes the local stream to the CDN.
 *
 * See [Push Streams to the CDN](https://docs.agora.io/en/Interactive%20Broadcast/cdn_streaming_web?platform=Web) for details.
 *
 * > Note:
 * > - This method adds only one stream HTTP/HTTPS URL address each time it is called.
 *
 * @param url The CDN streaming URL in the RTMP format. ASCII characters only.
 * @param transcodingEnabled Whether to enable live transcoding.
 * Transcoding sets the audio and video profiles and the picture-in-picture layout for the stream to be pushed to the CDN. It is often used to combine the audio and video streams of multiple hosts in a CDN live stream.
 * > If set as `true`, {@link setLiveTranscoding} must be called before this method.
 * - `true`: Enable transcoding.
 * - `false`: (Default) Disable transcoding.
 * @category Live Streaming
 */
startLiveStreaming(url: string, transcodingEnabled?: boolean): Promise<void>;
/**
 * Removes a URL from CDN live streaming.
 *
 * This method removes only one URL address each time it is called. To remove multiple URLs, call this method multiple times.
 * @param url The URL to be removed.
 * @category Live Streaming
 */
stopLiveStreaming(url: string): Promise<void>;
/**
 * Starts relaying media streams across channels.
 *
 * After this method call, the SDK triggers the following callbacks:
 *
 * - [AgoraRTCClient.on("channel-media-relay-state")]{@link event_channel_media_relay_state}, which reports the state and error code of the media stream relay.
 *   - If the media stream relay fails, this callback returns `state` 3. Refer to `code` for the error code and call this method again.
 * - [AgoraRTCClient.on("channel-media-relay-event")]{@link event_channel_media_relay_event}, which reports the events of the media stream relay.
 *   - If the media stream relay starts successfully, this callback returns `code` 4, reporting that the SDK starts relaying the media stream to the destination channel.
 *
 * > Note:
 * >
 * > - Contact sales-us@agora.io to enable this function.
 * > - We do not support string user IDs in this API.
 * > - Call this method only after joining a channel.
 * > - In a live-broadcast channel, only a host can call this method.
 * > - To call this method again after it succeeds, you must call {@link stopChannelMediaRelay} to quit the current relay.
 *
 * ```javascript
 * client.startChannelMediaRelay(config).then(() => {
 *   console.log("startChannelMediaRelay success");
 * }).catch(e => {
 *   console.log("startChannelMediaRelay failed", e);
 * })
 * ```
 * @param config Configurations of the media stream relay.
 * @returns A `Promise` object, which is resolved if the media relay starts successfully.
 * @category Channel Media Relay
 */
startChannelMediaRelay(config: IChannelMediaRelayConfiguration): Promise<void>;
/**
 * Updates the destination channels for media stream relay.
 *
 * After the channel media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method.
 *
 * > Note:
 * >
 * > - Call this method after {@link startChannelMediaRelay}.
 * > - You can add a maximum of four destination channels to a relay.
 * @param config Configurations of the media stream relay.
 * @returns A Promise object, which is resolved if the update succeeds. If the update fails, the media relay state is reset, and you need to call {@link startChannelMediaRelay} again to restart the relay.
 * @category Channel Media Relay
 */
updateChannelMediaRelay(config: IChannelMediaRelayConfiguration): Promise<void>;
/**
 * Stops the media stream relay.
 *
 * Once the relay stops, the user leaves all the destination channels.
 *
 * @returns A `Promise` object, which is resolved if the relay stops successfully.
 * @category Channel Media Relay
 */
stopChannelMediaRelay(): Promise<void>;
/**
 * Reports customized messages to Agora's data center.
 *
 * > Temporarily, Agora supports reporting a maximum of 20 message pieces within 5 seconds.
 *
 * @param reports Messages. You can report multiple messages one time.
 *
 * ```js
 * client.sendCustomReportMessage({
 *   reportId: "id1", category: "category1", event: "custom", label: "label1", value: 0,
 * }).then(() => {
 *   console.log("send custom report success");
 * }).catch(e => {
 *   console.error("send custom report error");
 * });
 * ```
 */
sendCustomReportMessage(reports: EventCustomReportParams[] | EventCustomReportParams): Promise<void>;
/**
 * @since
 * *4.1.0*
 *
 * Gets the statistics of a local audio track.
 *
 * @returns The statistics of the local audio track. See {@link LocalAudioTrackStats}.
 */
getLocalAudioStats(): LocalAudioTrackStats;
/**
 * @since
 * *4.1.0*
 *
 * Gets the statistics of remote audio tracks.
 *
 * > Note: The statistics are calculated after subscribing the remote stream,
 * which may take at most 3 seconds. You can call this method periodically.
 *
 * @returns A map from the remote user ID to the statistics of that user's audio track.
 */
getRemoteAudioStats(): {
[uid: string]: RemoteAudioTrackStats;
};
/**
 * @since
 * *4.2.0*
 *
 * Gets the network quality of all the remote users to whom the local user subscribes.
 *
 * @returns A map from the remote user ID to that user's network quality. See {@link NetworkQuality}.
 */
getRemoteNetworkQuality(): {
[uid: string]: NetworkQuality;
};
/**
 * @since
 * *4.1.0*
 *
 * Gets the statistics of a local video track.
 *
 * > Note: You cannot get the `encodeDelay` property on iOS 14.4 and earlier.
 *
 * @returns The statistics of the local video track. See {@link LocalVideoTrackStats}.
 */
getLocalVideoStats(): LocalVideoTrackStats;
/**
 * @since
 * *4.1.0*
 *
 * Gets the statistics of remote video tracks.
 *
 * > Note: The statistics are calculated after subscribing the remote stream,
 * which may take at most 3 seconds. You can call this method periodically.
 *
 * @returns A map from the remote user ID to the statistics of that user's video track.
 */
getRemoteVideoStats(): {
[uid: string]: RemoteVideoTrackStats;
};
/**
 * @ignore
 *
 * @param inspectConfig Configuration for content inspection. See {@link InspectConfiguration}.
 */
enableContentInspect(inspectConfig: InspectConfiguration): Promise<void>;
/**
 * @ignore
 */
disableContentInspect(): Promise<void>;
/**
 * Disables the third-party video moderation service.
 *
 * @param enabled Default is `false` and can only be set to `false`.
 */
setImageModeration(enabled: false): Promise<void>;
/**
 * Enables and configures the third-party video moderation service.
 *
 * After calling this method, the SDK triggers the
 * [image-moderation-connection-state-change]{@link event_image_moderation_connection_state_change} callback, and captures the
 * snapshots of the locally sent video to send to the third-party service
 * provider for moderation.
 *
 * > - Before calling this method, make sure the following requirements are met:
 * >   - You have activated the third-party video moderation service.
 * >   - The local user has joined the channel, and the local video track has been published and enabled.
 *
 * @param enabled Default is `true` and can only be set to `true`.
 * @param config Configuration for the video moderation service. See {@link ImageModerationConfiguration}.
 */
setImageModeration(enabled: true, config: ImageModerationConfiguration): Promise<void>;
/**
 * @ignore
 */
setLicense(license: string): void;
/**
 * @ignore
 *
 * @param config The local access point configuration. See {@link LocalAccessPointConfig}.
 */
setLocalAccessPointsV2(config: LocalAccessPointConfig): void;
}
/**
 * @ignore
 */
export declare interface IAgoraRTCError extends Error {
/**
 * The error code. See {@link AgoraRTCErrorCode}.
 */
readonly code: AgoraRTCErrorCode;
/**
 * The human-readable error message.
 */
readonly message: string;
/**
 * Extra data attached to the error, if any.
 */
readonly data?: any;
/**
 * The error name.
 */
readonly name: string;
toString(): string;
/**
 * Logs the error at the given level via the given logger and returns the error itself for chaining.
 */
print(level?: "error" | "warning", logger?: any): IAgoraRTCError;
/**
 * Logs and then throws this error; never returns.
 */
throw(logger?: any): never;
}
/**
 * Information about a remote user. You can get this through [AgoraRTCClient.remoteUsers]{@link IAgoraRTCClient.remoteUsers}.
 */
export declare interface IAgoraRTCRemoteUser {
/**
 * The ID of the remote user.
 */
uid: UID;
/**
 * The subscribed audio track.
 */
audioTrack?: IRemoteAudioTrack;
/**
 * The subscribed video track.
 */
videoTrack?: IRemoteVideoTrack;
/**
 * Whether the remote user is sending an audio track.
 * - `true`: The remote user is sending an audio track.
 * - `false`: The remote user is not sending an audio track.
 */
hasAudio: boolean;
/**
 * Whether the remote user is sending a video track.
 * - `true`: The remote user is sending a video track.
 * - `false`: The remote user is not sending a video track.
 */
hasVideo: boolean;
/**
 * @ignore
 */
dataChannels?: IRemoteDataChannel[];
}
/**
 * Inherited from [LocalAudioTrack]{@link ILocalAudioTrack}, `BufferSourceAudioTrack` is an interface for the audio from a local audio file and adds several functions for controlling the processing of the audio buffer, such as starting processing, stopping processing, and seeking a specified time location.
 *
 * You can create an audio track from an audio file by calling [AgoraRTC.createBufferSourceAudioTrack]{@link IAgoraRTC.createBufferSourceAudioTrack}.
 */
export declare interface IBufferSourceAudioTrack extends ILocalAudioTrack {
/**
 * The [source]{@link BufferSourceAudioTrackInitConfig.source} specified when creating an audio track.
 */
source: string | File | AudioBuffer | null;
/**
 * The current state of audio processing, such as start, pause, or stop. See {@link AudioSourceState}.
 */
currentState: AudioSourceState;
/**
 * The total duration of the audio (seconds).
 */
duration: number;
/**
 * @since
 * *4.18.0*
 *
 * The playback speed of the current audio file. Valid range is [50, 400], where:
 * - `50`: Half the original speed.
 * - `100`: (Default) The original speed.
 * - `400`: Four times the original speed.
 */
playbackSpeed: number;
/**
 * @param event The event name.
 * @param listener See [source-state-change]{@link event_source_state_change}.
 */
on(event: "source-state-change", listener: typeof event_source_state_change): void;
/**
 * When the specified event happens, the SDK triggers the callback that you pass.
 *
 * @param event The event name.
 * @param listener The callback function.
 */
on(event: string, listener: Function): void;
/**
 * Gets the progress (seconds) of the audio buffer processing.
 *
 * @returns The progress (seconds) of the audio buffer processing.
 */
getCurrentTime(): number;
/**
 * Starts processing the audio buffer.
 *
 * > Starting processing the audio buffer means that the processing unit in the SDK has received the audio data. If the audio track has been published, the remote user can hear the audio.
 * > Whether the local user can hear the audio depends on whether the SDK calls the [[play]] method and sends the audio data to the sound card.
 *
 * @param options Options for processing the audio buffer. See [[AudioSourceOptions]].
 */
startProcessAudioBuffer(options?: AudioSourceOptions): void;
/**
 * Pauses processing the audio buffer.
 */
pauseProcessAudioBuffer(): void;
/**
 * Jumps to a specified time point.
 *
 * @param time The specified time point (seconds).
 */
seekAudioBuffer(time: number): void;
/**
 * Resumes processing the audio buffer.
 */
resumeProcessAudioBuffer(): void;
/**
 * Stops processing the audio buffer.
 */
stopProcessAudioBuffer(): void;
/**
 * @since
 * *4.18.0*
 *
 * Sets the playback speed for the current audio file.
 *
 * You can call this method before or after joining a channel.
 *
 * @param speed The playback speed. Valid range is [50, 400], where:
 * - `50`: Half the original speed.
 * - `100`: (Default) The original speed.
 * - `400`: Four times the original speed.
 */
setAudioBufferPlaybackSpeed(speed: number): void;
}
/**
 *
 * Inherited from [LocalVideoTrack]{@link ILocalVideoTrack}, `CameraVideoTrack` is an interface for the video captured by a local camera and adds functions such as switching devices and adjusting video encoder configurations.
 *
 * You can create a local camera video track by calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}.
 */
export declare interface ICameraVideoTrack extends ILocalVideoTrack {
/**
 * Sets the device for capturing video.
 *
 * > You can call this method either before or after publishing the video track.
 *
 * @param deviceId Device ID, which can be passed in using the following ways:
 * - Pass a string: Pass the `deviceId` obtained using [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}.
 * - Pass an object: Starting from version 4.19.0, you can pass an object
 * containing `facingMode` or `deviceId`, but only one of these properties
 * can be specified. `deviceId` can be obtained through [AgoraRTC.getCameras]{@link IAgoraRTC.getCameras}, and `facingMode` supports the following values:
 *   - `"environment"`: Use the rear camera.
 *   - `"user"`: Use the front camera.
 */
setDevice(deviceId: string | RequiredOnlyOneOf<{
facingMode: VideoFacingModeEnum;
deviceId: string;
}>): Promise<void>;
/**
 * @since
 * *4.0.0*
 *
 * Enables/Disables the track.
 *
 * After a track is disabled, the SDK stops playing and publishing the track.
 *
 * > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
 * > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param enabled Whether to enable the track:
 * - `true`: Enable the track.
 * - `false`: Disable the track.
 */
setEnabled(enabled: boolean): Promise<void>;
/**
 * @since
 * *4.19.0*
 *
 * Clones the current video track to create a new video track.
 *
 * In scenarios such as video conferencing and online education, you can use this method to display the same video stream with two sets of display parameters, including resolution, frame rate, and bitrate. For example, you can have one display set to high-definition and the other to low-definition.
 *
 * @param config The encoding configuration for the new video track. You can pass in the SDK's built-in encoding configuration through [[VideoEncoderConfiguration]], or customize the video encoding configuration by passing in a [[VideoEncoderConfigurationPreset]].
 * @param cloneTrack Whether to clone the current track. Default is `true`.
 * @returns The newly generated video track.
 */
clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): ICameraVideoTrack;
}
/**
 * Configurations of the media stream relay.
 *
 * Use this interface to set the media stream relay when calling [startChannelMediaRelay]{@link IAgoraRTCClient.startChannelMediaRelay} or [updateChannelMediaRelay]{@link IAgoraRTCClient.updateChannelMediaRelay}.
 *
 * ```javascript
 * const configuration = AgoraRTC.createChannelMediaRelayConfiguration();
 * configuration.setSrcChannelInfo({ channelName: "test", token: "xxx", uid: 12345 });
 * configuration.addDestChannelInfo({ channelName: "test2", token: "xxx", uid: 23456 });
 * ```
 */
export declare interface IChannelMediaRelayConfiguration {
/**
 * Sets the information of the source channel.
 *
 * ```javascript
 * const config = AgoraRTC.createChannelMediaRelayConfiguration();
 * config.setSrcChannelInfo({ channelName: "test", token: "xxx", uid: 123456 });
 * ```
 * @param info The information of the source channel.
 */
setSrcChannelInfo(info: ChannelMediaRelayInfo): void;
/**
 * Adds a destination channel.
 *
 * To relay a media stream across multiple channels, call this method as many times as needed (up to a maximum of four destination channels).
 *
 * ```javascript
 * const config = AgoraRTC.createChannelMediaRelayConfiguration();
 * config.addDestChannelInfo({ channelName: "test2", token: "xxx", uid: 23456 });
 * config.addDestChannelInfo({ channelName: "test3", token: "xxx", uid: 23457 });
 * ```
 *
 * @param info The information of the destination channel.
 */
addDestChannelInfo(info: ChannelMediaRelayInfo): void;
/**
 * Removes the destination channel added through {@link addDestChannelInfo}.
 * @param channelName The name of the destination channel to be removed.
 */
removeDestChannelInfo(channelName: string): void;
}
/**
 * @ignore
 */
export declare interface IDataChannel extends EventEmitter {
/**
 * The ID of the data channel.
 */
readonly id: number;
/**
 * The maximum number of retransmissions, or `null` if not limited (presumably mirrors `RTCDataChannel.maxRetransmits` — confirm).
 */
readonly maxRetransmits: number | null;
/**
 * Whether messages are delivered in order.
 */
readonly ordered: boolean;
/**
 * The current ready state of the underlying RTC data channel.
 */
readonly readyState: RTCDataChannelState;
/**
 * Custom metadata attached to the data channel.
 */
readonly metadata: string;
/**
 * Returns the ID of the data channel.
 */
getChannelId(): number;
/**
 * Returns the configuration used to create the data channel.
 */
getConfig(): IDataChannelConfig;
}
/**
 * @ignore
 */
export declare interface IDataChannelConfig {
/**
 * The ID of the data channel.
 */
id: number;
/**
 * Whether messages are delivered in order.
 */
ordered: boolean;
/**
 * Custom metadata attached to the data channel.
 */
metadata: string;
}
/**
 * @ignore
 *
 * Minimal surface of Electron's `NativeImage` used by the SDK.
 */
declare interface IElectronNativeImage {
/**
 * Returns the image encoded as a data URL string.
 */
toDataURL(): string;
/**
 * Returns the width and height of the image.
 */
getSize(): {
width: number;
height: number;
};
/**
 * Returns a resized copy of the image with the given width (height presumably scaled to keep the aspect ratio — confirm against Electron docs).
 */
resize(options: {
width: number;
}): IElectronNativeImage;
}
/**
 * `LocalAudioTrack` is the basic interface for local audio tracks, providing main methods of local audio tracks.
 *
 * You can create a local audio track by calling [AgoraRTC.createCustomAudioTrack]{@link IAgoraRTC.createCustomAudioTrack}.
 *
 * The following interfaces are inherited from `LocalAudioTrack`:
 * - [MicrophoneAudioTrack]{@link IMicrophoneAudioTrack}, the interface for the audio sampled by a local microphone, which adds several microphone-related functions.
 * - [BufferSourceAudioTrack]{@link IBufferSourceAudioTrack}, the interface for the audio from a local audio file, which adds several audio-file-related functions.
 */
export declare interface ILocalAudioTrack extends ILocalTrack {
/**
 * Sets the volume of a local audio track.
 *
 * @param volume The volume. The value ranges from 0 (mute) to 1000 (maximum). A value of 100 is the original volume. The volume change may not be obvious to the human ear. If the local track has been published, setting the volume affects the volume heard by remote users.
 */
setVolume(volume: number): void;
/**
 * Gets the audio level of a local audio track.
 *
 * @returns The audio level. The value range is [0,1]. 1 is the highest audio level.
 * Usually a user with audio level above 0.6 is a speaking user.
 *
 */
getVolumeLevel(): number;
/**
 * Sets the callback for getting raw audio data in PCM format.
 *
 * After you successfully set the callback, the SDK constantly returns the audio frames of a local audio track in this callback by using [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer).
 *
 * > You can set the `frameSize` parameter to determine the frame size in each callback, which affects the interval between the callbacks. The larger the frame size, the longer the interval between them.
 *
 * ```js
 * track.setAudioFrameCallback((buffer) => {
 *   for (let channel = 0; channel < buffer.numberOfChannels; channel += 1) {
 *     // Float32Array with PCM data
 *     const currentChannelData = buffer.getChannelData(channel);
 *     console.log("PCM data in channel", channel, currentChannelData);
 *   }
 * }, 2048);
 *
 * // ....
 * // Stop getting the raw audio data
 * track.setAudioFrameCallback(null);
 * ```
 *
 * @param audioFrameCallback The callback function for receiving the [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object. If you set `audioBufferCallback` as `null`, the SDK stops getting raw audio data.
 * @param frameSize The number of samples of each audio channel that an `AudioBuffer` object contains. You can set `frameSize` as 256, 512, 1024, 2048, 4096, 8192, or 16384. The default value is 4096.
 */
setAudioFrameCallback(audioFrameCallback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
/**
 * Plays a local audio track.
 *
 * > When playing an audio track, you do not need to pass any DOM element.
 */
play(): void;
/**
 * @since
 * *4.1.0*
 *
 * > Note:
 * > - As of v4.7.0, this method no longer takes effect. Use [IRemoteAudioTrack.setPlaybackDevice]{@link IRemoteAudioTrack.setPlaybackDevice} instead.
 *
 * Sets the playback device (speaker) for the remote audio stream.
 *
 * @param deviceId The device ID, which can be retrieved by calling [[getPlaybackDevices]].
 */
setPlaybackDevice(deviceId: string): Promise<void>;
/**
 * Gets the statistics of a local audio track.
 *
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
 */
getStats(): LocalAudioTrackStats;
/**
 * Inserts a `Processor` to the local audio track.
 *
 * @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
 *
 * @returns The `Processor` instance.
 */
pipe(processor: IAudioProcessor): IAudioProcessor;
/**
 * @since
 * *4.10.0*
 *
 * Removes the `Processor` inserted to the local audio track.
 */
unpipe(): void;
/**
 * @since
 * *4.10.0*
 *
 * The destination of the current processing pipeline on the local audio track.
 */
processorDestination: IAudioProcessor;
}
/**
 * @ignore
 */
export declare interface ILocalDataChannel extends IDataChannel {
/**
 * Sends binary data over the data channel.
 *
 * @param data The binary payload to send.
 */
send(data: ArrayBuffer): void;
}
/**
 * `LocalTrack` is the basic interface for local tracks, providing public methods for [LocalAudioTrack]{@link ILocalAudioTrack} and [LocalVideoTrack]{@link ILocalVideoTrack}.
 */
export declare interface ILocalTrack extends ITrack {
/**
 * @param event The event name.
 * @param listener See [track-updated]{@link event_track_updated}.
 */
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
 * @param event The event name.
 * @param listener See [track-ended]{@link event_track_ended}.
 */
on(event: "track-ended", listener: typeof event_track_ended): void;
/**
 * Adds an event listener.
 * @param event The event name.
 * @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
 */
on(event: "transceiver-updated", listener: typeof event_transceiver_updated): void;
/**
 * @since
 * *4.0.0*
 *
 * Enables/Disables the track.
 *
 * After a track is disabled, the SDK stops playing and publishing the track.
 *
 * > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
 * > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param enabled Whether to enable the track:
 * - `true`: Enable the track.
 * - `false`: Disable the track.
 */
setEnabled(enabled: boolean): Promise<void>;
/**
 * @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
 *
 * Gets the statistics of a local track.
 *
 * > Note: When getting the statistics of a local video track, you cannot get the `encodeDelay` property on iOS.
 */
getStats(): LocalVideoTrackStats | LocalAudioTrackStats;
/**
 * Gets the label of a local track.
 *
 * @return The label that the SDK returns may include:
 * - The [MediaDeviceInfo.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaDeviceInfo/label) property, if the track is created by calling `createMicrophoneAudioTrack` or `createCameraVideoTrack`.
 * - The `sourceId` property, if the track is created by calling `createScreenVideoTrack`.
 * - The [MediaStreamTrack.label](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/label) property, if the track is created by calling `createCustomAudioTrack` or `createCustomVideoTrack`.
 */
getTrackLabel(): string;
/**
 * Sends or stops sending the media data of the track.
 *
 * @since
 * *4.6.0*
 *
 * If the track is published, a successful call of `setMuted(true)` triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and a successful call of `setMuted(false)` triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
 *
 * > - Calling `setMuted(true)` does not stop capturing audio or video and takes shorter time to take effect than [[setEnabled]]. For details, see [What are the differences between setEnabled and setMuted?](https://docs.agora.io/en/interactive-live-streaming/develop/product-workflow?platform=web#setenabled-and-setmuted).
 * > - Do not call `setEnabled` and `setMuted` together.
 *
 * @param muted Whether to stop sending the media data of the track:
 * - `true`: Stop sending the media data of the track.
 * - `false`: Resume sending the media data of the track.
 */
setMuted(muted: boolean): Promise<void>;
/**
 * Closes a local track and releases the audio and video resources that it occupies.
 *
 * Once you close a local track, you can no longer reuse it.
 */
close(): void;
/**
 * Whether the media data of the track is currently muted (not being sent). See {@link setMuted}.
 */
muted: boolean;
/**
 * Whether the track is currently enabled. See {@link setEnabled}.
 */
enabled: boolean;
}
/**
* `LocalVideoTrack` is the basic interface for local video tracks, providing the main methods for local video tracks.
*
 * You can create a local video track by calling [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack} method.
*
* Inherited from `LocalVideoTrack`, [CameraVideoTrack]{@link ICameraVideoTrack} is an interface for the video captured by a local camera and adds several camera-related functions.
*/
export declare interface ILocalVideoTrack extends ILocalTrack {
/**
* @param event The event name.
* @param listener See [track-updated]{@link event_track_updated}.
*/
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
* @param event The event name.
* @param listener See [track-ended]{@link event_track_ended}.
*/
on(event: "track-ended", listener: typeof event_track_ended): void;
/**
* @param event The event name.
* @param listener See [video-element-visible-status]{@link event_video_element_visible_status}.
*/
on(event: "video-element-visible-status", listener: typeof event_video_element_visible_status): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}.
*/
on(event: "transceiver-updated", listener: typeof event_transceiver_updated): void;
/**
* Plays a remote video track on the web page.
*
* @param element Specifies a DOM element. The SDK will create a `` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
* - `string`: Specify the ID of the DOM element.
* - `HTMLElement`: Pass a DOM object.
* @param config Sets the playback configurations, such as display mode and mirror mode. See [[VideoPlayerConfig]]. By default, the SDK enables mirror mode for a local video track.
*/
play(element: string | HTMLElement, config?: VideoPlayerConfig): void;
/**
* Gets the statistics of a local video track.
*
* @deprecated from v4.1.0. Use [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats} and [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats} instead.
*/
getStats(): LocalVideoTrackStats;
/**
* @since
* *4.19.0*
*
* Clones the current video track to create a new video track.
*
* In scenarios such as video conferencing and online education, you can use this method to display the same video stream with two sets of display parameters, including resolution, frame rate, and bitrate. For example, you can have one display set to high-definition and the other to low-definition.
*
* @param config The encoding configuration for the new video track. You can pass in the SDK's built-in encoding configuration through [[VideoEncoderConfiguration]], or customize the video encoding configuration by passing in a [[VideoEncoderConfigurationPreset]].
* @param cloneTrack Whether to clone the current track. Default is `true`.
* @returns The newly generated video track.
*/
clone(config?: VideoEncoderConfiguration | VideoEncoderConfigurationPreset, cloneTrack?: boolean): ILocalVideoTrack;
/**
* @ignore
*
* Enables/Disables image enhancement and sets the options.
*
* > Notes:
* > - Agora is planning to sunset this built-in image enhancement feature.
* You can use the upgraded Image Enhancement Extension instead. For details, see
* [Use the Image Enhancement Extension](https://docs.agora.io/en/Video/beauty_effect_web_ng?platform=Web).
* > - This method supports the following browsers:
* > - Safari 12 or later.
* > - Chrome 65 or later.
* > - Firefox 70.0.1 or later.
* > - This function is not supported on mobile devices.
* > - If you enable dual-stream mode, the image enhancement options only apply to the high-quality video stream.
*
* @param enabled Whether to enable image enhancement:
* - `true`: Enable image enhancement.
* - `false`: Disable image enhancement.
* @param options Sets image enhancement options. See [[BeautyEffectOptions]].
*/
setBeautyEffect(enabled: boolean, options?: BeautyEffectOptions): Promise;
/**
* @since
* *4.1.0*
*
* Gets the data of the video frame being rendered.
*
* > You should call this method after calling [[play]]. Otherwise, the method call returns null.
*
* @returns An `ImageData` object that stores RGBA data. `ImageData` is a web API supported by the browser. For details, see [ImageData](https://developer.mozilla.org/en-US/docs/Web/API/ImageData).
*/
getCurrentFrameData(): ImageData;
/**
*
* @ignore
*/
getCurrentFrameImage(imageType: string, quality: number): Promise;
/**
* @since
* *4.2.0*
*
* Sets the video transmission optimization mode.
*
* You can call this method during a video call, a live streaming or screen sharing to dynamically change the optimization mode. For example, during the screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
*
* > Note: This method supports Chrome only.
*
* @param mode The video transmission optimization mode:
* - `"balanced"`: Uses the default optimization mode.
* - For a screen-sharing video track, the default transmission optimization strategy is to prioritizes clarity.
* - For the other types of video tracks, the SDK may reduce the frame rate or the sending resolution in poor network conditions.
* - `"detail"`: Prioritizes video quality.
* - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
* - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
* - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
* - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
* - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
*/
setOptimizationMode(mode: "balanced" | "motion" | "detail"): Promise;
/**
* @since
* *4.8.0*
*
* Gets the visibility of the `` HTML tag.
*
* After you call `localVideoTrack.play`, the SDK creates an [``](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `localVideoTrack.isPlaying` is `true` but you cannot see any video, call this method to check whether the `` tag is visible or not and learn the reason when the `` tag is invisible.
*
* @returns The [[CheckVideoVisibleResult]] object. If this method returns `undefined`, it may be due to the following reasons:
* - `localVideoTrack.isPlaying` is `false`.
* - The `` tag does not exist.
* - The `` tag is not created by calling the `play` method.
*/
getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
/**
* Inserts a `Processor` to the local video track.
*
* @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
*
* @returns The `Processor` instance.
*/
pipe(processor: IBaseProcessor): IBaseProcessor;
/**
* @since
* *4.10.0*
*
* Removes the `Processor` inserted to the local video track.
*/
unpipe(): void;
/**
* @since
* *4.10.0*
*
* The destination of the current processing pipeline on the local video track.
*/
processorDestination: IBaseProcessor;
/**
* @since
* *4.17.0*
*
* Replaces the local video track.
*
* You can call this method before or after publishing the local video stream:
* - If you call this method before publishing, the new video track is played locally.
* - If you call this method after publishing, the new video track is received by the remote user.
*
* The new video track can be retrieved by the {@link ILocalVideoTrack.getMediaStreamTrack} or `mediaStream.getVideoTracks` method. You can choose to either stop or retain the replaced track.
*
* > Notes:
* > - This method supports Chrome 65+, Safari, and the latest Firefox.
* > - This method might not take effect on some mobile devices.
* > - Agora recommends switching between video tracks that are of the same type and have the same encoder configurations for the following reasons:
* > - If the video track types are different, such as replacing a `CameraVideoTrack` object with a `ScreenVideoTrack` object, the video is flipped horizontally due to the mirror effect enabled by default on `CameraVideoTrack` (see {@link VideoPlayerConfig.mirror} for details).
* > - If the encoder configurations (`encoderConfig`) are different, the actual sending resolution or frame rate might be different from what you set.
* > - The subscriber will not be notified if the track gets replaced.
* > - To switch the media input devices, Agora recommends using {@link ICameraVideoTrack.setDevice}.
*
* **Example**
* ```javascript
* // Current video track
* const localVideoTrack = await AgoraRTC.createCameraVideoTrack();
* // Gets the new video track (option one)
* const newTrack = localVideoTrack.getMediaStreamTrack();
* // Gets the new video track (option two)
* const newTrack = await navigator.mediaDevices.getUserMedia({audio: true, video: true}).then(mediaStream => mediaStream.getVideoTracks()[0]);
* // Replaces and stops the current video track
* await localVideoTrack.replaceTrack(newTrack, true);
* ```
* @param track The new video track, which is a [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
* @param stopOldTrack Whether to stop the old video track:
* - true: Stops the old video track.
* - false: Retains the old video track.
*/
replaceTrack(track: MediaStreamTrack, stopOldTrack: boolean): Promise;
/**
* Sets the video encoder configurations, such as resolution, frame rate, and bitrate.
*
* @param config The video encoder configurations. You can pass either [[VideoEncoderConfigurationPreset]] or a customized [[VideoEncoderConfiguration]] object.
*/
setEncoderConfiguration(config: VideoEncoderConfiguration | VideoEncoderConfigurationPreset): Promise;
/**
* Add the SEI data to the H.264 video stream.
*
* @param config SEI data.
*/
sendSeiData(sei: Uint8Array): void;
}
/**
* Configuration for the video moderation service. Used in the {@link setImageModeration} method.
*/
declare interface ImageModerationConfiguration {
/**
* Interval for taking video screenshots (ms), with a minimum value of `1000`.
*/
interval: number;
/**
* Additional information, with a maximum length of 1024 bytes.
*
* The SDK sends the screenshots and additional information on the video content to the Agora server. Once the video screenshot and upload process is completed, the Agora server sends the additional information and the callback notification to your server.
*/
extraInfo?: string;
/**
* @ignore
*
* NOTE(review): presumably identifies the third-party moderation vendor — confirm before documenting publicly.
*/
vendor?: string;
}
/**
* Connection state between the SDK and the third-party video moderation service.
*
* The value names mirror those of {@link InspectState}.
*/
declare enum ImageModerationConnectionState {
/**
* The SDK is connecting to the third-party service.
*/
CONNECTING = "CONNECTING",
/**
* The SDK is reconnecting to the third-party service.
*/
RECONNECTING = "RECONNECTING",
/**
* The SDK is connected to the third-party service.
*/
CONNECTED = "CONNECTED",
/**
* The SDK has disconnected from the third-party service.
*/
CLOSED = "CLOSED"
}
/**
* Raw image data in typed-array form.
*
* NOTE(review): appears to be the resolved value of `getCurrentFrameImage` — confirm against the SDK implementation.
*/
export declare interface ImageTypedData {
/**
* The raw image bytes. Encoding presumably depends on the requested image type — TODO confirm.
*/
buffer: Uint8Array;
/**
* The width of the image in pixels.
*/
width: number;
/**
* The height of the image in pixels.
*/
height: number;
}
/**
* Inherited from [LocalAudioTrack]{@link ILocalAudioTrack}, `MicrophoneAudioTrack` is an interface for the audio sampled by a local microphone and adds several functions such as switching devices.
*
* You can create a local microphone audio track by calling [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}.
*/
export declare interface IMicrophoneAudioTrack extends ILocalAudioTrack {
/**
* Sets the device for sampling audio.
*
* > You can call the method either before or after publishing an audio track.
*
* @param deviceId The ID of the specified device. You can get the `deviceId` by calling [AgoraRTC.getMicrophones]{@link IAgoraRTC.getMicrophones}.
* @returns A `Promise` that resolves when the sampling device has been switched.
*/
setDevice(deviceId: string): Promise<void>;
/**
* @since
* *4.0.0*
*
* Enables/Disables the track.
*
* After a track is disabled, the SDK stops playing and publishing the track.
*
* > - Disabling a track does not trigger the [LocalTrack.on("track-ended")]{@link event_track_ended} event.
* > - If a track is published, disabling this track triggers the [user-unpublished]{@link IAgoraRTCClient.event_user_unpublished} event on the remote client, and re-enabling this track triggers the [user-published]{@link IAgoraRTCClient.event_user_published} event.
* > - Do not call `setEnabled` and `setMuted` together.
*
* @param enabled Whether to enable the track:
* - `true`: Enable the track.
* - `false`: Disable the track.
* @returns A `Promise` that resolves when the track has been enabled or disabled.
*/
setEnabled(enabled: boolean): Promise<void>;
}
/**
* @ignore
*/
export declare interface InspectConfiguration {
/**
* Interval (ms) between video inspections — NOTE(review): assumed analogous to {@link ImageModerationConfiguration.interval}; confirm.
*/
interval: number;
/**
* Prefix for uploaded inspection files — NOTE(review): semantics not shown here; confirm against the server-side contract.
*/
ossFilePrefix?: string;
/**
* Additional information — NOTE(review): assumed analogous to {@link ImageModerationConfiguration.extraInfo}; confirm.
*/
extraInfo?: string;
/**
* The inspection types to run: `"supervise"` and/or `"moderation"`.
*/
inspectType?: ("supervise" | "moderation")[];
}
/**
* @ignore
*
* Connection state of the video inspection service. The values mirror {@link ImageModerationConnectionState}.
*/
declare enum InspectState {
/**
* The SDK is connecting to the service.
*/
CONNECTING = "CONNECTING",
/**
* The SDK is reconnecting to the service.
*/
RECONNECTING = "RECONNECTING",
/**
* The SDK is connected to the service.
*/
CONNECTED = "CONNECTED",
/**
* The SDK has disconnected from the service.
*/
CLOSED = "CLOSED"
}
/**
* `RemoteAudioTrack` is the basic interface for the remote audio track.
*
* You can get a remote audio track from the [AgoraRTCRemoteUser.audioTrack]{@link IAgoraRTCRemoteUser.audioTrack} object after calling [subscribe]{@link IAgoraRTCClient.subscribe}.
*/
export declare interface IRemoteAudioTrack extends IRemoteTrack {
/**
* Gets the statistics of a remote audio track.
*
* @return A [[RemoteAudioTrackStats]] object.
*/
getStats(): RemoteAudioTrackStats;
/**
* Plays a remote audio track.
*
* > When playing the audio track, you do not need to pass any DOM element.
*/
play(): void;
/**
* @since
* *4.1.0*
*
* Sets the audio playback device, for example, the speaker.
*
* > This method supports Chrome and Edge on desktop devices only. Other browsers throw a `NOT_SUPPORTED` error when calling this method.
* @param deviceId Device ID, which can be retrieved by calling [[getPlaybackDevices]].
* @returns A `Promise` that resolves when the playback device has been switched.
*/
setPlaybackDevice(deviceId: string): Promise<void>;
/**
* Sets the callback for getting raw audio data in PCM format.
*
* After you successfully set the callback, the SDK constantly returns the audio frames of a remote audio track in this callback by using [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer).
*
* > You can set the `frameSize` parameter to determine the frame size in each callback, which affects the interval between the callbacks. The larger the frame size, the longer the interval between them.
*
* ```js
* track.setAudioFrameCallback((buffer) => {
* for (let channel = 0; channel < buffer.numberOfChannels; channel += 1) {
* // Float32Array with PCM data
* const currentChannelData = buffer.getChannelData(channel);
* console.log("PCM data in channel", channel, currentChannelData);
* }
* }, 2048);
*
* // ....
* // Stop getting the raw audio data
* track.setAudioFrameCallback(null);
* ```
*
* @param audioFrameCallback The callback function for receiving the [AudioBuffer](https://developer.mozilla.org/en-US/docs/Web/API/AudioBuffer) object. If you set `audioFrameCallback` as `null`, the SDK stops getting raw audio data.
* @param frameSize The number of samples of each audio channel that an `AudioBuffer` object contains. You can set `frameSize` as 256, 512, 1024, 2048, 4096, 8192, or 16384. The default value is 4096.
*/
setAudioFrameCallback(audioFrameCallback: null | ((buffer: AudioBuffer) => void), frameSize?: number): void;
/**
* Sets the volume of a remote audio track.
*
* @param volume The volume. The value ranges from 0 (mute) to 100 (maximum). A value of 100 is the current volume.
*/
setVolume(volume: number): void;
/**
* Gets the audio level of a remote audio track.
*
* @returns The audio level. The value range is [0,1]. 1 is the highest audio level.
* Usually a user with audio level above 0.6 is a speaking user.
*/
getVolumeLevel(): number;
/**
* @since
* *4.13.0*
*
* Inserts a `Processor` to the remote audio track.
*
* @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
*
* @returns The `Processor` instance.
*/
pipe(processor: IAudioProcessor): IAudioProcessor;
/**
* @since
* *4.13.0*
*
* Removes the `Processor` inserted to the remote audio track.
*/
unpipe(): void;
/**
* @since
* *4.13.0*
*
* The destination of the current processing pipeline on the remote audio track.
*/
processorDestination: IAudioProcessor;
}
/**
* @ignore
*
* A data channel on the remote side. Adds no members beyond {@link IDataChannel}.
*/
export declare interface IRemoteDataChannel extends IDataChannel {
}
/**
* `RemoteTrack` is the basic interface for remote tracks, providing public methods for [RemoteAudioTrack]{@link IRemoteAudioTrack} and [RemoteVideoTrack]{@link IRemoteVideoTrack}.
*/
export declare interface IRemoteTrack extends ITrack {
/**
* @param event The event name.
* @param listener See [track-updated]{@link event_track_updated}.
*/
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
* @param event The event name.
* @param listener See [first-frame-decoded]{@link event_first_frame_decoded}.
*/
on(event: "first-frame-decoded", listener: typeof event_first_frame_decoded): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}.
*/
on(event: "transceiver-updated", listener: typeof event_transceiver_updated_2): void;
/**
* Gets the `uid` of the remote user who publishes the remote track.
*
* @return The `uid` of the remote user.
*/
getUserId(): UID;
/**
* Gets the statistics of a remote track.
*
* @deprecated from v4.1.0. Use [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats} and [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats} instead.
* @return A [[RemoteAudioTrackStats]] or [[RemoteVideoTrackStats]] object.
*/
getStats(): RemoteAudioTrackStats | RemoteVideoTrackStats;
}
/**
* `RemoteVideoTrack` is the basic interface for the remote video track.
*
* You can get a remote video track from the [AgoraRTCRemoteUser.videoTrack]{@link IAgoraRTCRemoteUser.videoTrack} object after calling [subscribe]{@link IAgoraRTCClient.subscribe}.
*/
export declare interface IRemoteVideoTrack extends IRemoteTrack {
/**
* @param event The event name.
* @param listener See [track-updated]{@link event_track_updated}.
*/
on(event: "track-updated", listener: typeof event_track_updated): void;
/**
* @param event The event name.
* @param listener See [video-state-changed]{@link event_video_state_changed}.
*/
on(event: "video-state-changed", listener: typeof event_video_state_changed): void;
/**
* @param event The event name.
* @param listener See [first-frame-decoded]{@link event_first_frame_decoded}.
*/
on(event: "first-frame-decoded", listener: typeof event_first_frame_decoded): void;
/**
* @param event The event name.
* @param listener See [video-element-visible-status]{@link event_video_element_visible_status_2}.
*/
on(event: "video-element-visible-status", listener: typeof event_video_element_visible_status_2): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}.
*/
on(event: "transceiver-updated", listener: typeof event_transceiver_updated_2): void;
/**
* Adds an event listener.
* @param event The event name.
* @param listener See [IRemoteVideoTrack.sei-received]{@link event_sei_received}.
*/
on(event: "sei-received", listener: typeof event_sei_received): void;
/**
* Gets the statistics of a remote video track.
*
* @return A [[RemoteVideoTrackStats]] object.
*/
getStats(): RemoteVideoTrackStats;
/**
* Plays a remote video track on the web page.
*
* @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
* - `string`: Specify the ID of the DOM element.
* - `HTMLElement`: Pass a DOM object.
* @param config Sets the playback configurations, such as display mode and mirror mode. See [[VideoPlayerConfig]]. By default, the SDK enables mirror mode for a local video track.
*/
play(element: string | HTMLElement, config?: VideoPlayerConfig): void;
/**
* @since
* *4.1.0*
*
* Gets the data of the video frame being rendered.
*
* > You should call this method after calling [[play]]. Otherwise, the method call returns null.
*
* @returns An `ImageData` object that stores RGBA data. `ImageData` is a web API supported by the browser. For details, see [ImageData](https://developer.mozilla.org/en-US/docs/Web/API/ImageData).
*/
getCurrentFrameData(): ImageData;
/**
* @since
* *4.8.0*
*
* Gets the visibility of the `<video>` HTML tag.
*
* After you call `remoteVideoTrack.play`, the SDK creates an [`<video>`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/video) tag for playing video tracks. When `remoteVideoTrack.isPlaying` is `true` but you cannot see any video, call this method to check whether the `<video>` tag is visible or not and learn the reason when the `<video>` tag is invisible.
*
* @returns The [[CheckVideoVisibleResult]] object. If this method returns `undefined`, it may be due to the following reasons:
* - `remoteVideoTrack.isPlaying` is `false`.
* - The `<video>` tag does not exist.
* - The `<video>` tag is not created by calling the `play` method.
*/
getVideoElementVisibleStatus(): CheckVideoVisibleResult | undefined;
/**
* @since
* *4.13.0*
*
* Inserts a `Processor` to the remote video track.
*
* @param processor The `Processor` instance. Each extension has a corresponding type of `Processor`.
*
* @returns The `Processor` instance.
*/
pipe(processor: IBaseProcessor): IBaseProcessor;
/**
* @since
* *4.13.0*
*
* Removes the `Processor` inserted to the remote video track.
*/
unpipe(): void;
/**
* @since
* *4.13.0*
*
* The destination of the current processing pipeline on the remote video track.
*/
processorDestination: IBaseProcessor;
}
/**
* `Track` is the basic interface for media tracks, providing the public methods shared by local and remote tracks.
*/
export declare interface ITrack extends EventEmitter {
/**
* The type of a media track:
* - `"audio"`: Audio track.
* - `"video"`: Video track.
*/
trackMediaType: "audio" | "video";
/**
* Whether a media track is playing on the webpage:
* - `true`: The media track is playing on the webpage.
* - `false`: The media track is not playing on the webpage.
*/
isPlaying: boolean;
/**
* Gets the ID of a media track, a unique identifier generated by the SDK.
*
* @return The media track ID.
*/
getTrackId(): string;
/**
* Gets a [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
*
* @return A [MediaStreamTrack](https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack) object.
*/
getMediaStreamTrack(): MediaStreamTrack;
/**
* Gets the [RTCRtpTransceiver](https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver) instance of the current track.
*
* This method is currently mainly used for end-to-end encryption of video streams (Beta).
*
* > If the SDK experiences a reconnection, the `RTCRtpTransceiver` instance corresponding to the current track might change. You can obtain the new `RTCRtpTransceiver` instance through the following callbacks:
* > - For a local track: [ILocalTrack.transceiver-updated]{@link event_transceiver_updated}
* > - For a remote track: [IRemoteTrack.transceiver-updated]{@link event_transceiver_updated_2}
*
* @param type The type of the video stream. See {@link StreamType}.
* @returns The [RTCRtpTransceiver](https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver) instance of the current track.
*/
getRTCRtpTransceiver(type?: StreamType): RTCRtpTransceiver | undefined;
/**
* Plays a media track on the webpage.
*
* @param element Specifies a DOM element. The SDK will create a `<video>` element under the specified DOM element to play the video track. You can specify a DOM element in either of the following ways:
* - `string`: Specify the ID of the DOM element.
* - `HTMLElement`: Pass a DOM object.
*/
play(element?: string | HTMLElement): void;
/**
* Stops playing the media track.
*/
stop(): void;
}
/**
* The configurations for CDN live stream transcoding. To be used when you call [setLiveTranscoding]{@link IAgoraRTCClient.setLiveTranscoding}.
*/
export declare interface LiveStreamingTranscodingConfig {
/**
* The audio bitrate (Kbps) of the CDN live stream.
*
* A positive integer. The default value is 48, and the highest value is 128.
*/
audioBitrate?: number;
/**
* The number of audio channels for the CDN live stream.
*
* Agora recommends choosing 1 (mono), or 2 (stereo) audio channels. Special players are required if you choose 3, 4, or 5.
*
* - 1: (Default) Mono
* - 2: Stereo
* - 3: Three audio channels
* - 4: Four audio channels
* - 5: Five audio channels
*/
audioChannels?: 1 | 2 | 3 | 4 | 5;
/**
* The audio sampling rate:
*
* - 32000: 32 kHz
* - 44100: 44.1 kHz
* - 48000: (Default) 48 kHz
*/
audioSampleRate?: 32000 | 44100 | 48000;
/**
* The background color in RGB hex.
*
* Value only. Do not include a preceding #. The default value is 0x000000.
*/
backgroundColor?: number;
/**
* The height of the video in pixels.
*
* A positive integer, the default value is 360.
*
* - When pushing video streams to the CDN, ensure that `height` is at least 64; otherwise, the Agora server adjusts the value to 64.
* - When pushing audio streams to the CDN, set `width` and `height` as 0.
*/
height?: number;
/**
* The width of the video in pixels.
*
* A positive integer, the default value is 640.
*
* - When pushing video streams to the CDN, ensure that `width` is at least 64; otherwise, the Agora server adjusts the value to 64.
* - When pushing audio streams to the CDN, set `width` and `height` as 0.
*/
width?: number;
/**
* @ignore
*/
lowLatency?: boolean;
/**
* The bitrate (Kbps) of the output video stream.
*
* The default value is 400.
*/
videoBitrate?: number;
/**
* The video codec profile type.
*
* Set it as `66`, `77`, or `100` (default). If you set this parameter to any other value, the Agora server adjusts it to the default value `100`.
*
* - `66`: Baseline video codec profile. Generally used for video calls on mobile phones.
* - `77`: Main video codec profile. Generally used for mainstream electronic devices, such as MP4 players, portable video players, PSP, and iPads.
* - `100`: (Default) High video codec profile. Generally used for high-resolution broadcasts or television.
*/
videoCodecProfile?: 66 | 77 | 100;
/**
* The video frame rate (fps) of the CDN live stream.
*
* The default value is 15. The Agora server adjusts any value over 30 to 30.
*/
videoFrameRate?: number;
/**
* The video GOP in frames.
*
* The default value is 30.
*/
videoGop?: number;
/**
* @deprecated
*
* Watermark images for the CDN live stream.
*/
images?: LiveStreamingTranscodingImage[];
/**
* Watermark image for the CDN live stream.
*/
watermark?: LiveStreamingTranscodingImage;
/**
* Background image for the CDN live stream.
*/
backgroundImage?: LiveStreamingTranscodingImage;
/**
* Manages the user layout configuration in the CDN live streaming.
*
* Agora supports a maximum of 17 transcoding users in a CDN streaming channel.
*/
transcodingUsers?: LiveStreamingTranscodingUser[];
/**
* Extra user-defined information sent along with the transcoded stream — NOTE(review): exact semantics not shown here; confirm against the Agora CDN streaming documentation.
*/
userConfigExtraInfo?: string;
}
/**
* Configurations for the watermark and background images to put on top of the video in [LiveStreamingTranscodingConfig]{@link LiveStreamingTranscodingConfig}.
*/
export declare interface LiveStreamingTranscodingImage {
/**
* The HTTP/HTTPS URL address of the image on the video.
*
* Supports online PNG only.
*/
url: string;
/**
* The horizontal distance (pixel) between the image's top-left corner and the video's top-left corner.
*
* The default value is `0`.
*/
x?: number;
/**
* The vertical distance (pixel) between the image's top-left corner and the video's top-left corner.
*
* The default value is `0`.
*/
y?: number;
/**
* The width (pixel) of the image.
*
* The default value is `160`.
*/
width?: number;
/**
* The height (pixel) of the image.
*
* The default value is `160`.
*/
height?: number;
/**
* The transparency (alpha) level of the image.
*
* The value range is [0.0,1.0]:
* - 0.0: Completely transparent.
* - 1.0: (Default) Opaque.
*/
alpha?: number;
}
/**
* Manages the user layout configuration in [LiveStreamingTranscodingConfig]{@link LiveStreamingTranscodingConfig}.
*/
export declare interface LiveStreamingTranscodingUser {
/**
* The transparency level of the user's video.
*
* The value ranges between 0.0 and 1.0:
*
* - 0.0: Completely transparent.
* - 1.0: (Default) Opaque.
*/
alpha?: number;
/**
* The height of the video.
*
* The default value is 640.
*/
height?: number;
/**
* The user ID of the CDN live host.
*/
uid: UID;
/**
* The width of the video.
*
* The default value is 360.
*/
width?: number;
/**
* The position of the top-left corner of the video on the horizontal axis.
*
* The default value is 0.
*/
x?: number;
/**
* The position of the top-left corner of the video on the vertical axis.
*
* The default value is 0.
*/
y?: number;
/**
* The layer index of the video frame.
*
* An integer. The value range is [0,100].
*
* - 0: (Default) Bottom layer.
* - 100: Top layer.
*/
zOrder?: number;
/**
* The audio channel ranging between 0 and 5. The default value is 0.
* - 0: (default) Supports dual channels. Depends on the upstream of the broadcaster.
* - 1: The audio stream of the broadcaster uses the FL audio channel. If the broadcaster's upstream uses multiple audio channels, these channels are mixed into mono first.
* - 2: The audio stream of the broadcaster uses the FC audio channel. If the broadcaster's upstream uses multiple audio channels, these channels are mixed into mono first.
* - 3: The audio stream of the broadcaster uses the FR audio channel. If the broadcaster's upstream uses multiple audio channels, these channels are mixed into mono first.
* - 4: The audio stream of the broadcaster uses the BL audio channel. If the broadcaster's upstream uses multiple audio channels, these channels are mixed into mono first.
* - 5: The audio stream of the broadcaster uses the BR audio channel. If the broadcaster's upstream uses multiple audio channels, these channels are mixed into mono first.
*/
audioChannel?: number;
}
/**
* @ignore
*
* ----- Reserved content, not yet public -----
* - `cds`: (Optional) The configuration delivery service. Disabled by default. Contains the optional properties `hostname` and `port`.
*
* Default values:
* |`cds.hostname`| the hostname list contained in `accessPoints` |
* |`cds.port`|443|
*/
export declare type LocalAccessPointConfig = {
[serve in "log" | "report" | "cds"]?: {
hostname?: string[];
port?: number;
};
} & {
accessPoints: {
serverList: string[];
domain: string;
port?: number;
};
};
/**
* Information of the local audio track, which can be retrieved by calling [AgoraRTCClient.getLocalAudioStats]{@link IAgoraRTCClient.getLocalAudioStats}.
*/
export declare interface LocalAudioTrackStats {
/**
* The audio codec.
*
* - `"opus"`: The audio codec is OPUS.
* - `"aac"`: The audio codec is AAC.
* - `"PCMU"`: Reserved for future use.
* - `"PCMA"`: Reserved for future use.
* - `"G722"`: Reserved for future use.
*
* > Firefox does not support this property.
*/
codecType?: "opus" | "aac" | "PCMU" | "PCMA" | "G722";
/**
* The energy level of the sent audio.
*
* The value range is [0,32767].
*
* > This value is retrieved by calling WebRTC-Stats and may not be up-to-date. To get the real-time sound volume, call [LocalAudioTrack.getVolumeLevel]{@link ILocalAudioTrack.getVolumeLevel}.
*/
sendVolumeLevel: number;
/**
* The bitrate (bps) of the sent audio.
*/
sendBitrate: number;
/**
* The total bytes of the sent audio.
*/
sendBytes: number;
/**
* The total packets of the sent audio.
*/
sendPackets: number;
/**
* The total number of lost audio packets that were sent.
*
* > You can not get this property on Safari.
*/
sendPacketsLost: number;
/**
* Jitter (ms) of the audio packets that were sent.
*/
sendJitterMs: number;
/**
* Round-trip time delay (ms) of the audio packets that were sent.
*/
sendRttMs: number;
/**
* The packet loss rate of the sent audio in 400ms.
*/
currentPacketLossRate: number;
}
/**
* Information of the local video track, which can be retrieved by calling [AgoraRTCClient.getLocalVideoStats]{@link IAgoraRTCClient.getLocalVideoStats}.
*/
export declare interface LocalVideoTrackStats {
/**
* The video codec.
*
* - `"H264"`: The video codec is H.264.
* - `"H265"`: The video codec is H.265.
* - `"VP8"`: The video codec is VP8.
* - `"VP9"`: The video codec is VP9.
* - `"AV1X"`: Reserved for future use.
* - `"AV1"`: The video codec is AV1.
*
* > You can not get this property on Firefox.
*/
codecType?: "H264" | "H265" | "VP8" | "VP9" | "AV1X" | "AV1";
/**
* The total bytes of the sent video.
*/
sendBytes: number;
/**
* The frame rate (fps) of the sent video.
*
* > You can not get this property on Firefox.
*/
sendFrameRate?: number;
/**
* The frame rate (fps) of the captured video.
*
* > You can not get this property on Safari and Firefox.
*/
captureFrameRate?: number;
/**
* The total packets of the sent video.
*/
sendPackets: number;
/**
* The total number of lost video packets that were sent.
*
* > - You can not get this property on Safari.
* > - This property is inaccurate on Firefox.
*/
sendPacketsLost: number;
/**
* Jitter (ms) of the video packets that were sent.
*/
sendJitterMs: number;
/**
* Round-trip time delay (ms) of the video packets that were sent.
*/
sendRttMs: number;
/**
* The resolution height (pixel) of the sent video.
*/
sendResolutionHeight: number;
/**
* The resolution width (pixel) of the sent video.
*/
sendResolutionWidth: number;
/**
* The resolution height (pixel) of the captured video.
*/
captureResolutionHeight: number;
/**
* The resolution width (pixel) of the captured video.
*/
captureResolutionWidth: number;
/**
* The time (ms) required for encoding the captured video.
*/
encodeDelay?: number;
/**
* The bitrate (bps) of the sent video.
*/
sendBitrate: number;
/**
* The target bitrate (bps) of the sent video, namely the bitrate set in {@link VideoEncoderConfiguration}.
*/
targetSendBitrate: number;
/**
* The total duration of the sent video in seconds.
*/
totalDuration: number;
/**
* The total freeze time of the encoded video in seconds.
*/
totalFreezeTime: number;
/**
* The packet loss rate of the sent video in 400ms.
*/
currentPacketLossRate: number;
}
/**
* The video profile of the low-quality video stream. Set the video profile of the low-quality video stream when calling [setLowStreamParameter]{@link IAgoraRTCClient.setLowStreamParameter}.
*/
export declare interface LowStreamParameter {
/**
* Width of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
width: ConstrainULong;
/**
* Height of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
height: ConstrainULong;
/**
* Frame rate of the video (fps).
*
* You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
*
* For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
framerate?: ConstrainULong;
/**
* Bitrate of the video (Kbps).
*/
bitrate?: number;
}
/**
* Configurations for the audio track from the audio captured by a microphone. Set these configurations when calling [AgoraRTC.createMicrophoneAudioTrack]{@link IAgoraRTC.createMicrophoneAudioTrack}.
*/
export declare interface MicrophoneAudioTrackInitConfig {
/**
* The audio encoder configurations.
*
* You can set the audio encoder configurations in either of the following ways:
* - Pass the preset audio encoder configurations by using [[AudioEncoderConfigurationPreset]].
* - Pass your customized audio encoder configurations by using [[AudioEncoderConfiguration]].
*
* > Firefox does not support setting the audio encoding rate.
*/
encoderConfig?: AudioEncoderConfiguration | AudioEncoderConfigurationPreset;
/**
* Whether to enable acoustic echo cancellation:
* - `true`: Enable acoustic echo cancellation.
* - `false`: Do not enable acoustic echo cancellation.
*/
AEC?: boolean;
/**
* Whether to enable audio gain control:
* - `true`: Enable audio gain control.
* - `false`: Do not enable audio gain control.
*/
AGC?: boolean;
/**
* Whether to enable automatic noise suppression:
* - `true`: Enable automatic noise suppression.
* - `false`: Do not enable automatic noise suppression.
*/
ANS?: boolean;
/**
* @ignore
* NOTE(review): presumably toggles discontinuous transmission (DTX) for the audio encoder — not documented upstream; confirm before relying on it.
*/
DTX?: boolean;
/**
* Specifies the microphone ID.
*
* You can get a list of the available microphones by calling [AgoraRTC.getMicrophones]{@link IAgoraRTC.getMicrophones}.
*/
microphoneId?: string;
/**
* @ignore
* Specifies whether or not the audio track passes through WebAudio.
*/
bypassWebAudio?: boolean;
}
/**
* The last-mile network quality.
*
* Last mile refers to the connection between the local device and Agora edge server.
*
* - After the local user joins the channel, the SDK triggers the [AgoraRTCClient.on("network-quality")]{@link IAgoraRTCClient.event_network_quality} callback once every two seconds and provides the uplink and downlink last-mile network conditions of the user through this interface.
* - You can call [AgoraRTCClient.getRemoteNetworkQuality]{@link IAgoraRTCClient.getRemoteNetworkQuality} to get the network quality of all remote users to whom the local user subscribes.
*
* > The returned network quality is a relative value and is for reference only.
*/
export declare interface NetworkQuality {
/**
* The uplink network quality.
*
* It is calculated based on the uplink transmission bitrate, uplink packet loss rate, RTT (round-trip time) and jitter.
*
* - 0: The quality is unknown.
* - 1: The quality is excellent.
* - 2: The quality is good, but the bitrate is less than optimal.
* - 3: Users experience slightly impaired communication.
* - 4: Users can communicate with each other, but not very smoothly.
* - 5: The quality is so poor that users can barely communicate.
* - 6: The network is disconnected and users cannot communicate.
*/
uplinkNetworkQuality: 0 | 1 | 2 | 3 | 4 | 5 | 6;
/**
* The downlink network quality.
*
* It is calculated based on the downlink transmission bitrate, downlink packet loss rate, RTT (round-trip time) and jitter.
*
* - 0: The quality is unknown.
* - 1: The quality is excellent.
* - 2: The quality is good, but the bitrate is less than optimal.
* - 3: Users experience slightly impaired communication.
* - 4: Users can communicate with each other, but not very smoothly.
* - 5: The quality is so poor that users can barely communicate.
* - 6: The network is disconnected and users cannot communicate.
*/
downlinkNetworkQuality: 0 | 1 | 2 | 3 | 4 | 5 | 6;
}
/**
* The video transmission optimization mode:
* - `"motion"`: Prioritizes video smoothness.
* - `"detail"`: Prioritizes video quality.
*/
declare type OptimizationMode = "motion" | "detail";
/**
* Statistics of the remote audio track, such as connection and transmission statistics, which can be retrieved by calling [AgoraRTCClient.getRemoteAudioStats]{@link IAgoraRTCClient.getRemoteAudioStats}.
*/
export declare interface RemoteAudioTrackStats {
/**
* Transmission delay (ms).
*
* The delay (ms) between a remote client sending the audio and the local client receiving the audio.
*/
transportDelay: number;
/**
* The audio codec.
*
* - `"opus"`: The audio codec is OPUS.
* - `"aac"`: The audio codec is AAC.
* - `"pcmu"`: Reserved for future use.
* - `"pcma"`: Reserved for future use.
* - `"g722"`: Reserved for future use.
*
* > Firefox does not support this property.
*/
codecType?: "opus" | "aac" | "PCMU" | "PCMA" | "G722";
/**
* End-to-end delay (ms).
*
* The delay (ms) between a remote client sampling the audio and the local client playing the audio.
* This delay does not include the time spent in encoding at the remote client and the time spent in decoding at the local client.
*/
end2EndDelay: number;
/**
* The bitrate (bps) of the received audio.
*/
receiveBitrate: number;
/**
* The energy level of the received audio.
*
* The value range is [0,32767].
*
* > This value is retrieved by calling WebRTC-Stats and may not be up-to-date. To get the real-time sound volume, call [RemoteAudioTrack.getVolumeLevel]{@link IRemoteAudioTrack.getVolumeLevel}.
*/
receiveLevel: number;
/**
* The total bytes of the received audio.
*/
receiveBytes: number;
/**
* The delay (ms) between a remote client sending the audio and the local client playing the audio.
*
* > This property is inaccurate on Safari and Firefox.
*/
receiveDelay: number;
/**
* The total packets of the received audio.
*/
receivePackets: number;
/**
* The total number of lost audio packets that should be received.
*/
receivePacketsLost: number;
/**
* The number of packets discarded by the jitter buffer due to early or late arrival.
*/
receivePacketsDiscarded: number;
/**
* The packet loss rate of the received audio.
*/
packetLossRate: number;
/**
* The packet loss rate of the received audio in 400ms.
*
* > NOTE(review): the window length is inferred from the matching `LocalAudioTrackStats.currentPacketLossRate` field — confirm.
*/
currentPacketLossRate: number;
/**
* The total duration of the received audio in seconds.
*/
totalDuration: number;
/**
* The total freeze time of the received audio in seconds.
*/
totalFreezeTime: number;
/**
* The freeze rate of the received audio.
*/
freezeRate: number;
// NOTE(review): undocumented upstream — presumably the duration for which the
// remote audio has been published; unit (ms vs s) not visible here. Confirm.
publishDuration: number;
}
/**
* The stream fallback option. Set the stream fallback option when calling [setStreamFallbackOption]{@link IAgoraRTCClient.setStreamFallbackOption}.
*
*/
export declare enum RemoteStreamFallbackType {
/**
* 0: Disable the fallback.
*/
DISABLE = 0,
/**
* 1: Automatically subscribe to the low-video stream under poor network conditions. */
LOW_STREAM = 1,
/**
* 2: Subscribe to the low-quality video stream when the network conditions worsen, and subscribe to audio only when the conditions become too poor to support video transmission.
*/
AUDIO_ONLY = 2,
/**
* 3–8: NOTE(review): undocumented upstream — presumably fall back to a specific
* layer of the high-quality stream (SVC-related). Confirm before use.
*/
HIGH_STREAM_LAYER1 = 3,
HIGH_STREAM_LAYER2 = 4,
HIGH_STREAM_LAYER3 = 5,
HIGH_STREAM_LAYER4 = 6,
HIGH_STREAM_LAYER5 = 7,
HIGH_STREAM_LAYER6 = 8
}
/**
* The video type of the remote stream. Set the video type of the remote stream when calling [setRemoteVideoStreamType]{@link IAgoraRTCClient.setRemoteVideoStreamType}.
*/
export declare enum RemoteStreamType {
/**
* 0: High-quality video stream (high-bitrate, high-resolution).
*/
HIGH_STREAM = 0,
/**
* 1: Low-quality video stream (low-bitrate, low-resolution).
*/
LOW_STREAM = 1,
/**
* 4–9: NOTE(review): undocumented upstream — presumably select a specific
* layer of the high-quality stream (SVC-related). Confirm before use.
*/
HIGH_STREAM_LAYER1 = 4,
HIGH_STREAM_LAYER2 = 5,
HIGH_STREAM_LAYER3 = 6,
HIGH_STREAM_LAYER4 = 7,
HIGH_STREAM_LAYER5 = 8,
HIGH_STREAM_LAYER6 = 9
}
/**
* Statistics of the remote video track, such as connection and transmission statistics, which can be retrieved by calling [AgoraRTCClient.getRemoteVideoStats]{@link IAgoraRTCClient.getRemoteVideoStats}.
*/
export declare interface RemoteVideoTrackStats {
/**
* Transmission delay (ms).
*
* The delay (ms) between a remote client sending the video and the local client receiving the video.
*/
transportDelay: number;
/**
* The video codec.
*
* - `"H264"`: The video codec is H.264.
* - `"H265"`: The video codec is H.265.
* - `"VP8"`: The video codec is VP8.
* - `"VP9"`: The video codec is VP9.
* - `"AV1X"`: Reserved for future use.
* - `"AV1"`: The video codec is AV1.
*
* > You can not get this property on Firefox.
*/
codecType?: "H264" | "H265" | "VP8" | "VP9" | "AV1X" | "AV1";
/**
* End-to-end delay (ms).
*
* The delay (ms) a remote client capturing the video and the local client playing the video.
* This delay does not include the time spent in encoding at the remote client and the time spent in decoding at the local client.
*/
end2EndDelay: number;
/**
* The bitrate (bps) of the received video.
*/
receiveBitrate: number;
/**
* The delay (ms) between a remote client sending the video and the local client playing the video.
*
* > This property is inaccurate on Safari and Firefox.
*/
receiveDelay: number;
/**
* The total bytes of the received video.
*/
receiveBytes: number;
/**
* The frame rate (fps) of the decoded video.
*/
decodeFrameRate?: number;
/**
* The frame rate (fps) of the received video.
*/
receiveFrameRate?: number;
/**
* The rendering frame rate (fps) of the decoded video.
*/
renderFrameRate?: number;
/**
* The total packets of the received video.
*/
receivePackets: number;
/**
* The total number of lost video packets that should be received.
*/
receivePacketsLost: number;
/**
* The packet loss rate of the received video.
*/
packetLossRate: number;
/**
* The packet loss rate of the received video in 400ms.
*
* > NOTE(review): the window length is inferred from the matching `LocalVideoTrackStats.currentPacketLossRate` field — confirm.
*/
currentPacketLossRate: number;
/**
* The resolution height (pixel) of the received video.
*/
receiveResolutionHeight: number;
/**
* The resolution width (pixel) of the received video.
*/
receiveResolutionWidth: number;
/**
* The total duration of the received video in seconds.
*/
totalDuration: number;
/**
* The total freeze time of the received video in seconds.
*/
totalFreezeTime: number;
/**
* The freeze rate of the received video.
*/
freezeRate: number;
// NOTE(review): undocumented upstream — presumably the duration for which the
// remote video has been published; unit (ms vs s) not visible here. Confirm.
publishDuration: number;
}
/**
 * Given an object type `T` and a union of its keys `K`, builds a type that
 * requires exactly one of the keys in `K`: the chosen key keeps its original
 * type while every other key of `T` is forbidden (`?: never`). The final
 * conditional (`extends infer O ? ... : never`) flattens the intersection for
 * readable tooltips.
 *
 * NOTE(review): the generic parameter list was missing from the original
 * declaration (`T` and `K` were unresolved and `Omit` had no type arguments),
 * so it could not compile; it has been restored with the conventional
 * signature for this utility type — confirm against the SDK sources.
 */
declare type RequiredOnlyOneOf<T, K extends keyof T = keyof T> = {
    [P in K]: {
        [S in P]: T[S];
    } & {
        [U in keyof Omit<T, P>]?: never;
    };
}[K] extends infer O ? {
    [K in keyof O]: O[K];
} : never;
/**
* @ignore
* Retry-strategy parameters used by the SDK for internal requests.
*/
declare interface RetryConfiguration {
// Initial retry timeout — NOTE(review): presumably in ms; confirm against usage.
timeout: number;
// Factor applied to the timeout between attempts (backoff multiplier) —
// NOTE(review): inferred from the name; confirm.
timeoutFactor: number;
// Maximum number of retry attempts.
maxRetryCount: number;
// Upper bound on the (grown) retry timeout — NOTE(review): presumably in ms; confirm.
maxRetryTimeout: number;
}
/**
* @ignore
* Configuration related to Agora RTM (real-time messaging) signaling.
*/
declare interface RTMConfiguration {
// NOTE(review): presumably whether the access point supports RTM — inferred from the name; confirm.
apRTM: boolean;
// NOTE(review): RTM flag bits; semantics not visible in this file.
rtmFlag: number;
}
/**
* The preset video encoder configurations for screen sharing.
*
* You can pass the preset video encoder configurations when calling [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
*
* The following table lists all the preset video profiles for screen sharing.
*
* | Video Profile | Resolution (Width×Height) | Frame Rate (fps) |
* | -------- | --------------- | ----------- |
* | "480p" | 640 × 480 | 5 |
* | "480p_1" | 640 × 480 | 5 |
* | "480p_2" | 640 × 480 | 30 |
* | "480p_3" | 640 × 480 | 15 |
* | "720p" | 1280 × 720 | 5 |
* | "720p_1" | 1280 × 720 | 5 |
* | "720p_2" | 1280 × 720 | 30 |
* | "720p_3" | 1280 × 720 | 15 |
* | "720p_auto" ① | 1280 × 720 | 30 |
* | "1080p" | 1920 × 1080 | 5 |
* | "1080p_1" | 1920 × 1080 | 5 |
* | "1080p_2" | 1920 × 1080 | 30 |
* | "1080p_3" | 1920 × 1080 | 15 |
*
* > ① `"720p_auto"` is only recommended to be set on Safari to ensure dynamic adjustment of the encoding resolution. For details, see the release notes.
*
* @see {@link ScreenVideoTrackInitConfig.encoderConfig}
*/
export declare type ScreenEncoderConfigurationPreset = keyof typeof SUPPORT_SCREEN_ENCODER_CONFIG_LIST;
/**
 * The type of the source for screen sharing.
 * - `"screen"`: Sharing the whole screen.
 * - `"application"`: Sharing all windows of an app.
 * - `"window"`: Sharing a window of an app.
 */
export declare type ScreenSourceType = "screen" | "application" | "window";
/**
* Configurations for the video track for screen sharing. Set these configurations when calling [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
*/
export declare interface ScreenVideoTrackInitConfig {
/**
* The video encoder configurations for screen sharing.
*
* You can set the video encoder configurations in either of the following ways:
* - Pass the preset video encoder configurations by using [[ScreenEncoderConfigurationPreset]].
* - Pass your customized video encoder configurations by using [[VideoEncoderConfiguration]].
* - Leave this property empty to use the SDK's default value, `"1080p_2"` (resolution: 1920 × 1080, frame rate: 30 fps, bitrate: 3000 Kbps).
*/
encoderConfig?: VideoEncoderConfiguration | ScreenEncoderConfigurationPreset;
/**
* The `sourceId` when you share the screen through Electron.
*/
electronScreenSourceId?: string;
/**
* The `extensionId` when you share the screen with a Chrome extension.
*/
extensionId?: string;
/**
*
* @deprecated from v4.17.1. Use {@link displaySurface} instead.
*
* The type of the source for screen sharing.
*/
screenSourceType?: ScreenSourceType;
/**
* @since
* *4.2.0*
*
* The video transmission optimization mode:
* - `"detail"`: Prioritizes video quality.
*   - The SDK ensures high-quality images by automatically calculating a minimum bitrate based on the capturing resolution and frame rate. No matter how poor the network condition is, the sending bitrate will never be lower than the minimum value.
*   - In most cases, the SDK does not reduce the sending resolution, but may reduce the frame rate.
* - `"motion"`: Since 4.21.0, the SDK prioritizes video smoothness.
*   - In poor network conditions, the SDK reduces the sending bitrate to minimize video freezes.
*   - In most cases, the SDK does not reduce the frame rate, but may reduce the sending resolution.
*
* You can set this property during a video call, a live streaming or screen sharing to dynamically change the optimization mode. For example, during the screen sharing, before you change the shared content from text to video, you can change the optimization mode from `"detail"` to `"motion"` to ensure smoothness in poor network conditions.
*
* > Note: This property supports Chrome only.
*/
optimizationMode?: "motion" | "detail";
/**
* @ignore
*
* @since
* *4.18.0*
*
* Configurations for Scalable Video Coding (SVC).
*
* You can set the configurations using one of the following options:
* - Use the preset SVC configurations provided by the SDK through {@link SVCConfigurationPreset}.
* - Use your custom SVC configurations through {@link SVCConfiguration}.
*
* NOTE(review): the property name is misspelled upstream ("scalabilty"); it is
* part of the public API and cannot be renamed without breaking callers.
*/
scalabiltyMode?: SVCConfiguration | SVCConfigurationPreset;
/**
*
* @since
* *4.17.1*
*
* The pre-selected pane in the media picker. See [displaySurface](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#displaySurface) for details.
*
* > Note: This property is supported on Chrome 107 and later, as well as Edge 107 and later.
*/
displaySurface?: "browser" | "window" | "monitor";
/**
*
* @since
* *4.17.1*
*
* Whether to allow the user to share the current tab:
* - `"include"`: (Default) Allows the user to share the current tab.
* - `"exclude"`: Prevents the user from sharing the current tab.
*
* See [displaySurface](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#displaySurface) for details.
*
* > Note:
* > - This property is defaulted to `"exclude"` on Chrome 107. For better compatibility with earlier versions, the SDK changes the default value to "include", which ensures that users can still share the current tab after upgrading to Chrome 107.
* > - This property is supported on Chrome 107 and later, as well as Edge 107 and later.
*/
selfBrowserSurface?: "include" | "exclude";
/**
*
* @since
* *4.17.1*
*
* Whether to allow the user to dynamically switch between shared tabs:
* - `"include"`: (Default) The user can dynamically switch between shared tabs.
* - `"exclude"`: The user cannot dynamically switch between shared tabs.
*
* See [surfaceSwitching](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#surfaceSwitching) for details.
*
* > Note: This property is supported on Chrome 107 and later, as well as Edge 107 and later.
*/
surfaceSwitching?: "include" | "exclude";
/**
*
* @since
* *4.17.1*
*
* Whether to capture system audio:
* - `"include"`: (Default) Captures system audio.
* - `"exclude"`: Avoids capturing system audio.
*
* See [systemAudio](https://developer.chrome.com/docs/web-platform/screen-sharing-controls/#systemAudio) for details.
*
* > Note: This property is supported on Chrome 105 and later (Windows only), as well as Edge 105 and later (Windows only).
*/
systemAudio?: "include" | "exclude";
}
/**
* @ignore
* Union of the keys of the internal `AudioCodec` enum (declared elsewhere in this file).
*/
export declare type SDK_AUDIO_CODEC = keyof typeof AudioCodec;
/**
* The codec that the Web browser uses for encoding.
* - `"vp8"`: Use VP8 for encoding.
* - `"h264"`: Use H.264 for encoding.
* - `"h265"`: Use H.265 for encoding. (NOTE(review): present in the `VideoCodec` enum but omitted from the original description.)
* - `"vp9"`: (Beta) Use VP9 for encoding.
* - `"av1"`: Use AV1 for encoding.
*
* > Safari 12.1 or earlier does not support the VP8 codec.
*/
export declare type SDK_CODEC = keyof typeof VideoCodec;
/**
* The channel profile.
*
* The SDK differentiates channel profiles and applies different optimization algorithms accordingly. For example, it prioritizes smoothness and low latency for a video call, and prioritizes video quality for a video broadcast.
*
* The SDK supports the following channel profiles:
* - `"live"`: Sets the channel profile as live broadcast. You need to go on to call [setClientRole]{@link IAgoraRTCClient.setClientRole} to set the client as either a host or an audience. A host can send and receive audio or video, while an audience can only receive audio or video.
* - `"rtc"`: Sets the channel profile as communication. It is used for a one-on-one call or a group call where all users in the channel can converse freely.
* - `"p2p"`: NOTE(review): present in the type but not publicly documented — presumably a peer-to-peer profile; confirm before use.
*/
export declare type SDK_MODE = "live" | "rtc" | "p2p";
/**
* @ignore
* Internal high/low stream selector; mirrors the first two members of
* {@link RemoteStreamType}.
*/
declare enum StreamType {
/**
* 0: High-quality video stream (high-bitrate, high-resolution).
*/
HIGH_STREAM = 0,
/**
* 1: Low-quality video stream (low-bitrate, low-resolution).
*/
LOW_STREAM = 1
}
/**
* @ignore
*/
declare const SUPPORT_SCREEN_ENCODER_CONFIG_LIST: Record;
/**
* @ignore
*/
declare const SUPPORT_SVC_CONFIG_LIST: Record;
/**
* @ignore
*/
declare const SUPPORT_VIDEO_ENCODER_CONFIG_LIST: Record;
/**
* @ignore
* @since
* *4.18.0*
*
* Custom SVC encoding configurations.
*
* You can control the SVC configurations for local video by passing `SVCConfiguration` in the following methods:
* - [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}
* - [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}
* - [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}
*/
export declare interface SVCConfiguration {
/**
* Specifies the number of spatial layers in SVC.
*/
numSpatialLayers: 1 | 2 | 3;
/**
* Specifies the number of temporal layers in SVC.
*/
numTemporalLayers: 1 | 3;
}
/**
* @ignore
*
* @since
* *4.18.0*
*
* The preset SVC configurations provided by the SDK.
* - `"1SL1TL"`: 1 spatial layer, 1 temporal layer.
* - `"3SL3TL"`: 3 spatial layers, 3 temporal layers.
* - `"2SL3TL"`: 2 spatial layers, 3 temporal layers.
*
* You can control the SVC configurations for local video by passing these preset values in the following methods:
* - [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack}
* - [AgoraRTC.createCustomVideoTrack]{@link IAgoraRTC.createCustomVideoTrack}
* - [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}
*
* @see {@link SVCConfiguration} for customized configurations.
*/
export declare type SVCConfigurationPreset = keyof typeof SUPPORT_SVC_CONFIG_LIST;
/**
* @ignore
* The configuration of your TURN server. Used when calling [setTurnServer]{@link IAgoraRTCClient.setTurnServer}.
*/
export declare interface TurnServerConfig {
/**
* The URL of your TURN server. ASCII characters only.
*/
turnServerURL: string;
/**
* The password of your TURN server. ASCII characters only.
*/
password: string;
/**
* The UDP port(s) you want to add to your TURN server.
*
* NOTE(review): the original descriptions for `udpport` and `tcpport` were swapped relative to the property names; corrected here to match the names.
*/
udpport?: number;
/**
* The username of your TURN server. ASCII characters only.
*/
username: string;
/**
* Whether to force data transfer by the TURN Server:
* - `true`: Force data transfer.
* - `false`: (default) Do not force data transfer.
*/
forceturn?: boolean;
/**
* The TCP port(s) you want to add to your TURN server.
*
* NOTE(review): the original descriptions for `udpport` and `tcpport` were swapped relative to the property names; corrected here to match the names.
*/
tcpport?: number;
// NOTE(review): undocumented upstream — presumably whether to use a secure (TLS) connection to the TURN server; confirm.
security?: boolean;
}
/**
 * The user ID that identifies a user in the channel.
 *
 * Within one channel, every user must have a unique ID of the same data type
 * (all numbers or all strings).
 *
 * For the best end-user experience, Agora recommends numeric user IDs. See
 * {@link join} for details.
 */
export declare type UID = string | number;
/**
* @ignore
* The video codecs supported by the SDK; the public string union
* {@link SDK_CODEC} is derived from this enum's keys.
*/
declare enum VideoCodec {
h264 = "h264",
h265 = "h265",
vp8 = "vp8",
vp9 = "vp9",
av1 = "av1"
}
/**
* `VideoEncoderConfiguration` is the interface that defines the video encoder configurations.
*
* You can customize the video encoder configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
*
* The SDK provides the preset video encoder configurations. For more information, see [[VideoEncoderConfigurationPreset]].
*
* > The actual bitrate may differ slightly from the value you set due to the limitations of the operation system or the web browser. Agora recommends setting the bitrate between 100 Kbps and 5000 Kbps.
*/
export declare interface VideoEncoderConfiguration {
/**
* Width of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
width?: number | ConstrainLong;
/**
* Height of the video.
*
* You can pass a `number`, or a constraint such as `{ max: 1280, min: 720 }`.
*
* For more details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
height?: number | ConstrainLong;
/**
* Frame rate of the video (fps).
*
* You can pass a `number`, or a constraint such as `{ max: 30, min: 5 }`.
*
* For details about the constraint, see [ConstrainLong]{@link ConstrainLong}.
*/
frameRate?: number | ConstrainLong;
/**
* The minimum bitrate of the video (Kbps).
*/
bitrateMin?: number;
/**
* The maximum bitrate of the video (Kbps).
*/
bitrateMax?: number;
/**
* @ignore
* NOTE(review): presumably scales down the encoded resolution by this factor,
* mirroring `RTCRtpEncodingParameters.scaleResolutionDownBy` — confirm.
*/
scaleResolutionDownBy?: number;
}
/**
*
* The preset video encoder configurations.
*
* You can pass the preset video encoder configurations when calling [AgoraRTC.createCameraVideoTrack]{@link IAgoraRTC.createCameraVideoTrack} or [AgoraRTC.createScreenVideoTrack]{@link IAgoraRTC.createScreenVideoTrack}.
*
* The following table lists all the preset video profiles. The SDK uses `"480p_1"` by default.
*
* | Video Profile | Resolution (Width×Height) | Frame Rate (fps) | Chrome | Firefox | Safari |
* | -------- | --------------- | ----------- | ------ | ------- | ------ |
* | 120p | 160 × 120 | 15 | ✓ | | |
* | 120p_1 | 160 × 120 | 15 | ✓ | | |
* | 120p_3 | 120 × 120 | 15 | ✓ | | |
* | 180p | 320 × 180 | 15 | ✓ | | |
* | 180p_1 | 320 × 180 | 15 | ✓ | | |
* | 180p_3 | 180 × 180 | 15 | ✓ | | |
* | 180p_4 | 240 × 180 | 15 | ✓ | | |
* | 240p | 320 × 240 | 15 | ✓ | | |
* | 240p_1 | 320 × 240 | 15 | ✓ | | |
* | 240p_3 | 240 × 240 | 15 | ✓ | | |
* | 240p_4 | 424 × 240 | 15 | ✓ | | |
* | 360p | 640 × 360 | 15 | ✓ | | |
* | 360p_1 | 640 × 360 | 15 | ✓ | | |
* | 360p_3 | 360 × 360 | 15 | ✓ | | |
* | 360p_4 | 640 × 360 | 30 | ✓ | | |
* | 360p_6 | 360 × 360 | 30 | ✓ | | |
* | 360p_7 | 480 × 360 | 15 | ✓ | | |
* | 360p_8 | 480 × 360 | 30 | ✓ | | |
* | 360p_9 | 640 × 360 | 15 | ✓ | | |
* | 360p_10 | 640 × 360 | 24 | ✓ | | |
* | 360p_11 | 640 × 360 | 24 | ✓ | | |
* | 480p | 640 × 480 | 15 | ✓ | ✓ | ✓ |
* | 480p_1 | 640 × 480 | 15 | ✓ | ✓ | ✓ |
* | 480p_2 | 640 × 480 | 30 | ✓ | ✓ | ✓ |
* | 480p_3 | 480 × 480 | 15 | ✓ | ✓ | ✓ |
* | 480p_4 | 640 × 480 | 30 | ✓ | ✓ | ✓ |
* | 480p_6 | 480 × 480 | 30 | ✓ | ✓ | ✓ |
* | 480p_8 | 848 × 480 | 15 | ✓ | ✓ | ✓ |
* | 480p_9 | 848 × 480 | 30 | ✓ | ✓ | ✓ |
* | 480p_10 | 640 × 480 | 10 | ✓ | ✓ | ✓ |
* | 720p | 1280 × 720 | 15 | ✓ | ✓ | ✓ |
* | 720p_1 | 1280 × 720 | 15 | ✓ | ✓ | ✓ |
* | 720p_2 | 1280 × 720 | 30 | ✓ | ✓ | ✓ |
* | 720p_3 | 1280 × 720 | 30 | ✓ | ✓ | ✓ |
* | 720p_auto ① | 1280 × 720 | 30 | ✓ | ✓ | ✓ |
* | 720p_5 | 960 × 720 | 15 | ✓ | ✓ | ✓ |
* | 720p_6 | 960 × 720 | 30 | ✓ | ✓ | ✓ |
* | 1080p | 1920 × 1080 | 15 | ✓ | | ✓ |
* | 1080p_1 | 1920 × 1080 | 15 | ✓ | | ✓ |
* | 1080p_2 | 1920 × 1080 | 30 | ✓ | | ✓ |
* | 1080p_3 | 1920 × 1080 | 30 | ✓ | | ✓ |
* | 1080p_5 | 1920 × 1080 | 60 | ✓ | | ✓ |
*
* > ① `"720p_auto"` is only recommended to be set on Safari to ensure dynamic adjustment of the encoding resolution. For details, see the release notes.
*
* @see {@link VideoEncoderConfiguration} for customized configurations.
*/
export declare type VideoEncoderConfigurationPreset = keyof typeof SUPPORT_VIDEO_ENCODER_CONFIG_LIST;
/**
* Playback configurations for a video track. Set the playback configurations for a video track when calling [ILocalVideoTrack.play]{@link ILocalVideoTrack.play}.
*/
export declare interface VideoPlayerConfig {
/**
* Sets whether to enable mirror mode:
* - `true`: Enable mirror mode.
* - `false`: Disable mirror mode.
*
* > Notes:
* > - The SDK enables mirror mode for the local video track by default.
* > - The SDK disables mirror mode for the remote video track by default.
*/
mirror?: boolean;
/**
* Sets video display mode:
* - `"cover"`: The image fills the height and width of the box, while maintaining its aspect ratio but often cropping the image in the process. For more information, see the `cover` option of `object-fit` in CSS.
* - `"contain"`: The size of the image increases or decreases to fill the box while preserving its aspect-ratio. Areas that are not filled due to the disparity in the aspect ratio are filled with black. For more information, see the `contain` option of `object-fit` in CSS.
* - `"fill"`: The image stretches to fit the box, regardless of its aspect-ratio. For more information, see the `fill` option of `object-fit` in CSS.
*
* > Notes:
* > - When playing the local camera video track, the SDK uses cover mode by default; when playing the local video track of screen sharing, the SDK uses contain mode by default.
* > - When playing the remote video track, the SDK uses cover mode by default.
*/
fit?: "cover" | "contain" | "fill";
}
/**
* The state of the video stream.
*
* Reported through the `video-state-changed` callback ({@link event_video_state_changed}).
*/
export declare enum VideoState {
/**
* 0: The initial state of the video.
*/
VideoStateStopped = 0,
/**
* 1: The local user has received the first video packet.
*/
VideoStateStarting = 1,
/**
* 2: The video stream is being decoded and played normally.
*/
VideoStateDecoding = 2,
/**
* 3: The video stream is frozen.
*/
VideoStateFrozen = 3
}
/**
* @ignore
* Why a video element was judged not visible. NOTE(review): member meanings
* are inferred from the names — confirm against the visibility checker:
* COVERED: presumably obscured by another element; POSITION: presumably
* positioned off-screen; SIZE: presumably zero/too-small dimensions;
* STYLE: presumably hidden via CSS (e.g. display/visibility/opacity).
*/
declare enum VisibleHiddenReason {
COVERED = "COVERED",
POSITION = "POSITION",
SIZE = "SIZE",
STYLE = "STYLE"
}
/**
* @ignore
* The "not visible" variant of a video-element visibility check result,
* carrying the reason the element was judged hidden.
*/
declare interface VisibleHiddenResult {
// Discriminant: always `false` for the hidden variant.
visible: false;
// Why the element is considered hidden. See {@link VisibleHiddenReason}.
reason: keyof typeof VisibleHiddenReason;
}
/**
* @ignore
* The "visible" variant of a video-element visibility check result.
*/
declare interface VisibleResultInner {
// Discriminant: always `true` for the visible variant.
visible: true;
}
export { }