import { AgoraRTCError } from '@agora-js/shared';
import { CheckVisibleResult } from '@agora-js/shared';
import { ElectronDesktopCapturerSource } from '@agora-js/shared';
import { EventEmitter } from '@agora-js/shared';
import { IAudioProcessor } from 'agora-rte-extension';
import type { IAudioProcessorContext } from 'agora-rte-extension';
import { IBaseProcessor } from 'agora-rte-extension';
import type { IProcessorContext } from 'agora-rte-extension';
import { isElectron } from '@agora-js/shared';
import type { Kind } from 'agora-rte-extension';
import type { NetworkQuality } from '@agora-js/shared';
import type { ProcessorStats } from 'agora-rte-extension';
import { PromiseMutex } from '@agora-js/shared';
import { RequiredOnlyOneOf } from '@agora-js/shared';
import { SDKStore } from '@agora-js/shared';
import { UID } from '@agora-js/shared';
import { Usage } from 'agora-rte-extension';
import type { UsageWithDirection } from 'agora-rte-extension';

export declare const __TRACK_LIST__: Track[];

export declare function addTrack(track: Track): void;

/**
 * Feature-detection flags describing what the current browser/runtime supports.
 */
export declare interface AgoraRTCCompatibility {
    /** Whether the standard, plugin-free screen sharing API (getDisplayMedia) is supported. */
    getDisplayMedia: boolean;
    /** Whether screen capture via a Chrome extension is supported. */
    getStreamFromExtension: boolean;
    /** Whether the unified-plan SDP semantics are supported. */
    supportUnifiedPlan: boolean;
    /** Whether configuring a minimum bitrate is supported. */
    supportMinBitrate: boolean;
    /** Whether modifying RTCRtpSender parameters is supported. */
    supportSetRtpSenderParameters: boolean;
    /** Whether the browser supports enabling dual-stream (high/low) mode. */
    supportDualStream: boolean;
    /** Whether WebAudio can output to a MediaStream destination. */
    webAudioMediaStreamDest: boolean;
    /** Whether RTCRtpSender supports replaceTrack. */
    supportReplaceTrack: boolean;
    /** Whether WebGL is supported. */
    supportWebGL: boolean;
    /** Whether acoustic echo cancellation via WebAudio is supported. */
    webAudioWithAEC: boolean;
    /** Whether CanvasCaptureMediaStreamTrack.requestFrame is supported. */
    supportRequestFrame: boolean;
    /** Whether screen-share audio is supported. */
    supportShareAudio: boolean;
    /** Whether low-stream (small video) parameters can be set via RTCRtpEncodingParameters. */
    supportDualStreamEncoding: boolean;
    /** Whether DataChannel is supported. */
    supportDataChannel: boolean;
    /** Whether PeerConnection.setConfiguration is supported. */
    supportPCSetConfiguration: boolean;
    /** Whether WebRTC Encoded Transform is supported. */
    supportWebRTCEncodedTransform: boolean;
    /** Whether WebRTC Insertable Streams are supported. */
    supportWebRTCInsertableStream: boolean;
    /** Whether requestVideoFrameCallback is supported (used to compute the freeze rate). */
    supportRequestVideoFrameCallback: boolean;
    /** Whether WebCrypto is supported. */
    supportWebCrypto: boolean;
}

/**
 * Player that renders a video MediaStreamTrack into a DOM container.
 * @internal
 */
declare class AgoraRTCPlayer extends VideoPlayer {
    private container?;
    private slot;
    constructor(config: PlayerConfig);
    updateConfig(config: PlayerConfig): void;
    updateVideoTrack(track?: MediaStreamTrack): void;
    play(sessionId?: string): void;
    getCurrentFrame(): ImageData;
    // NOTE(review): the original declared a bare `Promise` here, which is a TS
    // compile error (Promise requires a type argument). The method name implies
    // an encoded-frame byte payload — confirm the exact type against the implementation.
    getCurrentFrameToUint8Array(type: string, quality?: number): Promise<Uint8Array>;
    destroy(): void;
    private createElements;
    private mountedVideoElement;
    private unmountedVideoElement;
    protected resetVideoElement(): void;
    getContainerElement(): HTMLDivElement | undefined;
}

/**
 * Statistics of the call, which can be retrieved by calling
 * [AgoraRTCClient.getRTCStats]{@link IAgoraRTCClient.getRTCStats}.
 * @public
 */
export declare interface AgoraRTCStats {
    /** Call duration in seconds. */
    Duration: number;
    /** The total bitrate (bps) of the received audio and video, represented by an instantaneous value. */
    RecvBitrate: number;
    /** The total number of bytes received, represented by an aggregate value. */
    RecvBytes: number;
    /** The total bitrate (bps) of the sent audio and video, represented by an instantaneous value. */
    SendBitrate: number;
    /** The total number of bytes sent, represented by an aggregate value. */
    SendBytes: number;
    /**
     * The number of users in the channel.
     *
     * - Communication profile: the number of users in the channel.
     * - Live Broadcast profile:
     *   - If the local user is an audience: the number of users in the channel = the number of hosts in the channel + 1.
     *   - If the local user is a host: the number of users in the channel = the number of hosts in the channel.
     */
    UserCount: number;
    /** RTT (Round-Trip Time) between the SDK and Agora's edge server, in ms. */
    RTT: number;
    /** The estimated bandwidth (Kbps) of the uplink network. */
    OutgoingAvailableBandwidth: number;
}

export declare enum AUDIO_CONTEXT_EVENT {
    IOS_15_16_INTERRUPTION_START = "ios15_16-interruption-start",
    IOS_15_16_INTERRUPTION_END = "ios15_16-interruption-end",
    IOS_INTERRUPTION_START = "ios-interruption-start",
    IOS_INTERRUPTION_END = "ios-interruption-end",
    STATE_CHANGE = "state-change"
}

/**
 * @ignore
 */
export declare const AUDIO_ENCODER_CONFIG_SETTINGS: {
    speech_low_quality: AudioEncoderConfiguration;
    speech_standard: AudioEncoderConfiguration;
    music_standard: AudioEncoderConfiguration;
    standard_stereo: AudioEncoderConfiguration;
    high_quality: AudioEncoderConfiguration;
    high_quality_stereo: AudioEncoderConfiguration;
};

export declare enum AUDIO_TRACK_EVENT {
    UPDATE_TRACK_SOURCE = "update-track-source"
}

/**
 * Audio source backed by an in-memory AudioBuffer, supporting play / pause /
 * seek / resume and playback-speed control.
 */
declare class AudioBufferSource extends AudioSource {
    private audioBuffer;
    protected sourceNode?: AudioBufferSourceNode;
    // Time at which playback started (AudioContext time).
    private startPlayTime;
    // Offset at which playback started (buffer time).
    private startPlayOffset;
    // Time at which playback was paused (buffer time).
    private pausePlayTime;
    private options;
    private currentLoopCount;
    private currentPlaybackSpeed;
    set currentState(state: AudioSourceState);
    get currentState(): AudioSourceState;
    private _currentState;
    constructor(buffer: AudioBuffer, options?: AudioSourceOptions);
    createWebAudioDiagram(): GainNode;
    get duration(): number;
    get playbackSpeed(): number;
    /**
     * The current playback position, computed as
     * ((now - startPlayTime) * playbackSpeed + startPlayOffset) % duration.
     * If playback is paused, returns the currentTime recorded at the moment of pausing.
     * If playback has not started, returns 0.
     */
    get currentTime(): number;
    updateOptions(options: AudioSourceOptions): void;
    startProcessAudioBuffer(): void;
    /**
     * AudioBufferSourceNode provides no pause method, and a stopped node cannot
     * be started again, so pausing is implemented by discarding the node and
     * creating a new one on resume.
     */
    pauseProcessAudioBuffer(): void;
    /**
     * Same as pause: requires recreating the source node.
     * @time in seconds
     */
    seekAudioBuffer(time: number): void;
    /**
     * Resumes playback from the paused position.
     */
    resumeProcessAudioBuffer(): void;
    stopProcessAudioBuffer(): void;
    destroy(): void;
    /**
     * Aligned with the native setAudioMixingPlaybackSpeed; recommended range [50-400].
     */
    setAudioBufferPlaybackSpeed(speed: number): void;
    private startSourceNode;
    private createSourceNode;
    private handleSourceNodeEnded;
    private reset;
}

export declare const audioContextState: AudioState;

/** * 理论上来说任何情况下 SDK 都不会使用