diff --git a/README.md b/README.md index c84be907..59041a46 100644 --- a/README.md +++ b/README.md @@ -1,96 +1,33 @@ [![Build Status](https://img.shields.io/github/actions/workflow/status/devopvoid/webrtc-java/build.yml?label=Build&logo=github)](https://github.com/devopvoid/webrtc-java/actions) [![Maven Central](https://img.shields.io/maven-central/v/dev.onvoid.webrtc/webrtc-java?label=Maven%20Central&logo=apache-maven)](https://search.maven.org/artifact/dev.onvoid.webrtc/webrtc-java) -## webrtc-java +# webrtc-java -Java native interface implementation based on the free, open [WebRTC](https://webrtc.org) project. The goal of this project is to enable development of RTC applications for desktop platforms running Java. This project wraps the [WebRTC Native API](https://webrtc.github.io/webrtc-org/native-code/native-apis) and is similar to the [JS API](https://w3c.github.io/webrtc-pc). +webrtc-java is a Java wrapper for the [WebRTC Native API](https://webrtc.github.io/webrtc-org/native-code/native-apis), providing similar functionality to the [W3C JavaScript API](https://w3c.github.io/webrtc-pc). It allows Java developers to build real-time communication applications for desktop platforms without having to work directly with native code. -### Maven +The library provides a comprehensive set of Java classes that map to the WebRTC C++ API, making it possible to establish peer-to-peer connections, transmit audio and video, share screens, and exchange arbitrary data between applications. 
-```xml - - dev.onvoid.webrtc - webrtc-java - 0.12.0 - -``` +## Features -### Gradle +- **Complete WebRTC API implementation** - Includes peer connections, media devices, data channels, and more +- **Cross-platform support** - Works on Windows, macOS, and Linux (x64, ARM, ARM64) +- **Media capabilities** - Audio and video capture from cameras and microphones +- **Desktop capture** - Screen and application window sharing +- **Data channels** - Bidirectional peer-to-peer data exchange +- **Statistics API** - Detailed metrics for monitoring connection quality +- **Simple integration** - Available as a Maven dependency +- **Native performance** - Thin JNI layer with minimal overhead -```groovy -implementation "dev.onvoid.webrtc:webrtc-java:0.12.0" -implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "0.12.0", classifier: "windows-x86_64" -implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "0.12.0", classifier: "macos-x86_64" -implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "0.12.0", classifier: "macos-aarch64" -implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "0.12.0", classifier: "linux-x86_64" -implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "0.12.0", classifier: "linux-aarch64" -implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "0.12.0", classifier: "linux-aarch32" -``` +## Getting Started -### Supported Platforms -Maven Central artifacts contain native libraries that can be loaded on the following platforms: +For more detailed examples and guides, check out the [quickstart guide](quickstart.md) and the specific examples in the project repository. - - - - - - - - - - - - - - - - - - - - - - - - - -
x64armarm64
Linux✔ armeabi-v7a✔ arm64-v8a
macOS-
Windows--
+## License -The native libraries were built using the m138 (7204) WebRTC branch as the stable release, dated June 24, 2025. -### Build Notes +Copyright (c) 2019 Alex Andres -To build the native code, be sure to install the prerequisite software (follow the links): +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at -**Note**: You don't have to install the Depot Tools, the build script will do that for you. +[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0) - - - - - - - - - - - - - -
LinuxUbuntu, other distros
macOSXcode 9 or higher
WindowsVisual Studio
- -Assuming you have all the prerequisites installed for your OS, run: - -``` -mvn install -``` - -On the first run, the WebRTC source tree will be loaded into the `//webrtc` directory. This will take a while and require about 20 GB of disk space. - -#### Build Parameters - -| Parameter | Description | Default Value | -| ------------------ | ------------------------------------------------------ |-----------------------------| -| webrtc.branch | The WebRTC branch to checkout. | branch-heads/7204 | -| webrtc.src.dir | The absolute checkout path for the WebRTC source tree. | /\/webrtc | -| webrtc.install.dir | The install path for the compiled WebRTC library. Is also used to link against a pre-compiled WebRTC library to reduce build time. | /\/webrtc/build | +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. \ No newline at end of file diff --git a/docs/.nojekyll b/docs/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/docs/_coverpage.md b/docs/_coverpage.md new file mode 100644 index 00000000..ac2af381 --- /dev/null +++ b/docs/_coverpage.md @@ -0,0 +1,38 @@ +# webrtc-java + +> Connecting the Java world through WebRTC + +
    +
  • + devices + Cross-platform (Windows, macOS, Linux) +
  • +
  • + connect_without_contact + Peer-to-peer communication +
  • +
  • + videocam + Audio and video streaming +
  • +
  • + screen_share + Screen Sharing +
  • +
  • + swap_calls + Data Channels +
  • +
  • + bar_chart + Statistics API for monitoring +
  • +
+ + + + +![color](#ffffff) \ No newline at end of file diff --git a/docs/_sidebar.md b/docs/_sidebar.md new file mode 100644 index 00000000..37a8c238 --- /dev/null +++ b/docs/_sidebar.md @@ -0,0 +1,18 @@ +- Getting started + - [Overview](README.md) + - [Quick start](quickstart.md) + - [Examples](examples.md) + +- Guide + - [Overview](guide/overview.md) + - [Media Devices](guide/media_devices.md) + - [Audio Device Selection](guide/audio_devices.md) + - [Audio Processing](guide/audio_processing.md) + - [Bitrate and Framerate Constraints](guide/constraints.md) + - [Desktop Capture](guide/desktop_capture.md) + - [Data Channels](guide/data_channels.md) + - [RTC Stats](guide/rtc_stats.md) + - [Logging](guide/logging.md) + +- [**Build Notes**](build.md) +- [**Changelog**](changelog.md) \ No newline at end of file diff --git a/docs/assets/styles.css b/docs/assets/styles.css new file mode 100644 index 00000000..45824056 --- /dev/null +++ b/docs/assets/styles.css @@ -0,0 +1,139 @@ +:root { + --theme-color: #6366f1; +} + +/* Custom button styles for coverpage */ +.cover .buttons { + margin-top: 40px; + display: flex; + justify-content: center; + gap: 20px; + flex-wrap: wrap; /* Ensure buttons wrap on small screens */ +} + +.cover .buttons a { + display: inline-block; + padding: 10px 20px; + font-size: 1rem; + font-weight: 500; + text-decoration: none; + border-radius: 4px; + transition: all 0.2s ease; + margin: 5px; /* Add margin for when buttons wrap */ + background-color: transparent; /* Outlined button */ + color: var(--theme-color); /* Text color matches border for outlined style */ + border: 1px solid var(--theme-color); +} + +.cover .buttons a:hover { + background-color: rgba(99, 102, 241, 0.1); /* Light background on hover */ + border-color: #5253d4; + color: #5253d4; +} + +.cover .buttons a:active { + background-color: rgba(99, 102, 241, 0.2); /* Slightly darker background when active */ + border-color: #4338ca; + color: #4338ca; +} + +.cover .buttons a span { + 
display: inline-block; /* Ensure the span behaves properly */ +} + +/* Feature list styles */ +.cover .features-list { + list-style-type: none; + padding: 0; + margin: 30px auto; + max-width: 700px; + display: grid; + grid-template-columns: repeat(2, 1fr); + grid-gap: 6px; +} + +.cover .feature-list-item { + display: flex; + align-items: center; + margin: 0; + padding: 8px 12px; +} + +.cover .feature-icon { + margin-right: 15px; + font-size: 28px; + color: var(--theme-color); + flex-shrink: 0; +} + +.cover .feature-text { + font-size: 16px; + line-height: 1.4; + margin: 0; +} + +/* Media queries for responsive design */ +@media screen and (max-width: 768px) { + .cover .buttons { + gap: 15px; + } + + .cover .buttons a { + padding: 9px 18px; + font-size: 0.95rem; + } + + .cover .features-list { + max-width: 90%; + grid-gap: 5px; + grid-template-columns: repeat(2, 1fr); + } + + .cover .feature-list-item { + padding: 6px 10px; + } + + .cover .feature-icon { + font-size: 24px; + margin-right: 10px; + } + + .cover .feature-text { + font-size: 15px; + } +} + +@media screen and (max-width: 480px) { + .cover .buttons { + flex-direction: column; + align-items: center; + gap: 12px; + } + + .cover .buttons a { + padding: 8px 16px; + font-size: 0.9rem; + width: 80%; + max-width: 200px; + text-align: center; + } + + .cover .features-list { + max-width: 95%; + grid-gap: 4px; + grid-template-columns: 1fr; + } + + .cover .feature-list-item { + padding: 5px 8px; + } + + .cover .feature-icon { + font-size: 20px; + margin-right: 8px; + } + + .cover .feature-text { + font-size: 14px; + } +} \ No newline at end of file diff --git a/docs/assets/versions.js b/docs/assets/versions.js new file mode 100644 index 00000000..8de2fe5a --- /dev/null +++ b/docs/assets/versions.js @@ -0,0 +1,4 @@ +window.PROJECT_VARS = { + VERSION: '0.12.0', + VERSION_SNAPSHOT: '0.13.0-SNAPSHOT' +}; \ No newline at end of file diff --git a/docs/build.md b/docs/build.md new file mode 100644 index 00000000..3d23e1d6 
--- /dev/null +++ b/docs/build.md @@ -0,0 +1,36 @@ +# Build Notes + +To build the native code, be sure to install the prerequisite software (follow the links): + +**Note**: You don't have to install the Depot Tools, the build script will do that for you. + + + + + + + + + + + + + + +
LinuxUbuntu, other distros
macOSXcode 9 or higher
WindowsVisual Studio
+ +Assuming you have all the prerequisites installed for your OS, run: + +```shell +mvn install +``` + +On the first run, the WebRTC source tree will be loaded into the `//webrtc` directory. This will take a while and require about 20 GB of disk space. + +## Build Parameters + +| Parameter | Description | Default Value | +| ------------------ | ------------------------------------------------------ |-----------------------------| +| webrtc.branch | The WebRTC branch to checkout. | branch-heads/7204 | +| webrtc.src.dir | The absolute checkout path for the WebRTC source tree. | /\/webrtc | +| webrtc.install.dir | The install path for the compiled WebRTC library. Is also used to link against a pre-compiled WebRTC library to reduce build time. | /\/webrtc/build | diff --git a/docs/examples.md b/docs/examples.md new file mode 100644 index 00000000..872be2f1 --- /dev/null +++ b/docs/examples.md @@ -0,0 +1,67 @@ +# Examples + +This section provides an overview of the example applications included in the `webrtc-examples` Maven module. These examples demonstrate various features and capabilities of the webrtc-java library. + +## PeerConnectionExample + +The `PeerConnectionExample` demonstrates how to set up a peer connection with audio and video tracks to be able to send and receive media. + +**Key features demonstrated:** +- Creating a PeerConnectionFactory +- Creating audio and video tracks +- Setting up a peer connection +- Adding tracks to the peer connection for sending media +- Implementing callbacks to receive incoming audio and video frames + +This example provides a foundation for building WebRTC applications that need to handle audio and video communication. + +## WhepExample + +The `WhepExample` demonstrates an implementation of a WebRTC HTTP Egress Protocol (WHEP) client, which is a standardized protocol for WebRTC media egress (playback) — the counterpart to WHIP, which handles ingestion. 
+ +**Key features demonstrated:** +- Setting up a WebRTC peer connection +- Creating and sending an SDP offer to a WHEP endpoint +- Receiving and processing an SDP answer +- Establishing media streaming over WebRTC + +This example is useful for applications that need to receive media streams from WHEP-compatible servers, such as live streaming platforms. + +## CodecListExample + +The `CodecListExample` demonstrates how to list all supported codecs with the WebRTC peer-connection-factory. + +**Key features demonstrated:** +- Creating a PeerConnectionFactory +- Getting the supported codecs for both sending and receiving audio and video +- Displaying detailed information about each codec + +This example is useful for understanding what codecs are available on the current system, which can help with debugging compatibility issues or optimizing media quality. + +## DesktopVideoExample + +The `DesktopVideoExample` demonstrates how to set up a peer connection with a desktop video source for screen or window capture. + +**Key features demonstrated:** +- Creating a PeerConnectionFactory +- Getting available desktop sources (screens and windows) +- Creating a VideoDesktopSource for capturing screen or window content +- Configuring the VideoDesktopSource properties +- Creating a video track with the desktop source +- Setting up a peer connection + +This example is particularly useful for applications that need to implement screen sharing or remote desktop functionality. + + +## Running the Examples + +To run these examples, you need to navigate into the directory `webrtc-java/webrtc-examples` in the project. +You can then execute the main method of each example class. + +For example, to run the `CodecListExample`: + +```bash +mvn exec:java -D"exec.mainClass=dev.onvoid.webrtc.examples.CodecListExample" +``` + +Note that these examples focus on setting up the local components for WebRTC communication. 
In a real application, you would need to establish a connection with a remote peer through a signaling channel (e.g., WebSocket). \ No newline at end of file diff --git a/docs/guide.md b/docs/guide.md new file mode 100644 index 00000000..f2f6032b --- /dev/null +++ b/docs/guide.md @@ -0,0 +1,23 @@ +# Guides + +This section provides detailed guides for various features of the webrtc-java library. + +## Media Guides + +- [Media Devices Guide](guide/media_devices.md) - Working with audio and video devices +- [Audio Device Selection Guide](guide/audio_devices.md) - Selecting and configuring audio devices +- [Bitrate and Framerate Constraints Guide](guide/constraints.md) - Controlling media quality +- [Desktop Video Track Guide](guide/desktop_capture.md) - Capturing and sharing screens and windows + +## Data Communication + +- [Data Channels Guide](guide/data_channels.md) - Sending and receiving data between peers + +## Monitoring and Debugging + +- [RTC Stats Guide](guide/rtc_stats.md) - Monitoring connection quality and performance +- [Logging Guide](guide/logging.md) - Configuring and using the logging system + +## Additional Resources + +For a complete API reference, check the [JavaDoc](https://javadoc.io/doc/dev.onvoid.webrtc/webrtc-java/latest/index.html). \ No newline at end of file diff --git a/docs/guide/audio_devices.md b/docs/guide/audio_devices.md new file mode 100644 index 00000000..93950d81 --- /dev/null +++ b/docs/guide/audio_devices.md @@ -0,0 +1,125 @@ +# Audio Device Selection + +This guide focuses on setting up a peer connection with audio device selection capabilities, which allows you to choose specific microphones and speakers for your WebRTC connection. + +## Audio Device Selection + +To enable audio device selection, you need to: + +1. List available audio devices +2. Create and configure an AudioDeviceModule +3. 
Pass the AudioDeviceModule to the PeerConnectionFactory + +### Listing Available Audio Devices + +The `MediaDevices` class provides methods to list available audio devices: + +```java +import dev.onvoid.webrtc.media.MediaDevices; +import dev.onvoid.webrtc.media.audio.AudioDevice; +import java.util.List; + +// Get available microphones (capture devices) +List<AudioDevice> captureDevices = MediaDevices.getAudioCaptureDevices(); +System.out.println("Available microphones:"); +for (AudioDevice device : captureDevices) { + System.out.println(" - " + device.getName()); +} + +// Get available speakers (render devices) +List<AudioDevice> renderDevices = MediaDevices.getAudioRenderDevices(); +System.out.println("Available speakers:"); +for (AudioDevice device : renderDevices) { + System.out.println(" - " + device.getName()); +} + +// Get default devices +AudioDevice defaultMicrophone = MediaDevices.getDefaultAudioCaptureDevice(); +AudioDevice defaultSpeaker = MediaDevices.getDefaultAudioRenderDevice(); +``` + +### Creating and Configuring an AudioDeviceModule + +The `AudioDeviceModule` class allows you to select specific audio devices: + +```java +import dev.onvoid.webrtc.media.audio.AudioDeviceModule; + +// Create an AudioDeviceModule +AudioDeviceModule audioModule = new AudioDeviceModule(); + +// Select specific devices +AudioDevice selectedMicrophone = captureDevices.get(0); // Choose the first microphone +AudioDevice selectedSpeaker = renderDevices.get(0); // Choose the first speaker + +audioModule.setRecordingDevice(selectedMicrophone); +audioModule.setPlayoutDevice(selectedSpeaker); + +// Initialize recording and playback +audioModule.initRecording(); +audioModule.initPlayout(); +``` + +### Integrating with PeerConnectionFactory + +Pass the configured AudioDeviceModule to the PeerConnectionFactory constructor: + +```java +// Create a PeerConnectionFactory with the custom AudioDeviceModule +PeerConnectionFactory factory = new PeerConnectionFactory(audioModule); +``` + + +## Additional Features 
+ +The `AudioDeviceModule` provides additional methods for controlling audio: + +### Volume Control +```java +// Get current volume levels +int micVolume = audioModule.getMicrophoneVolume(); +int speakerVolume = audioModule.getSpeakerVolume(); + +// Get volume ranges +int minMicVolume = audioModule.getMinMicrophoneVolume(); +int maxMicVolume = audioModule.getMaxMicrophoneVolume(); +int minSpeakerVolume = audioModule.getMinSpeakerVolume(); +int maxSpeakerVolume = audioModule.getMaxSpeakerVolume(); + +// Set volume levels +audioModule.setMicrophoneVolume(75); // Set to 75% of max +audioModule.setSpeakerVolume(80); // Set to 80% of max +``` + +### Mute Control +```java +// Check mute status +boolean isMicMuted = audioModule.isMicrophoneMuted(); +boolean isSpeakerMuted = audioModule.isSpeakerMuted(); + +// Set mute status +audioModule.setMicrophoneMute(true); // Mute microphone +audioModule.setSpeakerMute(false); // Unmute speaker +``` + +### Device Change Handling +```java +// Add a device change listener to handle device hot-plugging +MediaDevices.addDeviceChangeListener(event -> { + System.out.println("Audio devices changed. Refreshing device list..."); + + // Refresh device lists + List<AudioDevice> newCaptureDevices = MediaDevices.getAudioCaptureDevices(); + List<AudioDevice> newRenderDevices = MediaDevices.getAudioRenderDevices(); + + // Update UI or device selection as needed +}); +``` + +--- + +## Conclusion + +This guide has walked you through the process of implementing audio device selection in your WebRTC application. +By implementing these capabilities, your application can provide users with greater control over their audio experience, accommodating different hardware setups and preferences. +For more advanced audio processing options, consider exploring the audio processing APIs available in this documentation. 
\ No newline at end of file diff --git a/docs/guide/audio_processing.md b/docs/guide/audio_processing.md new file mode 100644 index 00000000..60d514eb --- /dev/null +++ b/docs/guide/audio_processing.md @@ -0,0 +1,310 @@ +# Audio Processing + +This guide explains how to use the audio processing capabilities provided by the `dev.onvoid.webrtc.media.audio.AudioProcessing` class. The library provides a collection of voice processing components designed for real-time communications software. + +## Overview + +The `AudioProcessing` class offers several audio processing features: + +- **Echo Cancellation**: Removes echo from audio signals +- **Noise Suppression**: Reduces background noise +- **Gain Control**: Adjusts audio levels automatically +- **High-Pass Filtering**: Removes low-frequency noise + +These features are particularly useful for VoIP applications, video conferencing, and other real-time communication systems. + +## Two Approaches to Audio Processing + +There are two main approaches to using audio processing in the library: + +1. **Automatic Processing with PeerConnectionFactory**: Set a configured `AudioProcessing` instance to the `PeerConnectionFactory`. This is the recommended approach for most applications. +2. **Manual Processing**: Create an `AudioProcessing` instance and manually process audio streams. This gives you more control but requires more work. 
+ +### Automatic Processing with PeerConnectionFactory + +The simplest way to use audio processing is to set a configured `AudioProcessing` instance to the `PeerConnectionFactory`: + +```java +import dev.onvoid.webrtc.PeerConnectionFactory; +import dev.onvoid.webrtc.media.audio.AudioProcessing; +import dev.onvoid.webrtc.media.audio.AudioProcessingConfig; +import dev.onvoid.webrtc.media.audio.NoiseSuppression; + +// Create and configure an AudioProcessing instance +AudioProcessing audioProcessing = new AudioProcessing(); +AudioProcessingConfig config = new AudioProcessingConfig(); + +// Enable echo cancellation +config.echoCanceller.enabled = true; + +// Enable noise suppression +config.noiseSuppression.enabled = true; +config.noiseSuppression.level = NoiseSuppression.Level.MODERATE; + +// Apply the configuration +audioProcessing.applyConfig(config); + +// Create a PeerConnectionFactory with the configured AudioProcessing +PeerConnectionFactory factory = new PeerConnectionFactory(audioProcessing); + +// Now all audio processing will be handled automatically by the WebRTC framework +// ... + +// Don't forget to dispose when done +factory.dispose(); +audioProcessing.dispose(); +``` + +With this approach, the WebRTC framework automatically applies the audio processing to all audio streams. You don't need to manually process audio data - the WebRTC framework handles it internally based on your configuration. + +### Manual Processing + +For more control, you can manually process audio streams: + +```java +import dev.onvoid.webrtc.media.audio.AudioProcessing; +import dev.onvoid.webrtc.media.audio.AudioProcessingConfig; +import dev.onvoid.webrtc.media.audio.AudioProcessingStreamConfig; + +// Create an AudioProcessing instance +AudioProcessing audioProcessing = new AudioProcessing(); + +try { + // Configure audio processing + AudioProcessingConfig config = new AudioProcessingConfig(); + + audioProcessing.applyConfig(config); + + // Process audio streams + // ... 
+} finally { + // Always dispose when done to release native resources + audioProcessing.dispose(); +} +``` + +## Configuration + +The `AudioProcessingConfig` class allows you to enable and configure various audio processing features: + +### Echo Cancellation + +Echo cancellation removes echo from audio signals, which is essential for full-duplex audio communication: + +```java +AudioProcessingConfig config = new AudioProcessingConfig(); + +// Enable echo cancellation +config.echoCanceller.enabled = true; + +// Enable high-pass filtering during echo cancellation +config.echoCanceller.enforceHighPassFiltering = true; +``` + +When echo cancellation is enabled, you should set the stream delay to help the echo canceller: + +```java +// Set the delay between far-end and near-end audio in milliseconds +audioProcessing.setStreamDelayMs(70); +``` + +### Noise Suppression + +Noise suppression reduces background noise in the audio signal: + +```java +// Enable noise suppression +config.noiseSuppression.enabled = true; + +// Set the level of noise suppression +// Options: LOW, MODERATE, HIGH, VERY_HIGH +config.noiseSuppression.level = NoiseSuppression.Level.MODERATE; +``` + +### Gain Control + +Gain control adjusts the audio level automatically: + +```java +// Enable gain control +config.gainControl.enabled = true; + +// Configure fixed digital gain (in dB) +config.gainControl.fixedDigital.gainDb = 5.0f; + +// Or configure adaptive digital gain +config.gainControl.adaptiveDigital.enabled = true; +config.gainControl.adaptiveDigital.headroomDb = 3.0f; +config.gainControl.adaptiveDigital.maxGainDb = 30.0f; +config.gainControl.adaptiveDigital.initialGainDb = 8.0f; +config.gainControl.adaptiveDigital.maxGainChangeDbPerSecond = 3.0f; +config.gainControl.adaptiveDigital.maxOutputNoiseLevelDbfs = -50.0f; +``` + +### High-Pass Filter + +High-pass filtering removes low-frequency noise: + +```java +// Enable high-pass filtering +config.highPassFilter.enabled = true; +``` + +## 
Processing Audio + +The `AudioProcessing` class processes audio in 10ms chunks of linear PCM audio data. You need to configure the input and output formats using `AudioProcessingStreamConfig`: + +```java +// Define input and output stream configurations +int inputSampleRate = 48000; // 48 kHz +int inputChannels = 1; // Mono +int outputSampleRate = 48000; // 48 kHz +int outputChannels = 1; // Mono + +AudioProcessingStreamConfig inputConfig = + new AudioProcessingStreamConfig(inputSampleRate, inputChannels); +AudioProcessingStreamConfig outputConfig = + new AudioProcessingStreamConfig(outputSampleRate, outputChannels); +``` + +### Calculate Buffer Size + +Before processing, you need to calculate the appropriate buffer size: + +```java +// Calculate buffer size for destination buffer +int bufferSize = audioProcessing.getTargetBufferSize(inputConfig, outputConfig); + +// Create source and destination buffers +byte[] sourceBuffer = new byte[inputSampleRate / 100 * inputChannels * 2]; // 10ms of audio +byte[] destBuffer = new byte[bufferSize]; +``` + +### Process Near-End Audio + +Process audio captured from the local microphone: + +```java +// Fill sourceBuffer with audio data from microphone +// ... + +// Process the audio +int result = audioProcessing.processStream( + sourceBuffer, inputConfig, outputConfig, destBuffer); + +// Check result (0 means success) +if (result == 0) { + // Use processed audio in destBuffer + // ... +} +``` + +### Process Far-End Audio + +For echo cancellation, you also need to process audio received from the remote end: + +```java +// Fill sourceBuffer with audio data from remote participant +// ... + +// Process the far-end audio +int result = audioProcessing.processReverseStream( + sourceBuffer, inputConfig, outputConfig, destBuffer); + +// Check result (0 means success) +if (result == 0) { + // Use processed audio in destBuffer (usually for playback) + // ... 
+} +``` + +## Format Conversion + +The `AudioProcessing` class can also convert between different audio formats: + +### Down-mixing (Stereo to Mono) + +```java +// Configure for down-mixing +AudioProcessingStreamConfig stereoConfig = + new AudioProcessingStreamConfig(48000, 2); // Stereo input +AudioProcessingStreamConfig monoConfig = + new AudioProcessingStreamConfig(48000, 1); // Mono output + +// Process with format conversion +audioProcessing.processStream( + stereoBuffer, stereoConfig, monoConfig, monoDestBuffer); +``` + +### Up-mixing (Mono to Stereo) + +```java +// Configure for up-mixing +AudioProcessingStreamConfig monoConfig = + new AudioProcessingStreamConfig(48000, 1); // Mono input +AudioProcessingStreamConfig stereoConfig = + new AudioProcessingStreamConfig(48000, 2); // Stereo output + +// Process with format conversion +audioProcessing.processStream( + monoBuffer, monoConfig, stereoConfig, stereoDestBuffer); +``` + +### Sample Rate Conversion + +```java +// Configure for sample rate conversion +AudioProcessingStreamConfig highRateConfig = + new AudioProcessingStreamConfig(48000, 1); // 48 kHz +AudioProcessingStreamConfig lowRateConfig = + new AudioProcessingStreamConfig(16000, 1); // 16 kHz + +// Process with sample rate conversion +audioProcessing.processStream( + highRateBuffer, highRateConfig, lowRateConfig, lowRateDestBuffer); +``` + +## Statistics + +The `AudioProcessing` class provides statistics about the audio processing performance: + +```java +// Get statistics +AudioProcessingStats stats = audioProcessing.getStatistics(); + +// Echo cancellation statistics +System.out.println("Echo Return Loss: " + stats.echoReturnLoss + " dB"); +System.out.println("Echo Return Loss Enhancement: " + stats.echoReturnLossEnhancement + " dB"); +System.out.println("Divergent Filter Fraction: " + stats.divergentFilterFraction); + +// Delay statistics +System.out.println("Current Delay: " + stats.delayMs + " ms"); +System.out.println("Median Delay: " + 
stats.delayMedianMs + " ms"); +System.out.println("Delay Standard Deviation: " + stats.delayStandardDeviationMs + " ms"); + +// Residual echo statistics +System.out.println("Residual Echo Likelihood: " + stats.residualEchoLikelihood); +System.out.println("Recent Max Residual Echo Likelihood: " + stats.residualEchoLikelihoodRecentMax); +``` + +These statistics are particularly useful for monitoring the performance of echo cancellation. + +## Best Practices + +1. **Always dispose**: Call `dispose()` when you're done with the `AudioProcessing` instance to free native resources. + +2. **Configure before processing**: Apply your configuration before processing any audio for best results. + +3. **Set stream delay**: For echo cancellation to work effectively, set the stream delay using `setStreamDelayMs()`. + +4. **Process in 10ms chunks**: The audio processing is designed to work with 10ms chunks of audio. + +5. **Monitor statistics**: Use the statistics to monitor the performance of echo cancellation and adjust settings if needed. + +--- + +## Conclusion + +The `AudioProcessing` class provides powerful audio processing capabilities for real-time communications. By properly configuring and using these features, you can significantly improve the audio quality in your applications. + +Remember that audio processing is CPU-intensive, so consider the performance implications when enabling multiple features, especially on resource-constrained devices. \ No newline at end of file diff --git a/docs/guide/constraints.md b/docs/guide/constraints.md new file mode 100644 index 00000000..8437c5de --- /dev/null +++ b/docs/guide/constraints.md @@ -0,0 +1,127 @@ +# Bitrate and Framerate Constraints + +This guide explains how to set bitrate and framerate constraints for MediaStreamTrack of RTCRtpSender. 
It covers: + +- Understanding RTCRtpSender Parameters +- Setting maximum and minimum bitrate constraints +- Setting maximum framerate constraints +- Scaling video resolution + +## Understanding RTCRtpSender Parameters + +The `RTCRtpSender` class allows you to control how a `MediaStreamTrack` is encoded and transmitted to a remote peer. You can modify encoding parameters such as bitrate and framerate by getting the current parameters, modifying them, and then setting the updated parameters. + +The key methods for this process are: + +- `getParameters()` - Returns the current parameters for how the track is encoded and transmitted +- `setParameters(RTCRtpSendParameters parameters)` - Updates how the track is encoded and transmitted + +It's important to note that you must first call `getParameters()`, modify the returned object, and then call `setParameters()` with the modified object. This is because the parameters object contains a transaction ID that ensures there are no intervening changes. + +## Setting Bitrate Constraints + +You can set both maximum and minimum bitrate constraints for a MediaStreamTrack. These constraints help control the quality and bandwidth usage of the transmitted media. + +```java +// Import required classes +import dev.onvoid.webrtc.RTCRtpSender; +import dev.onvoid.webrtc.RTCRtpSendParameters; +import dev.onvoid.webrtc.RTCRtpEncodingParameters; + +// Assuming you have an RTCRtpSender instance +RTCRtpSender sender = /* ... 
*/; + +// Get the current parameters +RTCRtpSendParameters parameters = sender.getParameters(); + +// Check if there are any encodings +if (parameters.encodings != null && !parameters.encodings.isEmpty()) { + // Set maximum bitrate (in bits per second) + // For example, 1,000,000 bps = 1 Mbps + parameters.encodings.get(0).maxBitrate = 1000000; + + // Set minimum bitrate (in bits per second) + // For example, 100,000 bps = 100 Kbps + parameters.encodings.get(0).minBitrate = 100000; + + // Apply the modified parameters + sender.setParameters(parameters); +} +``` + +Setting a maximum bitrate helps ensure that your application doesn't use excessive bandwidth, which is particularly important for users with limited data plans or slower connections. Setting a minimum bitrate can help maintain a certain level of quality, though it may cause issues if the available bandwidth falls below this threshold. + +## Setting Framerate Constraints + +You can also set a maximum framerate constraint for video tracks. This can be useful for limiting CPU usage or bandwidth consumption. + +```java +// Import required classes +import dev.onvoid.webrtc.RTCRtpSender; +import dev.onvoid.webrtc.RTCRtpSendParameters; +import dev.onvoid.webrtc.RTCRtpEncodingParameters; + +// Assuming you have an RTCRtpSender instance with a video track +RTCRtpSender sender = /* ... */; + +// Get the current parameters +RTCRtpSendParameters parameters = sender.getParameters(); + +// Check if there are any encodings +if (parameters.encodings != null && !parameters.encodings.isEmpty()) { + // Set maximum framerate (in frames per second) + // For example, 15.0 fps + parameters.encodings.get(0).maxFramerate = 15.0; + + // Apply the modified parameters + sender.setParameters(parameters); +} +``` + +Setting a lower framerate can significantly reduce bandwidth usage, which is beneficial for users with limited bandwidth. However, it may result in less smooth video playback. 
+ +## Scaling Video Resolution + +In addition to bitrate and framerate constraints, you can also scale down the resolution of a video track. This is done using the `scaleResolutionDownBy` parameter. + +```java +// Import required classes +import dev.onvoid.webrtc.RTCRtpSender; +import dev.onvoid.webrtc.RTCRtpSendParameters; +import dev.onvoid.webrtc.RTCRtpEncodingParameters; + +// Assuming you have an RTCRtpSender instance with a video track +RTCRtpSender sender = /* ... */; + +// Get the current parameters +RTCRtpSendParameters parameters = sender.getParameters(); + +// Check if there are any encodings +if (parameters.encodings != null && !parameters.encodings.isEmpty()) { + // Scale down resolution by a factor of 2 (each dimension) + // This will reduce the video size to 1/4 of the original + parameters.encodings.get(0).scaleResolutionDownBy = 2.0; + + // Apply the modified parameters + sender.setParameters(parameters); +} +``` + +The `scaleResolutionDownBy` parameter specifies how much to scale down the video in each dimension. For example, a value of 2.0 means the video will be scaled down by a factor of 2 in both width and height, resulting in a video that is 1/4 the size of the original. + +> Note that these constraints are applied without requiring SDP renegotiation, making them suitable for dynamic adaptation to changing network conditions. + +--- + +## Conclusion + +In this guide, we've explored several important techniques for controlling media quality and bandwidth usage in WebRTC applications. +These constraints provide powerful tools for adapting media quality dynamically in response to changing network conditions or device capabilities. 
+By implementing these techniques, you can: + +- Improve user experience on limited bandwidth connections +- Reduce data consumption for users with data caps +- Optimize performance on lower-powered devices +- Ensure more consistent connection quality across various network conditions + +You can use this code as a starting point for your own applications that need to control media quality and bandwidth usage. \ No newline at end of file diff --git a/docs/guide/data_channels.md b/docs/guide/data_channels.md new file mode 100644 index 00000000..a4a18b26 --- /dev/null +++ b/docs/guide/data_channels.md @@ -0,0 +1,306 @@ +# Data Channels + +This guide explains how to use WebRTC data channels with the webrtc-java library. Data channels provide a bidirectional communication mechanism that can be used to send arbitrary data between peers. + +## Overview + +WebRTC data channels allow you to: +- Send and receive text or binary data between peers +- Configure reliability and ordering properties +- Set up multiple channels with different configurations +- Monitor channel state changes and buffer amounts + +Data channels are created through an `RTCPeerConnection` and use the SCTP protocol for data transmission. + +## Creating a Data Channel + +To create a data channel, you need an established `RTCPeerConnection`. 
You can then call the `createDataChannel` method with a label and optional configuration: + +```java +import dev.onvoid.webrtc.RTCPeerConnection; +import dev.onvoid.webrtc.RTCDataChannel; +import dev.onvoid.webrtc.RTCDataChannelInit; + +// Assuming you already have a PeerConnectionFactory and RTCConfiguration +RTCPeerConnection peerConnection = factory.createPeerConnection(config, peerConnectionObserver); + +// Create a data channel with default configuration +RTCDataChannel dataChannel = peerConnection.createDataChannel("myChannel", new RTCDataChannelInit()); +``` + +### Data Channel Configuration + +You can customize the behavior of a data channel by configuring the `RTCDataChannelInit` object: + +```java +RTCDataChannelInit config = new RTCDataChannelInit(); + +// Configure ordering (default: true) +config.ordered = true; // Messages will be delivered in order + +// Configure reliability +// Option 1: Reliable (default) +config.maxPacketLifeTime = -1; +config.maxRetransmits = -1; + +// Option 2: Time-limited reliability +config.maxPacketLifeTime = 1000; // Retransmit for up to 1000ms +config.maxRetransmits = -1; // Don't use retransmit count limit + +// Option 3: Count-limited reliability +config.maxPacketLifeTime = -1; // Don't use time limit +config.maxRetransmits = 5; // Retransmit up to 5 times + +// Note: You cannot set both maxPacketLifeTime and maxRetransmits + +// Configure channel negotiation (default: false) +config.negotiated = false; // Channel will be announced in-band + +// Configure channel ID (default: -1, auto-assigned) +config.id = -1; // Let WebRTC assign an ID + +// Configure sub-protocol (default: null) +config.protocol = "my-protocol"; // Optional sub-protocol name + +// Configure priority (default: LOW) +config.priority = RTCPriorityType.LOW; + +// Create the data channel with this configuration +RTCDataChannel dataChannel = peerConnection.createDataChannel("myChannel", config); +``` + +## Handling Data Channel Events + +To receive events 
from a data channel, you need to implement the `RTCDataChannelObserver` interface and register it with the data channel: + +```java +import dev.onvoid.webrtc.RTCDataChannelObserver; +import dev.onvoid.webrtc.RTCDataChannelBuffer; +import dev.onvoid.webrtc.RTCDataChannelState; + +dataChannel.registerObserver(new RTCDataChannelObserver() { + @Override + public void onBufferedAmountChange(long previousAmount) { + // Called when the buffered amount changes + long currentAmount = dataChannel.getBufferedAmount(); + System.out.println("Buffered amount changed from " + previousAmount + + " to " + currentAmount + " bytes"); + } + + @Override + public void onStateChange() { + // Called when the data channel state changes + RTCDataChannelState state = dataChannel.getState(); + System.out.println("Data channel state changed to: " + state); + + // Handle different states + switch (state) { + case CONNECTING: + System.out.println("Data channel is being established"); + break; + case OPEN: + System.out.println("Data channel is open and ready to use"); + break; + case CLOSING: + System.out.println("Data channel is being closed"); + break; + case CLOSED: + System.out.println("Data channel is closed"); + break; + } + } + + @Override + public void onMessage(RTCDataChannelBuffer buffer) { + // Called when a message is received + // IMPORTANT: The buffer data will be freed after this method returns, + // so you must copy it if you need to use it asynchronously + + if (buffer.binary) { + // Handle binary data + handleBinaryMessage(buffer.data); + } else { + // Handle text data + handleTextMessage(buffer.data); + } + } +}); +``` + +### Receiving Data Channels + +When the remote peer creates a data channel, the `onDataChannel` method of your `PeerConnectionObserver` will be called: + +```java +import dev.onvoid.webrtc.PeerConnectionObserver; + +public class MyPeerConnectionObserver implements PeerConnectionObserver { + // Other PeerConnectionObserver methods... 
+ + @Override + public void onDataChannel(RTCDataChannel dataChannel) { + System.out.println("Received data channel: " + dataChannel.getLabel()); + + // Register an observer to handle events from this channel + dataChannel.registerObserver(new MyDataChannelObserver()); + } +} +``` + +## Sending and Receiving Data + +### Sending Data + +You can send text or binary data through a data channel using the `send` method: + +```java +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; + +// Send text data +String textMessage = "Hello, WebRTC!"; +ByteBuffer textBuffer = ByteBuffer.wrap(textMessage.getBytes(StandardCharsets.UTF_8)); +RTCDataChannelBuffer textChannelBuffer = new RTCDataChannelBuffer(textBuffer, false); + +try { + dataChannel.send(textChannelBuffer); +} catch (Exception e) { + System.err.println("Failed to send text message: " + e.getMessage()); +} + +// Send binary data +byte[] binaryData = new byte[] { 0x01, 0x02, 0x03, 0x04 }; +ByteBuffer binaryBuffer = ByteBuffer.wrap(binaryData); +RTCDataChannelBuffer binaryChannelBuffer = new RTCDataChannelBuffer(binaryBuffer, true); + +try { + dataChannel.send(binaryChannelBuffer); +} catch (Exception e) { + System.err.println("Failed to send binary data: " + e.getMessage()); +} +``` + +### Receiving Data + +To receive data, implement the `onMessage` method in your `RTCDataChannelObserver`: + +```java +@Override +public void onMessage(RTCDataChannelBuffer buffer) { + ByteBuffer data = buffer.data; + + if (buffer.binary) { + // Handle binary data + byte[] binaryData; + + if (data.hasArray()) { + binaryData = data.array(); + } else { + binaryData = new byte[data.remaining()]; + data.get(binaryData); + } + + System.out.println("Received binary data, " + binaryData.length + " bytes"); + // Process binary data... 
+ } else { + // Handle text data + byte[] textBytes; + + if (data.hasArray()) { + textBytes = data.array(); + } else { + textBytes = new byte[data.remaining()]; + data.get(textBytes); + } + + String text = new String(textBytes, StandardCharsets.UTF_8); + System.out.println("Received text message: " + text); + // Process text message... + } +} +``` + +## Data Channel Properties + +You can query various properties of a data channel: + +```java +// Get the channel label +String label = dataChannel.getLabel(); + +// Check if the channel is reliable +boolean reliable = dataChannel.isReliable(); + +// Check if messages are delivered in order +boolean ordered = dataChannel.isOrdered(); + +// Get the maximum packet lifetime (in milliseconds) +int maxPacketLifeTime = dataChannel.getMaxPacketLifeTime(); + +// Get the maximum number of retransmits +int maxRetransmits = dataChannel.getMaxRetransmits(); + +// Get the sub-protocol +String protocol = dataChannel.getProtocol(); + +// Check if the channel was negotiated by the application +boolean negotiated = dataChannel.isNegotiated(); + +// Get the channel ID +int id = dataChannel.getId(); + +// Get the current state +RTCDataChannelState state = dataChannel.getState(); + +// Get the amount of buffered data (in bytes) +long bufferedAmount = dataChannel.getBufferedAmount(); +``` + +## Closing and Cleanup + +When you're done with a data channel, you should properly clean it up: + +```java +// Unregister the observer +dataChannel.unregisterObserver(); + +// Close the data channel +dataChannel.close(); + +// Dispose of native resources +dataChannel.dispose(); +``` + +## Best Practices + +1. **Error Handling**: Always wrap `send` calls in try-catch blocks as they can throw exceptions if the buffer is full or the channel is not in the OPEN state. + +2. **Buffer Management**: Monitor the buffered amount to avoid overwhelming the channel. If `getBufferedAmount()` returns a large value, consider pausing sending until it decreases. + +3. 
**Copy Received Data**: Remember that the data in the `RTCDataChannelBuffer` will be freed after the `onMessage` method returns. If you need to process the data asynchronously, make a copy of it. + +4. **Proper Cleanup**: Always unregister observers, close channels, and dispose of native resources to prevent memory leaks. + +5. **State Checking**: Check the channel state before sending data to avoid exceptions: + ```java + if (dataChannel.getState() == RTCDataChannelState.OPEN) { + // Safe to send data + } + ``` + +6. **Multiple Channels**: Consider using multiple data channels with different configurations for different types of data (e.g., one reliable channel for critical data and one unreliable channel for real-time updates). + +7. **Binary vs. Text**: Use the appropriate flag when creating `RTCDataChannelBuffer` objects: + - `false` for UTF-8 text data + - `true` for binary data + +--- + +## Conclusion + +WebRTC data channels provide a powerful way to establish peer-to-peer communication for transferring arbitrary data between clients. +Data channels complement WebRTC's audio and video capabilities, making it possible to build comprehensive real-time applications that include text chat, file transfers, game state synchronization, and other custom data exchange requirements. + +For optimal performance, remember to follow the best practices outlined in this guide, particularly regarding buffer management and proper cleanup of resources. + +For more information on other WebRTC features, refer to the additional guides in the documentation. 
\ No newline at end of file
diff --git a/docs/guide/desktop_capture.md b/docs/guide/desktop_capture.md
new file mode 100644
index 00000000..9e04d42c
--- /dev/null
+++ b/docs/guide/desktop_capture.md
@@ -0,0 +1,172 @@
+# Desktop Video Track
+
+This guide focuses on setting up a peer connection with desktop video capture capabilities, which allows you to capture and stream content from your screens or application windows in your WebRTC connection.
+
+## Desktop Video Source Selection
+
+To enable desktop video capture, you need to:
+
+1. List available desktop sources (screens and windows)
+2. Create and configure a VideoDesktopSource
+3. Create a video track with the desktop source
+4. Add the track to your peer connection
+
+### Listing Available Desktop Sources
+
+The library provides classes to list available screens and windows:
+
+```java
+import dev.onvoid.webrtc.media.video.desktop.DesktopSource;
+import dev.onvoid.webrtc.media.video.desktop.ScreenCapturer;
+import dev.onvoid.webrtc.media.video.desktop.WindowCapturer;
+import java.util.List;
+
+// Get available screens
+ScreenCapturer screenCapturer = new ScreenCapturer();
+List<DesktopSource> screens = screenCapturer.getDesktopSources();
+System.out.println("Available screens:");
+for (DesktopSource screen : screens) {
+    System.out.printf(" Screen: %s (ID: %d)%n", screen.title, screen.id);
+}
+
+// Get available windows
+WindowCapturer windowCapturer = new WindowCapturer();
+List<DesktopSource> windows = windowCapturer.getDesktopSources();
+System.out.println("Available windows:");
+for (DesktopSource window : windows) {
+    System.out.printf(" Window: %s (ID: %d)%n", window.title, window.id);
+}
+
+// Clean up the capturers after use
+screenCapturer.dispose();
+windowCapturer.dispose();
+```
+
+### Creating and Configuring a VideoDesktopSource
+
+The `VideoDesktopSource` class allows you to capture video from a desktop source:
+
+```java
+import dev.onvoid.webrtc.media.video.VideoDesktopSource;
+
+// Create a desktop video source
+VideoDesktopSource videoSource = new VideoDesktopSource(); + +// Configure the desktop video source +// Set frame rate (e.g., 30 fps) +videoSource.setFrameRate(30); + +// Set maximum frame size (e.g., 1920x1080) +videoSource.setMaxFrameSize(1920, 1080); + +// Select a specific source to capture +// For a screen (isWindow = false) +videoSource.setSourceId(screenId, false); +// OR for a window (isWindow = true) +videoSource.setSourceId(windowId, true); + +// Start capturing +videoSource.start(); +``` + +### Creating a Video Track with the Desktop Source + +Once you have configured your desktop video source, you can create a video track: + +```java +import dev.onvoid.webrtc.PeerConnectionFactory; +import dev.onvoid.webrtc.media.video.VideoTrack; + +// Create a PeerConnectionFactory +PeerConnectionFactory factory = new PeerConnectionFactory(); + +// Create a video track with the desktop source +VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource); +``` + +### Adding the Track to a Peer Connection + +Add the video track to your peer connection: + +```java +import java.util.ArrayList; +import java.util.List; +import dev.onvoid.webrtc.RTCPeerConnection; + +// Assuming you already have a configured RTCPeerConnection +RTCPeerConnection peerConnection = factory.createPeerConnection(config, observer); + +// Add the track to the peer connection +List streamIds = new ArrayList<>(); +streamIds.add("stream1"); +peerConnection.addTrack(videoTrack, streamIds); +``` + +## Additional Features + +The `VideoDesktopSource` provides additional methods for controlling the desktop capture: + +### Source Selection + +You can change the capture source at runtime: + +```java +// Switch to a different screen +videoSource.setSourceId(newScreenId, false); + +// Switch to a window +videoSource.setSourceId(windowId, true); +``` + +### Capture Configuration + +You can adjust capture settings: + +```java +// Change frame rate +videoSource.setFrameRate(15); // Set to 15 fps + +// 
Change maximum resolution +videoSource.setMaxFrameSize(1280, 720); // Set to 720p +``` + +### Resource Management + +Always properly dispose of resources when done: + +```java +// Dispose of resources when done +videoSource.stop(); +videoSource.dispose(); +``` + +### Handling Source Changes + +If desktop sources might change during your application's lifecycle (e.g., new windows opening or screens connecting), you should periodically refresh the source list: + +```java +// Refresh the list of available sources +ScreenCapturer screenCapturer = new ScreenCapturer(); +List updatedScreens = screenCapturer.getDesktopSources(); + +WindowCapturer windowCapturer = new WindowCapturer(); +List updatedWindows = windowCapturer.getDesktopSources(); + +// Clean up +screenCapturer.dispose(); +windowCapturer.dispose(); +``` + +--- + +## Conclusion + +This guide has demonstrated how to set up a WebRTC peer connection with desktop video capture capabilities. +When implementing desktop capture in your application, remember to: + +- Always dispose of resources properly to prevent memory leaks +- Periodically refresh source lists to handle dynamic changes in available screens and windows +- Consider the performance implications of higher resolutions and frame rates +- Handle potential permission requirements on different operating systems + +Desktop capture is particularly useful for screen sharing applications, remote assistance tools, collaborative workspaces, and educational platforms where visual content sharing is essential. \ No newline at end of file diff --git a/docs/guide/logging.md b/docs/guide/logging.md new file mode 100644 index 00000000..de0eac3a --- /dev/null +++ b/docs/guide/logging.md @@ -0,0 +1,137 @@ +# Logging + +This guide explains how to use the logging capabilities. The library provides native WebRTC logging through the `dev.onvoid.webrtc.logging.Logging` class. 
+ +## Native WebRTC Logging + +The `Logging` class provides access to the native WebRTC logging system, allowing you to: +- Log messages at different severity levels +- Configure logging behavior +- Implement custom log sinks to capture and process log messages + +### Severity Levels + +The `Logging` class defines the following severity levels (in order of increasing severity): + +| Level | Description | +|-------|-------------| +| `VERBOSE` | For data which should not appear in the normal debug log, but should appear in diagnostic logs | +| `INFO` | Used in debugging | +| `WARNING` | Something that may warrant investigation | +| `ERROR` | A critical error has occurred | +| `NONE` | Do not log | + +### Basic Logging + +The `Logging` class provides several methods for logging messages: + +```java +import dev.onvoid.webrtc.logging.Logging; + +// Log messages with different severity levels +Logging.verbose("Detailed information for diagnostic purposes"); +Logging.info("General information about application operation"); +Logging.warn("Potential issue that might need attention"); +Logging.error("Critical error that affects application operation"); + +// Log an error with exception details +try { + // Some operation that might throw an exception +} catch (Exception e) { + Logging.error("Failed to perform operation", e); +} +``` + +### Configuring Logging Behavior + +You can configure various aspects of the logging system: + +```java +// Configure logging to debug output with minimum severity level +Logging.logToDebug(Logging.Severity.INFO); + +// Enable/disable thread ID in log messages +Logging.logThreads(true); + +// Enable/disable timestamps in log messages +Logging.logTimestamps(true); +``` + +### Custom Log Sinks + +You can implement custom log handlers by creating a class that implements the `LogSink` interface: + +```java +import dev.onvoid.webrtc.logging.LogSink; +import dev.onvoid.webrtc.logging.Logging; +import dev.onvoid.webrtc.logging.Logging.Severity; + 
+public class CustomLogSink implements LogSink { + + @Override + public void onLogMessage(Severity severity, String message) { + // Process log messages as needed + // For example, write to a file, send to a server, or display in UI + System.out.println("[" + severity + "] " + message); + } +} +``` + +Register your custom log sink to receive log messages: + +```java +// Create and register a log sink for messages with INFO severity or higher +CustomLogSink logSink = new CustomLogSink(); +Logging.addLogSink(Logging.Severity.INFO, logSink); +``` + +The log sink will receive all log messages with a severity level equal to or higher than the specified minimum level. + +## Integration with Other Logging Frameworks + +If you're using a different logging framework like Log4j or SLF4J, you can create a bridge by implementing a custom `LogSink` that forwards messages to your preferred logging system: + +```java +import dev.onvoid.webrtc.logging.LogSink; +import dev.onvoid.webrtc.logging.Logging; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Slf4jLogSink implements LogSink { + + private static final Logger logger = LoggerFactory.getLogger("WebRTC"); + + @Override + public void onLogMessage(Logging.Severity severity, String message) { + switch (severity) { + case VERBOSE: + logger.trace(message); + break; + case INFO: + logger.info(message); + break; + case WARNING: + logger.warn(message); + break; + case ERROR: + logger.error(message); + break; + default: + // Do nothing for NONE + } + } +} +``` + +Then register this sink with the WebRTC logging system: + +```java +Logging.addLogSink(Logging.Severity.VERBOSE, new Slf4jLogSink()); +``` + +This approach allows you to integrate WebRTC's native logging with your application's existing logging infrastructure. + +--- + +## Conclusion +By using the provided methods and custom log sinks, you can effectively capture, process, and manage log messages to aid in debugging and monitoring your application. 
Whether you choose to use the native logging capabilities or integrate with existing logging frameworks, the WebRTC logging system is designed to be adaptable to your needs. \ No newline at end of file diff --git a/docs/guide/media_devices.md b/docs/guide/media_devices.md new file mode 100644 index 00000000..7957a401 --- /dev/null +++ b/docs/guide/media_devices.md @@ -0,0 +1,179 @@ +# Media Devices + +This guide explains how to work with media devices (microphones, speakers, and cameras). It covers: + +- Querying available media devices +- Getting device capabilities +- Listening for device hotplug events + +Cameras and microphones play a key role in WebRTC. In a more complex application, you will most likely want to check all the connected cameras and microphones and select the appropriate device for the WebRTC session. These devices can be enumerated with the class `MediaDevices`. With `MediaDevices` you can also listen for device changes, whenever a device is connected or disconnected. + +## Querying Media Devices + +The `MediaDevices` class provides methods to query all available media devices connected to the system. 
+
+### Audio Capture Devices (Microphones)
+
+To get a list of all available microphones:
+
+```java
+// Import required classes
+import dev.onvoid.webrtc.media.MediaDevices;
+import dev.onvoid.webrtc.media.audio.AudioDevice;
+import java.util.List;
+
+// Get all microphones
+List<AudioDevice> microphones = MediaDevices.getAudioCaptureDevices();
+
+// Print microphone details
+for (AudioDevice microphone : microphones) {
+    System.out.println("Microphone: " + microphone.getName());
+    System.out.println(" Descriptor: " + microphone.getDescriptor());
+}
+
+// Get the default microphone
+AudioDevice defaultMicrophone = MediaDevices.getDefaultAudioCaptureDevice();
+if (defaultMicrophone != null) {
+    System.out.println("Default Microphone: " + defaultMicrophone.getName());
+}
+```
+
+### Audio Render Devices (Speakers)
+
+To get a list of all available speakers:
+
+```java
+// Import required classes
+import dev.onvoid.webrtc.media.MediaDevices;
+import dev.onvoid.webrtc.media.audio.AudioDevice;
+import java.util.List;
+
+// Get all speakers
+List<AudioDevice> speakers = MediaDevices.getAudioRenderDevices();
+
+// Print speaker details
+for (AudioDevice speaker : speakers) {
+    System.out.println("Speaker: " + speaker.getName());
+    System.out.println(" Descriptor: " + speaker.getDescriptor());
+}
+
+// Get the default speaker
+AudioDevice defaultSpeaker = MediaDevices.getDefaultAudioRenderDevice();
+if (defaultSpeaker != null) {
+    System.out.println("Default Speaker: " + defaultSpeaker.getName());
+}
+```
+
+### Video Capture Devices (Cameras)
+
+To get a list of all available cameras:
+
+```java
+// Import required classes
+import dev.onvoid.webrtc.media.MediaDevices;
+import dev.onvoid.webrtc.media.video.VideoDevice;
+import java.util.List;
+
+// Get all cameras
+List<VideoDevice> cameras = MediaDevices.getVideoCaptureDevices();
+
+// Print camera details
+for (VideoDevice camera : cameras) {
+    System.out.println("Camera: " + camera.getName());
+    System.out.println(" Descriptor: " + camera.getDescriptor());
+} +``` + +## Camera Capabilities + +You can query the capabilities of a specific camera to determine the supported resolutions and frame rates: + +```java +// Import required classes +import dev.onvoid.webrtc.media.MediaDevices; +import dev.onvoid.webrtc.media.video.VideoDevice; +import dev.onvoid.webrtc.media.video.VideoCaptureCapability; +import java.util.List; + +// Get all cameras +List cameras = MediaDevices.getVideoCaptureDevices(); +if (cameras.isEmpty()) { + System.out.println("No cameras found"); + return; +} + +// Get the first camera +VideoDevice camera = cameras.get(0); +System.out.println("Camera: " + camera.getName()); + +// Get camera capabilities +List capabilities = MediaDevices.getVideoCaptureCapabilities(camera); + +// Print camera capabilities +for (VideoCaptureCapability capability : capabilities) { + System.out.println(" Resolution: " + capability.width + "x" + capability.height); + System.out.println(" Frame Rate: " + capability.frameRate + " fps"); +} +``` + +## Hotplug Events + +You can listen for device hotplug events to be notified when devices are connected or disconnected: + +```java +// Import required classes +import dev.onvoid.webrtc.media.Device; +import dev.onvoid.webrtc.media.DeviceChangeListener; +import dev.onvoid.webrtc.media.MediaDevices; +import dev.onvoid.webrtc.media.audio.AudioDevice; +import dev.onvoid.webrtc.media.video.VideoDevice; + +// Create a device change listener +DeviceChangeListener listener = new DeviceChangeListener() { + @Override + public void deviceConnected(Device device) { + System.out.println("Device connected: " + device.getName()); + + if (device instanceof AudioDevice) { + System.out.println(" Type: Audio Device"); + } + else if (device instanceof VideoDevice) { + System.out.println(" Type: Video Device"); + } + } + + @Override + public void deviceDisconnected(Device device) { + System.out.println("Device disconnected: " + device.getName()); + + if (device instanceof AudioDevice) { + 
System.out.println(" Type: Audio Device"); + } + else if (device instanceof VideoDevice) { + System.out.println(" Type: Video Device"); + } + } +}; + +// Register the listener +MediaDevices.addDeviceChangeListener(listener); + +// ... later, when you're done listening for events +// Unregister the listener +MediaDevices.removeDeviceChangeListener(listener); +``` + +--- + +## Conclusion + +This guide has provided a comprehensive overview of working with media devices in the WebRTC library. +When implementing media device handling in your applications, consider these best practices: + +- Always check if devices exist before trying to use them +- Handle the case where no devices are available gracefully +- Provide users with the ability to select from available devices +- Implement hotplug listeners to dynamically update available devices +- Remember to unregister device listeners when they're no longer needed + +You can use the code examples in this guide as a starting point for applications that need to work with media devices in WebRTC scenarios like video conferencing, live streaming, or media recording. \ No newline at end of file diff --git a/docs/guide/overview.md b/docs/guide/overview.md new file mode 100644 index 00000000..457fad48 --- /dev/null +++ b/docs/guide/overview.md @@ -0,0 +1,23 @@ +# Guides + +This section provides detailed guides for various features of the webrtc-java library. 
+ +## Media Guides + +- [Media Devices](guide/media_devices.md) - Working with audio and video devices +- [Audio Device Selection](guide/audio_devices.md) - Selecting and configuring audio devices +- [Bitrate and Framerate Constraints](guide/constraints.md) - Controlling media quality +- [Desktop Capture](guide/desktop_capture.md) - Capturing and sharing screens and windows + +## Data Communication + +- [Data Channels](guide/data_channels.md) - Sending and receiving arbitrary data between peers + +## Monitoring and Debugging + +- [RTC Stats](guide/rtc_stats.md) - Monitoring connection quality and performance +- [Logging](guide/logging.md) - Configuring and using the logging system + +## Additional Resources + +For a complete API reference, check the [JavaDoc](https://javadoc.io/doc/dev.onvoid.webrtc/webrtc-java/latest/index.html). \ No newline at end of file diff --git a/docs/guide/rtc_stats.md b/docs/guide/rtc_stats.md new file mode 100644 index 00000000..46e97b9d --- /dev/null +++ b/docs/guide/rtc_stats.md @@ -0,0 +1,397 @@ +# RTC Stats + +This guide explains how to use WebRTC statistics (RTC Stats) with the webrtc-java library. RTC Stats provide detailed metrics and information about the state and performance of your WebRTC connections. + +## Overview + +WebRTC statistics allow you to: +- Monitor the quality of audio and video streams +- Track network performance metrics +- Diagnose connection issues +- Gather information about codecs, candidates, and data channels +- Analyze media source characteristics + +Statistics are collected through the `RTCPeerConnection` and are delivered asynchronously via callback. + +## Understanding RTC Stats Classes + +The webrtc-java library provides several classes for working with statistics: + +### RTCStats + +The `RTCStats` class represents statistics for a specific monitored object at a specific moment in time. 
Each `RTCStats` object contains: + +- `timestamp`: When the stats were collected (in microseconds since UNIX epoch) +- `type`: An enum value indicating the type of stats (from `RTCStatsType`) +- `id`: A unique identifier for the object that was inspected +- `attributes`: A map of key-value pairs containing the actual statistics data + +### RTCStatsReport + +The `RTCStatsReport` class contains a collection of `RTCStats` objects gathered at the same time. It provides: + +- A map of `RTCStats` objects, accessible via the `getStats()` method +- A timestamp indicating when the report was generated + +### RTCStatsType + +The `RTCStatsType` enum defines the different types of statistics that can be collected: + +```java +public enum RTCStatsType { + CODEC, // Codec statistics + INBOUND_RTP, // Incoming RTP stream statistics + OUTBOUND_RTP, // Outgoing RTP stream statistics + REMOTE_INBOUND_RTP, // Remote endpoint's incoming RTP statistics + REMOTE_OUTBOUND_RTP, // Remote endpoint's outgoing RTP statistics + MEDIA_SOURCE, // Media source statistics + CSRC, // Contributing source statistics + PEER_CONNECTION, // Peer connection statistics + DATA_CHANNEL, // Data channel statistics + STREAM, // MediaStream statistics + TRACK, // MediaStreamTrack statistics + SENDER, // RTP sender statistics + RECEIVER, // RTP receiver statistics + TRANSPORT, // Transport statistics + CANDIDATE_PAIR, // ICE candidate pair statistics + LOCAL_CANDIDATE, // Local ICE candidate statistics + REMOTE_CANDIDATE, // Remote ICE candidate statistics + CERTIFICATE, // Certificate statistics + ICE_SERVER // ICE server statistics +} +``` + +## Collecting Statistics + +### Getting Stats for the Entire Connection + +To collect statistics for the entire peer connection: + +```java +import dev.onvoid.webrtc.RTCPeerConnection; +import dev.onvoid.webrtc.RTCStats; +import dev.onvoid.webrtc.RTCStatsReport; +import dev.onvoid.webrtc.RTCStatsCollectorCallback; + +// Assuming you already have an RTCPeerConnection 
+RTCPeerConnection peerConnection = /* your peer connection */; + +// Request statistics +peerConnection.getStats(new RTCStatsCollectorCallback() { + @Override + public void onStatsDelivered(RTCStatsReport report) { + // Process the stats report + System.out.println("Stats collected at: " + report.getTimestamp()); + + // Access all stats in the report + Map stats = report.getStats(); + System.out.println("Number of stats objects: " + stats.size()); + + // Process individual stats objects + for (RTCStats stat : stats.values()) { + System.out.println("Stat type: " + stat.getType()); + System.out.println("Stat ID: " + stat.getId()); + + // Access the attributes + Map attributes = stat.getAttributes(); + for (Map.Entry entry : attributes.entrySet()) { + System.out.println(entry.getKey() + ": " + entry.getValue()); + } + } + } +}); +``` + +You can also use a lambda expression for more concise code: + +```java +peerConnection.getStats(report -> { + // Process the stats report + System.out.println("Stats report received with " + report.getStats().size() + " stats objects"); +}); +``` + +### Getting Stats for a Specific Sender or Receiver + +You can also collect statistics for a specific RTP sender or receiver: + +```java +// For a specific sender +RTCRtpSender sender = /* your RTP sender */; +peerConnection.getStats(sender, report -> { + // Process sender stats + System.out.println("Sender stats received"); +}); + +// For a specific receiver +RTCRtpReceiver receiver = /* your RTP receiver */; +peerConnection.getStats(receiver, report -> { + // Process receiver stats + System.out.println("Receiver stats received"); +}); +``` + +## Working with Specific Stat Types + +Different stat types contain different attributes. 
Here are examples of how to work with some common stat types: + +### Inbound RTP Statistics + +```java +peerConnection.getStats(report -> { + for (RTCStats stats : report.getStats().values()) { + if (stats.getType() == RTCStatsType.INBOUND_RTP) { + Map attributes = stats.getAttributes(); + + // Access common inbound RTP attributes + Long packetsReceived = (Long) attributes.get("packetsReceived"); + Long bytesReceived = (Long) attributes.get("bytesReceived"); + Double jitter = (Double) attributes.get("jitter"); + Long packetsLost = (Long) attributes.get("packetsLost"); + + System.out.println("Inbound RTP Stats:"); + System.out.println("Packets received: " + packetsReceived); + System.out.println("Bytes received: " + bytesReceived); + System.out.println("Jitter (seconds): " + jitter); + System.out.println("Packets lost: " + packetsLost); + } + } +}); +``` + +### Outbound RTP Statistics + +```java +peerConnection.getStats(report -> { + for (RTCStats stats : report.getStats().values()) { + if (stats.getType() == RTCStatsType.OUTBOUND_RTP) { + Map attributes = stats.getAttributes(); + + // Access common outbound RTP attributes + Long packetsSent = (Long) attributes.get("packetsSent"); + Long bytesSent = (Long) attributes.get("bytesSent"); + + System.out.println("Outbound RTP Stats:"); + System.out.println("Packets sent: " + packetsSent); + System.out.println("Bytes sent: " + bytesSent); + } + } +}); +``` + +### Candidate Pair Statistics + +```java +peerConnection.getStats(report -> { + for (RTCStats stats : report.getStats().values()) { + if (stats.getType() == RTCStatsType.CANDIDATE_PAIR) { + Map attributes = stats.getAttributes(); + + // Access common candidate pair attributes + Boolean nominated = (Boolean) attributes.get("nominated"); + String state = (String) attributes.get("state"); + Long bytesSent = (Long) attributes.get("bytesSent"); + Long bytesReceived = (Long) attributes.get("bytesReceived"); + Double currentRoundTripTime = (Double) 
attributes.get("currentRoundTripTime"); + + System.out.println("ICE Candidate Pair Stats:"); + System.out.println("Nominated: " + nominated); + System.out.println("State: " + state); + System.out.println("Bytes sent: " + bytesSent); + System.out.println("Bytes received: " + bytesReceived); + System.out.println("Current RTT (seconds): " + currentRoundTripTime); + } + } +}); +``` + +## Monitoring Connection Quality + +You can periodically collect statistics to monitor the quality of your WebRTC connection: + +```java +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +// Create a scheduler +ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(); + +// Schedule periodic stats collection +scheduler.scheduleAtFixedRate(() -> { + peerConnection.getStats(report -> { + // Process and analyze stats + analyzeConnectionQuality(report); + }); +}, 0, 2, TimeUnit.SECONDS); // Collect stats every 2 seconds + +// Example method to analyze connection quality +private void analyzeConnectionQuality(RTCStatsReport report) { + // Track packet loss + Long totalPacketsLost = 0L; + Long totalPacketsReceived = 0L; + + // Track jitter + Double maxJitter = 0.0; + + // Track round-trip time + Double currentRtt = 0.0; + + for (RTCStats stats : report.getStats().values()) { + Map attributes = stats.getAttributes(); + + if (stats.getType() == RTCStatsType.INBOUND_RTP) { + Long packetsLost = (Long) attributes.get("packetsLost"); + Long packetsReceived = (Long) attributes.get("packetsReceived"); + Double jitter = (Double) attributes.get("jitter"); + + if (packetsLost != null) totalPacketsLost += packetsLost; + if (packetsReceived != null) totalPacketsReceived += packetsReceived; + if (jitter != null && jitter > maxJitter) maxJitter = jitter; + } + else if (stats.getType() == RTCStatsType.CANDIDATE_PAIR) { + Double rtt = (Double) attributes.get("currentRoundTripTime"); + if (rtt != null) 
currentRtt = rtt; + } + } + + // Calculate packet loss percentage + double packetLossPercent = 0; + if (totalPacketsReceived + totalPacketsLost > 0) { + packetLossPercent = (totalPacketsLost * 100.0) / (totalPacketsReceived + totalPacketsLost); + } + + // Log or display the quality metrics + System.out.println("Connection Quality Metrics:"); + System.out.println("Packet Loss: " + String.format("%.2f%%", packetLossPercent)); + System.out.println("Max Jitter: " + String.format("%.2f ms", maxJitter * 1000)); + System.out.println("Round-Trip Time: " + String.format("%.2f ms", currentRtt * 1000)); + + // Determine overall quality + String qualityRating; + if (packetLossPercent < 1 && maxJitter < 0.030 && currentRtt < 0.100) { + qualityRating = "Excellent"; + } else if (packetLossPercent < 3 && maxJitter < 0.050 && currentRtt < 0.200) { + qualityRating = "Good"; + } else if (packetLossPercent < 8 && maxJitter < 0.100 && currentRtt < 0.300) { + qualityRating = "Fair"; + } else { + qualityRating = "Poor"; + } + + System.out.println("Overall Quality: " + qualityRating); +} + +// Don't forget to shut down the scheduler when done +// scheduler.shutdown(); +``` + +## Handling Asynchronous Stats Collection + +Since stats are collected asynchronously, you might need to coordinate with other operations. 
Here's an example using a CountDownLatch: + +```java +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +public RTCStatsReport collectStatsSync(RTCPeerConnection peerConnection, long timeoutMs) throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + AtomicReference reportRef = new AtomicReference<>(); + + peerConnection.getStats(report -> { + reportRef.set(report); + latch.countDown(); + }); + + // Wait for stats to be delivered or timeout + if (!latch.await(timeoutMs, TimeUnit.MILLISECONDS)) { + throw new RuntimeException("Timed out waiting for stats"); + } + + return reportRef.get(); +} + +// Usage +try { + RTCStatsReport report = collectStatsSync(peerConnection, 5000); + // Process the report +} catch (InterruptedException e) { + System.err.println("Stats collection was interrupted: " + e.getMessage()); +} catch (RuntimeException e) { + System.err.println("Stats collection failed: " + e.getMessage()); +} +``` + +## Best Practices + +1. **Don't Collect Too Frequently**: Collecting stats is resource-intensive. For most applications, collecting stats every 1-2 seconds is sufficient. + +2. **Handle Null Values**: Some attributes might be null or missing depending on the state of the connection and the browser implementation. Always check for null values before using them. + +3. **Type Casting**: The attributes in the stats objects are returned as generic Objects. You need to cast them to the appropriate type (Boolean, Long, Double, String, etc.) before using them. + +4. **Trend Analysis**: Individual stats snapshots are useful, but tracking trends over time provides more valuable insights. Consider storing historical data to analyze trends. + +5. 
**Focus on Relevant Stats**: Depending on your use case, focus on the most relevant stats: + - For video quality: frame rate, resolution, packets lost + - For audio quality: jitter, packets lost + - For network performance: round-trip time, bandwidth + +6. **Correlation**: Correlate stats with user experience. For example, if users report poor quality, check the stats during that time to identify potential issues. + +7. **Logging**: Log stats periodically and especially when issues occur to help with debugging. + +## Common Attributes by Stat Type + +Different stat types have different attributes. Here are some common attributes for each type: + +### INBOUND_RTP +- packetsReceived +- bytesReceived +- packetsLost +- jitter +- framesDecoded (video) +- framesDropped (video) +- audioLevel (audio) + +### OUTBOUND_RTP +- packetsSent +- bytesSent +- retransmittedPacketsSent +- framesSent (video) +- framesEncoded (video) +- targetBitrate + +### CANDIDATE_PAIR +- nominated +- state +- bytesSent +- bytesReceived +- currentRoundTripTime +- availableOutgoingBitrate +- availableIncomingBitrate + +### TRANSPORT +- bytesSent +- bytesReceived +- dtlsState +- selectedCandidatePairId + +### MEDIA_SOURCE +- trackIdentifier +- kind +- audioLevel (audio) +- totalAudioEnergy (audio) +- width (video) +- height (video) +- frames (video) +- framesPerSecond (video) + +Remember that the available attributes may vary depending on the state of the connection. + +--- + +## Conclusion + +WebRTC statistics provide essential insights into the performance and health of your real-time communications. By leveraging the RTC Stats API in webrtc-java, you can monitor connection quality, diagnose issues, and optimize your application's performance. The ability to collect detailed metrics on packets, jitter, latency, and more allows you to make data-driven decisions. 
\ No newline at end of file diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 00000000..d46428e6 --- /dev/null +++ b/docs/index.html @@ -0,0 +1,64 @@ + + + + + webrtc-java - Java native interface for WebRTC + + + + + + + + +
+ + + + + + + + + + diff --git a/docs/quickstart.md b/docs/quickstart.md new file mode 100644 index 00000000..415dd3aa --- /dev/null +++ b/docs/quickstart.md @@ -0,0 +1,292 @@ +# Quickstart Guide + +This guide will help you get started with webrtc-java quickly. We'll cover installation, basic setup, and simple examples to demonstrate core functionality. + +## Supported Platforms +Maven Central artifacts contain native libraries that can be loaded on the following platforms: + + + + + + + + + + + + + + + + + + + + + + + + + + +
x64armarm64
Linux✔ armeabi-v7a✔ arm64-v8a
macOS-
Windows--
+ +## Installation + +### Maven + +Add the following dependency to your `pom.xml`: + +```xml + + dev.onvoid.webrtc + webrtc-java + {{VERSION}} + +``` + +#### Using SNAPSHOT Versions + +If you want to use the latest development version, you can use a SNAPSHOT release: + +```xml + + dev.onvoid.webrtc + webrtc-java + {{VERSION_SNAPSHOT}} + +``` + +To use SNAPSHOT versions, you need to add the following repository configuration to your `pom.xml`: + +```xml + + + Central Portal Snapshots + central-portal-snapshots + https://central.sonatype.com/repository/maven-snapshots/ + + false + + + true + + + +``` + +### Gradle + +Add the following to your `build.gradle`: + +```gradle +implementation "dev.onvoid.webrtc:webrtc-java:{{VERSION}}" +``` + +For specific platforms, add the appropriate classifier: + +```gradle +implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "{{VERSION}}", classifier: "windows-x86_64" +implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "{{VERSION}}", classifier: "macos-x86_64" +implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "{{VERSION}}", classifier: "macos-aarch64" +implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "{{VERSION}}", classifier: "linux-x86_64" +implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "{{VERSION}}", classifier: "linux-aarch64" +implementation group: "dev.onvoid.webrtc", name: "webrtc-java", version: "{{VERSION}}", classifier: "linux-aarch32" +``` + +## Basic Setup + +### Initialize the WebRTC Library + +First, create a `PeerConnectionFactory` which is the entry point for most WebRTC operations: + +```java +import dev.onvoid.webrtc.PeerConnectionFactory; + +// Create a peer connection factory +PeerConnectionFactory factory = new PeerConnectionFactory(); +``` + +### Create a Peer Connection + +To establish communication with another peer, you need to create a `RTCPeerConnection`: + +```java +import 
dev.onvoid.webrtc.RTCConfiguration; +import dev.onvoid.webrtc.RTCIceServer; +import dev.onvoid.webrtc.RTCPeerConnection; +import dev.onvoid.webrtc.PeerConnectionObserver; + +// Configure ICE servers (STUN/TURN) +RTCConfiguration config = new RTCConfiguration(); +RTCIceServer iceServer = new RTCIceServer(); +iceServer.urls.add("stun:stun.l.google.com:19302"); +config.iceServers.add(iceServer); + +// Create a peer connection with an observer to handle events +RTCPeerConnection peerConnection = factory.createPeerConnection(config, new PeerConnectionObserver() { + // Implement required methods +}); +``` + +## Signaling + +WebRTC requires a signaling mechanism to exchange connection information between peers. The library doesn't provide this, so you'll need to implement it using your preferred method (WebSockets, HTTP, etc.). + +Here's a simplified example of the signaling process: + +```java +// Create an offer +RTCOfferOptions options = new RTCOfferOptions(); + +peerConnection.createOffer(options, new CreateSessionDescriptionObserver() { + @Override + public void onSuccess(RTCSessionDescription description) { + // Set local description + peerConnection.setLocalDescription(description, new SetSessionDescriptionObserver() { + @Override + public void onSuccess() { + // Send the offer to the remote peer via your signaling channel + signalingChannel.send(description); + } + + @Override + public void onFailure(String error) { + System.err.println("Failed to set local description: " + error); + } + }); + } + + @Override + public void onFailure(String error) { + System.err.println("Failed to create offer: " + error); + } +}); + +// When you receive an answer from the remote peer via your signaling channel +signalingChannel.onMessage(message -> { + RTCSessionDescription remoteDescription = parseSessionDescription(message); + peerConnection.setRemoteDescription(remoteDescription, new SetSessionDescriptionObserver() { + @Override + public void onSuccess() { + 
System.out.println("Remote description set successfully"); + } + + @Override + public void onFailure(String error) { + System.err.println("Failed to set remote description: " + error); + } + }); +}); + +// Handle ICE candidates +PeerConnectionObserver connectionObserver = new PeerConnectionObserver() { + @Override + public void onIceCandidate(RTCIceCandidate candidate) { + // Send the ICE candidate to the remote peer via your signaling channel + signalingChannel.send(candidate); + } +}; + +// When you receive an ICE candidate from the remote peer +signalingChannel.onIceCandidate(candidateMessage -> { + RTCIceCandidate candidate = parseIceCandidate(candidateMessage); + peerConnection.addIceCandidate(candidate); +}); +``` + +## Media Streams + +### Accessing Media Devices + +To query media devices (cameras and microphones): + +```java +import dev.onvoid.webrtc.media.MediaDevices; +import dev.onvoid.webrtc.media.video.VideoDevice; +import dev.onvoid.webrtc.media.audio.AudioDevice; + +// Get available video devices +List videoDevices = MediaDevices.getVideoCaptureDevices(); +for (VideoDevice device : videoDevices) { + System.out.println("Video device: " + device.getName()); +} + +// Get available audio devices +List audioDevices = MediaDevices.getAudioCaptureDevices(); +for (AudioDevice device : audioDevices) { + System.out.println("Audio device: " + device.getName()); +} +``` + +### Creating Media Tracks + +To create audio and video tracks: + +```java +import dev.onvoid.webrtc.media.video.VideoTrack; +import dev.onvoid.webrtc.media.audio.AudioTrack; +import dev.onvoid.webrtc.media.video.VideoDeviceSource; +import dev.onvoid.webrtc.media.audio.AudioDeviceSource; + +// Create a video source and track +VideoDeviceSource videoSource = new VideoDeviceSource(); +videoSource.setVideoCaptureDevice(videoDevices.get(0)); // Use the first available camera +VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource); + +// Create an audio source and track 
+AudioOptions audioOptions = new AudioOptions(); +audioOptions.echoCancellation = true; +audioOptions.autoGainControl = true; +audioOptions.noiseSuppression = true; + +// Create an audio source using the default audio device +AudioTrackSource audioSource = factory.createAudioSource(audioOptions); +AudioTrack audioTrack = factory.createAudioTrack("audio0", audioSource); +``` + +> Audio devices can be specified via the `AudioDeviceModule`, see the guide on [Audio Devices](guide/audio_devices) for more details. + +### Adding Tracks to Peer Connection + +```java +import java.util.ArrayList; +import java.util.List; + +// Add tracks to the peer connection +List streamIds = new ArrayList<>(); +streamIds.add("stream1"); +peerConnection.addTrack(videoTrack, streamIds); +peerConnection.addTrack(audioTrack, streamIds); +``` + +## Cleanup + +Always properly dispose of resources when you're done: + +```java +// Dispose of tracks +videoTrack.dispose(); +audioTrack.dispose(); + +// Dispose of sources +videoSource.dispose(); + +// Close peer connection and release resources +peerConnection.close(); + +// Dispose of factory +factory.dispose(); +``` + +## Next Steps + +Now that you have a basic understanding of webrtc-java, you can explore more advanced features: + +- [Data Channels](guide/data_channels) - Learn more about data channels +- [RTC Stats](guide/rtc_stats) - Monitor connection quality +- [Desktop Capture](guide/desktop_capture) - Share screens and windows +- [All Guides](guide) - Complete list of guides + +For a complete API reference, check the [JavaDoc](https://javadoc.io/doc/dev.onvoid.webrtc/webrtc-java/latest/index.html). 
\ No newline at end of file diff --git a/pom.xml b/pom.xml index d2214301..24fac10a 100644 --- a/pom.xml +++ b/pom.xml @@ -52,6 +52,7 @@ webrtc-jni webrtc + webrtc-examples @@ -72,7 +73,7 @@ default-compile 9 - + @@ -81,14 +82,14 @@ compile - + module-info.java - + 8 diff --git a/webrtc-examples/pom.xml b/webrtc-examples/pom.xml new file mode 100644 index 00000000..3efe6328 --- /dev/null +++ b/webrtc-examples/pom.xml @@ -0,0 +1,60 @@ + + + 4.0.0 + + + dev.onvoid.webrtc + webrtc-java-parent + 0.13.0-SNAPSHOT + + + webrtc-java-examples + + webrtc-java-examples + + + true + true + UTF-8 + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + default-compile + compile + + compile + + + 17 + + + + base-compile + none + + + + 17 + + 17 + + + + + + + + + ${project.groupId} + webrtc-java + ${project.version} + + + \ No newline at end of file diff --git a/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/CodecListExample.java b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/CodecListExample.java new file mode 100644 index 00000000..bba5bcb8 --- /dev/null +++ b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/CodecListExample.java @@ -0,0 +1,121 @@ +/* + * Copyright 2025 WebRTC Java Contributors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package dev.onvoid.webrtc.examples; + +import java.util.List; +import java.util.Map; + +import dev.onvoid.webrtc.PeerConnectionFactory; +import dev.onvoid.webrtc.RTCRtpCapabilities; +import dev.onvoid.webrtc.RTCRtpCodecCapability; +import dev.onvoid.webrtc.media.MediaType; + +/** + * Example demonstrating how to list all supported codecs with the WebRTC peer-connection-factory. + *

+ * This example shows how to: + *

    + *
  • Create a PeerConnectionFactory
  • + *
  • Get the supported codecs for both sending and receiving audio and video
  • + *
  • Display detailed information about each codec
  • + *
+ * + * @author Alex Andres + */ +public class CodecListExample { + + public static void main(String[] args) { + PeerConnectionFactory factory = new PeerConnectionFactory(); + + try { + // Get receiver capabilities for audio and video. + System.out.println("\nRECEIVER CAPABILITIES:"); + System.out.println("---------------------"); + + System.out.println("\nAudio Receiver Codecs:"); + RTCRtpCapabilities audioReceiverCapabilities = factory.getRtpReceiverCapabilities(MediaType.AUDIO); + printCodecInfo(audioReceiverCapabilities.getCodecs()); + + System.out.println("\nVideo Receiver Codecs:"); + RTCRtpCapabilities videoReceiverCapabilities = factory.getRtpReceiverCapabilities(MediaType.VIDEO); + printCodecInfo(videoReceiverCapabilities.getCodecs()); + + // Get sender capabilities for audio and video. + System.out.println("\nSENDER CAPABILITIES:"); + System.out.println("-------------------"); + + System.out.println("\nAudio Sender Codecs:"); + RTCRtpCapabilities audioSenderCapabilities = factory.getRtpSenderCapabilities(MediaType.AUDIO); + printCodecInfo(audioSenderCapabilities.getCodecs()); + + System.out.println("\nVideo Sender Codecs:"); + RTCRtpCapabilities videoSenderCapabilities = factory.getRtpSenderCapabilities(MediaType.VIDEO); + printCodecInfo(videoSenderCapabilities.getCodecs()); + } + finally { + // Dispose the factory when done. + factory.dispose(); + } + } + + /** + * Prints detailed information about each codec in the provided list. + * All information for a codec is printed in a single line for conciseness. + * + * @param codecs List of codec capabilities to display. 
+ */ + private static void printCodecInfo(List codecs) { + if (codecs.isEmpty()) { + System.out.println(" No codecs found."); + return; + } + + for (int i = 0; i < codecs.size(); i++) { + RTCRtpCodecCapability codec = codecs.get(i); + StringBuilder sb = new StringBuilder(); + + sb.append(" Codec #").append(i + 1).append(": "); + sb.append("MIME Type: ").append(codec.getMimeType()).append(" | "); + sb.append("Media Type: ").append(codec.getMediaType()).append(" | "); + sb.append("Name: ").append(codec.getName()).append(" | "); + sb.append("Clock Rate: ").append(codec.getClockRate()).append(" Hz"); + + if (codec.getMediaType() == MediaType.AUDIO) { + int channels = codec.getChannels(); + sb.append(" | Channels: ").append(channels) + .append(channels == 1 ? " (mono)" : channels == 2 ? " (stereo)" : ""); + } + + // Add SDP format parameters if available. + Map sdpFmtp = codec.getSDPFmtp(); + if (sdpFmtp != null && !sdpFmtp.isEmpty()) { + sb.append(" | SDP Params: {"); + boolean first = true; + for (Map.Entry entry : sdpFmtp.entrySet()) { + if (!first) { + sb.append(", "); + } + sb.append(entry.getKey()).append("=").append(entry.getValue()); + first = false; + } + sb.append("}"); + } + + System.out.println(sb); + } + } +} \ No newline at end of file diff --git a/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/DesktopVideoExample.java b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/DesktopVideoExample.java new file mode 100644 index 00000000..88fddc6d --- /dev/null +++ b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/DesktopVideoExample.java @@ -0,0 +1,251 @@ +/* + * Copyright 2025 WebRTC Java Contributors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.onvoid.webrtc.examples; + +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import dev.onvoid.webrtc.PeerConnectionFactory; +import dev.onvoid.webrtc.PeerConnectionObserver; +import dev.onvoid.webrtc.RTCConfiguration; +import dev.onvoid.webrtc.RTCDataChannel; +import dev.onvoid.webrtc.RTCIceCandidate; +import dev.onvoid.webrtc.RTCIceConnectionState; +import dev.onvoid.webrtc.RTCIceGatheringState; +import dev.onvoid.webrtc.RTCIceServer; +import dev.onvoid.webrtc.RTCPeerConnection; +import dev.onvoid.webrtc.RTCPeerConnectionState; +import dev.onvoid.webrtc.RTCRtpReceiver; +import dev.onvoid.webrtc.RTCRtpTransceiver; +import dev.onvoid.webrtc.RTCSignalingState; +import dev.onvoid.webrtc.media.MediaStream; +import dev.onvoid.webrtc.media.MediaStreamTrack; +import dev.onvoid.webrtc.media.video.VideoDesktopSource; +import dev.onvoid.webrtc.media.video.VideoTrack; +import dev.onvoid.webrtc.media.video.desktop.DesktopSource; +import dev.onvoid.webrtc.media.video.desktop.ScreenCapturer; +import dev.onvoid.webrtc.media.video.desktop.WindowCapturer; + +/** + * Example demonstrating how to set up a peer connection with a desktop video source. + *

+ * This example shows how to: + *

    + *
  • Create a PeerConnectionFactory
  • + *
  • Get available desktop sources (screens and windows)
  • + *
  • Create a VideoDesktopSource for capturing screen or window content
  • + *
  • Configure the VideoDesktopSource properties
  • + *
  • Create a video track with the desktop source
  • + *
  • Set up a peer connection
  • + *
+ *

+ * Note: This example focuses only on setting up the local peer connection with + * a desktop video source for bidirectional media transfer. In a real application, + * you would need to establish a connection with a remote peer through a signaling + * channel (e.g., WebSocket). + * + * @author Alex Andres + */ +public class DesktopVideoExample { + + public static void main(String[] args) { + // Create a PeerConnectionFactory, which is the main entry point for WebRTC. + PeerConnectionFactory factory = new PeerConnectionFactory(); + + try { + LocalPeer localPeer = new LocalPeer(factory); + + // Keep the application running to observe state changes. + System.out.println("Press Enter to exit..."); + System.in.read(); + + // Clean up. + localPeer.dispose(); + } + catch (Exception e) { + Logger.getLogger(DesktopVideoExample.class.getName()) + .log(Level.SEVERE, "Error in DesktopVideoExample", e); + } + finally { + // Dispose the factory when done. + factory.dispose(); + } + } + + /** + * Represents a peer connection with audio and desktop video tracks. + */ + private static class LocalPeer implements PeerConnectionObserver { + + private final RTCPeerConnection peerConnection; + private final VideoDesktopSource videoSource; + + + public LocalPeer(PeerConnectionFactory factory) { + // Create a basic configuration for the peer connection. + RTCConfiguration config = new RTCConfiguration(); + + // Add a STUN server to help with NAT traversal. + RTCIceServer iceServer = new RTCIceServer(); + iceServer.urls.add("stun:stun.l.google.com:19302"); + config.iceServers.add(iceServer); + + // Create the peer connection. + peerConnection = factory.createPeerConnection(config, this); + + // Get available desktop sources. + System.out.println("Getting available desktop sources..."); + + // Get available screens. 
+ ScreenCapturer screenCapturer = new ScreenCapturer(); + List screens = screenCapturer.getDesktopSources(); + System.out.println("\nAvailable screens:"); + for (DesktopSource screen : screens) { + System.out.printf(" Screen: %s (ID: %d)%n", screen.title, screen.id); + } + + // Get available windows. + WindowCapturer windowCapturer = new WindowCapturer(); + List windows = windowCapturer.getDesktopSources(); + System.out.println("\nAvailable windows:"); + for (DesktopSource window : windows) { + System.out.printf(" Window: %s (ID: %d)%n", window.title, window.id); + } + + // Clean up the capturers as we only needed them to get the sources. + screenCapturer.dispose(); + windowCapturer.dispose(); + + // Create a desktop video source. + videoSource = new VideoDesktopSource(); + + // Configure the desktop video source. + // Set frame rate (e.g., 30 fps). + videoSource.setFrameRate(30); + + // Set maximum frame size (e.g., 1920x1080). + videoSource.setMaxFrameSize(1920, 1080); + + // Select a source to capture. + // For this example; we'll use the first available screen if there is one. + if (!screens.isEmpty()) { + DesktopSource selectedScreen = screens.get(0); + System.out.printf("%nSelected screen for capture: %s (ID: %d)%n", + selectedScreen.title, selectedScreen.id); + videoSource.setSourceId(selectedScreen.id, false); + } + // Otherwise, use the first available window if there is one. + else if (!windows.isEmpty()) { + DesktopSource selectedWindow = windows.get(0); + System.out.printf("%nSelected window for capture: %s (ID: %d)%n", + selectedWindow.title, selectedWindow.id); + videoSource.setSourceId(selectedWindow.id, true); + } + // If no sources are available, fall back to a default (primary screen). + else { + System.out.println("\nNo desktop sources found. Using default (primary screen)."); + videoSource.setSourceId(0, false); + } + + // Start capturing. + videoSource.start(); + + // Create a video track with the desktop source. 
+ VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource); + + // Add the tracks to the peer connection. + List streamIds = new ArrayList<>(); + streamIds.add("stream1"); + peerConnection.addTrack(videoTrack, streamIds); + + System.out.println("LocalPeer: Created with a desktop video track"); + } + + /** + * Closes the peer connection and releases resources. + */ + public void dispose() { + if (videoSource != null) { + // Stop capturing before disposing. + videoSource.stop(); + videoSource.dispose(); + } + if (peerConnection != null) { + peerConnection.close(); + } + } + + // PeerConnectionObserver implementation. + + @Override + public void onIceCandidate(RTCIceCandidate candidate) { + System.out.println("LocalPeer: New ICE candidate: " + candidate.sdp); + // In a real application, you would send this candidate to the remote peer + // through your signaling channel. + } + + @Override + public void onConnectionChange(RTCPeerConnectionState state) { + System.out.println("LocalPeer: Connection state changed to: " + state); + } + + @Override + public void onIceConnectionChange(RTCIceConnectionState state) { + System.out.println("LocalPeer: ICE connection state changed to: " + state); + } + + @Override + public void onIceGatheringChange(RTCIceGatheringState state) { + System.out.println("LocalPeer: ICE gathering state changed to: " + state); + } + + @Override + public void onSignalingChange(RTCSignalingState state) { + System.out.println("LocalPeer: Signaling state changed to: " + state); + } + + @Override + public void onDataChannel(RTCDataChannel dataChannel) { + System.out.println("LocalPeer: Data channel created: " + dataChannel.getLabel()); + } + + @Override + public void onRenegotiationNeeded() { + System.out.println("LocalPeer: Renegotiation needed"); + // In a real application, you would create an offer and set it as the local description. 
+ } + + @Override + public void onAddTrack(RTCRtpReceiver receiver, MediaStream[] mediaStreams) { + System.out.println("LocalPeer: Track added: " + receiver.getTrack().getKind()); + } + + @Override + public void onRemoveTrack(RTCRtpReceiver receiver) { + System.out.println("LocalPeer: Track removed: " + receiver.getTrack().getKind()); + } + + @Override + public void onTrack(RTCRtpTransceiver transceiver) { + MediaStreamTrack track = transceiver.getReceiver().getTrack(); + + System.out.println("LocalPeer: Transceiver track added: " + track.getKind()); + } + } +} \ No newline at end of file diff --git a/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/PeerConnectionExample.java b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/PeerConnectionExample.java new file mode 100644 index 00000000..27cc7eb2 --- /dev/null +++ b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/PeerConnectionExample.java @@ -0,0 +1,283 @@ +/* + * Copyright 2025 WebRTC Java Contributors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package dev.onvoid.webrtc.examples; + +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import dev.onvoid.webrtc.PeerConnectionFactory; +import dev.onvoid.webrtc.PeerConnectionObserver; +import dev.onvoid.webrtc.RTCConfiguration; +import dev.onvoid.webrtc.RTCDataChannel; +import dev.onvoid.webrtc.RTCIceCandidate; +import dev.onvoid.webrtc.RTCIceConnectionState; +import dev.onvoid.webrtc.RTCIceGatheringState; +import dev.onvoid.webrtc.RTCIceServer; +import dev.onvoid.webrtc.RTCPeerConnection; +import dev.onvoid.webrtc.RTCPeerConnectionState; +import dev.onvoid.webrtc.RTCRtpReceiver; +import dev.onvoid.webrtc.RTCRtpTransceiver; +import dev.onvoid.webrtc.RTCSignalingState; +import dev.onvoid.webrtc.media.MediaStream; +import dev.onvoid.webrtc.media.MediaStreamTrack; +import dev.onvoid.webrtc.media.audio.AudioOptions; +import dev.onvoid.webrtc.media.audio.AudioTrack; +import dev.onvoid.webrtc.media.audio.AudioTrackSink; +import dev.onvoid.webrtc.media.audio.AudioTrackSource; +import dev.onvoid.webrtc.media.video.VideoDeviceSource; +import dev.onvoid.webrtc.media.video.VideoFrame; +import dev.onvoid.webrtc.media.video.VideoTrack; +import dev.onvoid.webrtc.media.video.VideoTrackSink; + +/** + * Example demonstrating how to set up a peer connection with audio and video tracks + * to be able to send and receive media. + *

+ * <p>
+ * This example shows how to:
+ * <ul>
+ *   <li>Create a PeerConnectionFactory</li>
+ *   <li>Create audio and video tracks</li>
+ *   <li>Set up a peer connection</li>
+ *   <li>Add tracks to the peer connection for sending media</li>
+ *   <li>Implement callbacks to receive incoming audio and video frames</li>
+ * </ul>
+ *

+ * Note: This example focuses only on setting up the local peer connection with + * audio and video tracks for bidirectional media transfer. In a real application, + * you would need to establish a connection with a remote peer through a signaling + * channel (e.g., WebSocket). + * + * @author Alex Andres + */ +public class PeerConnectionExample { + + public static void main(String[] args) { + // Create a PeerConnectionFactory, which is the main entry point for WebRTC. + PeerConnectionFactory factory = new PeerConnectionFactory(); + + try { + LocalPeer localPeer = new LocalPeer(factory); + + // Keep the application running to observe state changes. + System.out.println("Press Enter to exit..."); + System.in.read(); + + // Clean up. + localPeer.dispose(); + } + catch (Exception e) { + Logger.getLogger(PeerConnectionExample.class.getName()) + .log(Level.SEVERE, "Error in PeerConnectionExample", e); + } + finally { + // Dispose the factory when done + factory.dispose(); + } + } + + /** + * Represents a peer connection with audio and video tracks. + */ + private static class LocalPeer implements PeerConnectionObserver { + + private final RTCPeerConnection peerConnection; + private final AudioTrack audioTrack; + private final VideoTrack videoTrack; + private final AudioFrameLogger audioFrameLogger = new AudioFrameLogger(); + private final VideoFrameLogger videoFrameLogger = new VideoFrameLogger(); + + + public LocalPeer(PeerConnectionFactory factory) { + // Create a basic configuration for the peer connection. + RTCConfiguration config = new RTCConfiguration(); + + // Add a STUN server to help with NAT traversal. + RTCIceServer iceServer = new RTCIceServer(); + iceServer.urls.add("stun:stun.l.google.com:19302"); + config.iceServers.add(iceServer); + + // Create the peer connection. + peerConnection = factory.createPeerConnection(config, this); + + // Create an audio source with options. 
+ AudioOptions audioOptions = new AudioOptions(); + audioOptions.echoCancellation = true; + audioOptions.autoGainControl = true; + audioOptions.noiseSuppression = true; + + AudioTrackSource audioSource = factory.createAudioSource(audioOptions); + audioTrack = factory.createAudioTrack("audio0", audioSource); + + VideoDeviceSource videoSource = new VideoDeviceSource(); + videoTrack = factory.createVideoTrack("video0", videoSource); + + // Add the tracks to the peer connection. + List streamIds = new ArrayList<>(); + streamIds.add("stream1"); + peerConnection.addTrack(audioTrack, streamIds); + peerConnection.addTrack(videoTrack, streamIds); + + System.out.println("LocalPeer: Created with audio and video tracks"); + } + + /** + * Closes the peer connection and releases resources. + */ + public void dispose() { + if (audioTrack != null) { + audioTrack.removeSink(audioFrameLogger); + } + if (videoTrack != null) { + videoTrack.removeSink(videoFrameLogger); + } + if (peerConnection != null) { + peerConnection.close(); + } + } + + // PeerConnectionObserver implementation. + + @Override + public void onIceCandidate(RTCIceCandidate candidate) { + System.out.println("LocalPeer: New ICE candidate: " + candidate.sdp); + // In a real application, you would send this candidate to the remote peer + // through your signaling channel. 
+ } + + @Override + public void onConnectionChange(RTCPeerConnectionState state) { + System.out.println("LocalPeer: Connection state changed to: " + state); + } + + @Override + public void onIceConnectionChange(RTCIceConnectionState state) { + System.out.println("LocalPeer: ICE connection state changed to: " + state); + } + + @Override + public void onIceGatheringChange(RTCIceGatheringState state) { + System.out.println("LocalPeer: ICE gathering state changed to: " + state); + } + + @Override + public void onSignalingChange(RTCSignalingState state) { + System.out.println("LocalPeer: Signaling state changed to: " + state); + } + + @Override + public void onDataChannel(RTCDataChannel dataChannel) { + System.out.println("LocalPeer: Data channel created: " + dataChannel.getLabel()); + } + + @Override + public void onRenegotiationNeeded() { + System.out.println("LocalPeer: Renegotiation needed"); + // In a real application, you would create an offer and set it as the local description. + } + + @Override + public void onAddTrack(RTCRtpReceiver receiver, MediaStream[] mediaStreams) { + System.out.println("LocalPeer: Track added: " + receiver.getTrack().getKind()); + } + + @Override + public void onRemoveTrack(RTCRtpReceiver receiver) { + System.out.println("LocalPeer: Track removed: " + receiver.getTrack().getKind()); + } + + @Override + public void onTrack(RTCRtpTransceiver transceiver) { + MediaStreamTrack track = transceiver.getReceiver().getTrack(); + String kind = track.getKind(); + + if (kind.equals(MediaStreamTrack.AUDIO_TRACK_KIND)) { + AudioTrack audioTrack = (AudioTrack) track; + audioTrack.addSink(audioFrameLogger); + } + if (kind.equals(MediaStreamTrack.VIDEO_TRACK_KIND)) { + VideoTrack videoTrack = (VideoTrack) track; + videoTrack.addSink(videoFrameLogger); + } + + System.out.println("LocalPeer: Transceiver track added: " + kind); + } + } + + + + /** + * A simple implementation of VideoTrackSink that logs information about received frames. 
+ */ + private static class VideoFrameLogger implements VideoTrackSink { + + private static final long LOG_INTERVAL_MS = 1000; // Log every second + private int frameCount = 0; + private long lastLogTime = System.currentTimeMillis(); + + + @Override + public void onVideoFrame(VideoFrame frame) { + frameCount++; + + long now = System.currentTimeMillis(); + if (now - lastLogTime >= LOG_INTERVAL_MS) { + System.out.printf("Received %d video frames in the last %.1f seconds%n", + frameCount, (now - lastLogTime) / 1000.0); + System.out.printf("Last frame: %dx%d, rotation: %d, timestamp: %dms%n", + frame.buffer.getWidth(), frame.buffer.getHeight(), frame.rotation, + frame.timestampNs / 1000000); + + frameCount = 0; + lastLogTime = now; + } + + // Release the native resources associated with this frame to prevent memory leaks. + frame.release(); + } + } + + + + /** + * A simple implementation of AudioTrackSink that logs information about received audio data. + */ + private static class AudioFrameLogger implements AudioTrackSink { + + private static final long LOG_INTERVAL_MS = 1000; // Log every second + private int frameCount = 0; + private long lastLogTime = System.currentTimeMillis(); + + + @Override + public void onData(byte[] data, int bitsPerSample, int sampleRate, int channels, int frames) { + frameCount++; + + long now = System.currentTimeMillis(); + if (now - lastLogTime >= LOG_INTERVAL_MS) { + System.out.printf("Received %d audio frames in the last %.1f seconds%n", + frameCount, (now - lastLogTime) / 1000.0); + System.out.printf("Last audio data: %d bytes, %d bits/sample, %d Hz, %d channels, %d frames%n", + data.length, bitsPerSample, sampleRate, channels, frames); + + frameCount = 0; + lastLogTime = now; + } + } + } +} \ No newline at end of file diff --git a/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/WhepExample.java b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/WhepExample.java new file mode 100644 index 00000000..3280d194 --- 
/dev/null +++ b/webrtc-examples/src/main/java/dev/onvoid/webrtc/examples/WhepExample.java @@ -0,0 +1,305 @@ +/* + * Example application that demonstrates how to set up a WebRTC peer connection, + * create an offer, and accept a remote answer. + */ + +package dev.onvoid.webrtc.examples; + +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.time.Duration; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +import dev.onvoid.webrtc.*; +import dev.onvoid.webrtc.media.MediaStream; +import dev.onvoid.webrtc.media.MediaStreamTrack; +import dev.onvoid.webrtc.media.video.VideoDeviceSource; +import dev.onvoid.webrtc.media.video.VideoTrack; + +/** + * Example implementation of WebRTC HTTP Egress Protocol (WHEP) client. + *

+ * <p>
+ * This class demonstrates:
+ * <ul>
+ *   <li>Setting up a WebRTC peer connection</li>
+ *   <li>Creating and sending an SDP offer to a WHEP endpoint</li>
+ *   <li>Receiving and processing an SDP answer</li>
+ *   <li>Establishing media streaming over WebRTC</li>
+ * </ul>
+ *

+ * The example creates a receive-only peer connection that can accept + * incoming video streams from a WHEP-compatible server. + * + * @see WHEP Specification + * + * @author Alex Andres + */ +public class WhepExample { + + private static final String WHEP_ENDPOINT_URL = "http://localhost:8889/mystream/whep"; + + /** Factory for creating peer connections and media objects. */ + private PeerConnectionFactory factory; + + /** The WebRTC peer connection that handles media communication. */ + private RTCPeerConnection peerConnection; + + /** The local SDP offer to be sent to the remote endpoint. */ + private RTCSessionDescription localOffer; + + // Synchronization objects for async operations. + private final CountDownLatch offerCreatedLatch = new CountDownLatch(1); + private final CountDownLatch localDescriptionSetLatch = new CountDownLatch(1); + private final CountDownLatch remoteDescriptionSetLatch = new CountDownLatch(1); + + + public static void main(String[] args) { + WhepExample example = new WhepExample(); + + try { + example.run(); + } + catch (Exception e) { + Logger.getLogger("WHEPExample").log(Level.SEVERE, "Error running WHEP example", e); + } + finally { + example.cleanup(); + } + } + + public void run() throws Exception { + System.out.println("Starting WebRTC Peer Connection Example"); + + initializePeerConnectionFactory(); + createPeerConnection(); + createOffer(); + + // Wait for the offer to be created. + if (!offerCreatedLatch.await(5, TimeUnit.SECONDS)) { + throw new IllegalStateException("Timeout waiting for offer creation."); + } + + // Set the local description (the offer). + setLocalDescription(localOffer); + + // Wait for the local description to be set. 
+ if (!localDescriptionSetLatch.await(5, TimeUnit.SECONDS)) { + throw new IllegalStateException("Timeout waiting for local description to be set."); + } + + System.out.println("Local offer created and set."); + //System.out.println("SDP Offer: " + localOffer.sdp); + System.out.println("Sending local offer to the remote endpoint."); + + String answerSdp = sendOfferEndpoint(localOffer.sdp); + + //System.out.println("SDP Answer: " + answerSdp); + + // Set the remote description (the answer). + setRemoteDescription(new RTCSessionDescription(RTCSdpType.ANSWER, answerSdp)); + + // Wait for the remote description to be set. + if (!remoteDescriptionSetLatch.await(5, TimeUnit.SECONDS)) { + throw new IllegalStateException("Timeout waiting for remote description to be set."); + } + + System.out.println("Remote answer set. Peer connection established!"); + System.out.println("Media should now be exchanged between peers."); + + // Wait a bit to see connection state changes. + Thread.sleep(10000); + + System.out.println("WebRTC Peer Connection Example completed."); + } + + private void initializePeerConnectionFactory() { + System.out.println("Initializing PeerConnectionFactory."); + factory = new PeerConnectionFactory(); + } + + private void createPeerConnection() { + System.out.println("Creating peer connection."); + + // Create ICE servers configuration. + RTCConfiguration config = new RTCConfiguration(); + + // Add Google's public STUN server. + RTCIceServer iceServer = new RTCIceServer(); + iceServer.urls.add("stun:stun.l.google.com:19302"); + config.iceServers.add(iceServer); + + // Create the peer connection with our observer. + peerConnection = factory.createPeerConnection(config, new PeerConnectionObserverImpl()); + + // Create a video track from a video device source (e.g., webcam). + // Since we are only receiving video in this example, the source will be a dummy video source. 
+ VideoDeviceSource videoSource = new VideoDeviceSource(); + VideoTrack videoTrack = factory.createVideoTrack("videoTrack", videoSource); + videoTrack.addSink(videoFrame -> System.out.println("Received video frame: " + videoFrame)); + + // Only interested in receiving video, so we set up a transceiver for that. + RTCRtpTransceiverInit transceiverInit = new RTCRtpTransceiverInit(); + transceiverInit.direction = RTCRtpTransceiverDirection.RECV_ONLY; + + // Add the transceiver to the peer connection with the video track. + RTCRtpTransceiver transceiver = peerConnection.addTransceiver(videoTrack, transceiverInit); + + // Set up a sink to handle incoming video frames. + MediaStreamTrack track = transceiver.getReceiver().getTrack(); + if (track instanceof VideoTrack vTrack) { + vTrack.addSink(videoFrame -> { + System.out.println("Received video frame: " + videoFrame); + }); + } + } + + private void createOffer() { + System.out.println("Creating offer."); + + // Create offer options (use default options). + RTCOfferOptions options = new RTCOfferOptions(); + + // Create the offer. 
+ peerConnection.createOffer(options, new CreateSessionDescriptionObserver() { + @Override + public void onSuccess(RTCSessionDescription description) { + System.out.println("Offer created successfully."); + localOffer = description; + offerCreatedLatch.countDown(); + } + + @Override + public void onFailure(String error) { + System.err.println("Failed to create offer: " + error); + offerCreatedLatch.countDown(); + } + }); + } + + private void setLocalDescription(RTCSessionDescription description) { + System.out.println("Setting local description."); + + peerConnection.setLocalDescription(description, new SetSessionDescriptionObserver() { + @Override + public void onSuccess() { + System.out.println("Local description set successfully."); + localDescriptionSetLatch.countDown(); + } + + @Override + public void onFailure(String error) { + System.err.println("Failed to set local description: " + error); + localDescriptionSetLatch.countDown(); + } + }); + } + + private String sendOfferEndpoint(String sdpOffer) throws Exception { + HttpClient client = HttpClient.newBuilder() + .connectTimeout(Duration.ofSeconds(10)) + .build(); + + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create(WHEP_ENDPOINT_URL)) + .header("Content-Type", "application/sdp") + .POST(HttpRequest.BodyPublishers.ofString(sdpOffer)) + .timeout(Duration.ofSeconds(30)) + .build(); + + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + + if (response.statusCode() == 200 || response.statusCode() == 201) { + System.out.println("WHEP request successful"); + return response.body(); + } + else { + throw new RuntimeException("WHEP request failed with status: " + response.statusCode()); + } + } + + private void setRemoteDescription(RTCSessionDescription description) { + System.out.println("Setting remote description."); + + peerConnection.setRemoteDescription(description, new SetSessionDescriptionObserver() { + @Override + public void onSuccess() { + 
System.out.println("Remote description set successfully."); + remoteDescriptionSetLatch.countDown(); + } + + @Override + public void onFailure(String error) { + System.err.println("Failed to set remote description: " + error); + remoteDescriptionSetLatch.countDown(); + } + }); + } + + private void cleanup() { + System.out.println("Cleaning up resources."); + + if (peerConnection != null) { + peerConnection.close(); + peerConnection = null; + } + + if (factory != null) { + factory.dispose(); + factory = null; + } + } + + + + /** + * Implementation of PeerConnectionObserver to handle events from the peer connection. + */ + private static class PeerConnectionObserverImpl implements PeerConnectionObserver { + + @Override + public void onIceCandidate(RTCIceCandidate candidate) { + System.out.println("ICE candidate: " + candidate.sdp); + // In a real application, we would send this candidate to the remote peer + } + + @Override + public void onConnectionChange(RTCPeerConnectionState state) { + System.out.println("Connection state changed: " + state); + } + + @Override + public void onIceConnectionChange(RTCIceConnectionState state) { + System.out.println("ICE connection state changed: " + state); + } + + @Override + public void onIceGatheringChange(RTCIceGatheringState state) { + System.out.println("ICE gathering state changed: " + state); + } + + @Override + public void onSignalingChange(RTCSignalingState state) { + System.out.println("Signaling state changed: " + state); + } + + @Override + public void onDataChannel(RTCDataChannel dataChannel) { + System.out.println("Data channel created: " + dataChannel.getLabel()); + } + + @Override + public void onRenegotiationNeeded() { + System.out.println("Renegotiation needed."); + } + + @Override + public void onAddTrack(RTCRtpReceiver receiver, MediaStream[] mediaStreams) { + System.out.println("Track added."); + } + } +} \ No newline at end of file diff --git a/webrtc-examples/src/main/java/module-info.java 
b/webrtc-examples/src/main/java/module-info.java new file mode 100644 index 00000000..e051b4cd --- /dev/null +++ b/webrtc-examples/src/main/java/module-info.java @@ -0,0 +1,7 @@ +module webrtc.java.examples { + + requires java.logging; + requires java.net.http; + requires webrtc.java; + +} \ No newline at end of file