Video device management is a core feature of ApsaraVideo Real-time Communication (ARTC). The ARTC software development kit (SDK) provides a rich set of APIs to manage devices on various platforms. This topic describes the video device management features of the ARTC engine.
Feature introduction
The video device management feature of the Alibaba Cloud ARTC SDK gives developers fine-grained control over camera devices on platforms such as iOS and Android. You can configure parameters such as the zoom level, exposure, manual and automatic focus, and the flashlight to improve video capture quality.
Sample code
Manage video capture devices on Android: Android/ARTCExample/BasicUsage/src/main/java/com/aliyun/artc/api/basicusage/CameraCommonControl/CameraActivity.java.
Manage video capture devices on iOS: iOS/ARTCExample/BasicUsage/CameraCommonSetting/CameraCommonControlVC.swift.
Prerequisites
You have a valid Alibaba Cloud account and have created an ApsaraVideo Real-time Communication application. For more information, see Create an application. Obtain the App ID and App Key from the ApsaraVideo Real-time Communication console.
SDK integration and basic feature implementation: You have integrated the ARTC SDK into your project and implemented basic real-time audio and video features. For more information, see Download and integrate the SDK and Implement an audio and video call.
The camera is started. For example, you have started the preview by calling startPreview, or you have joined a channel by calling joinChannel.
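Before you call the camera control APIs in this topic, make sure the camera is capturing. The following is a minimal Android sketch, following the basic-usage sample, of binding the local view and starting the preview. localVideoView is a placeholder for a view in your layout, and the exact engine creation and view binding calls may differ depending on your project and SDK version.
// Minimal sketch based on the basic-usage sample: create the engine, bind the local
// preview view, and start the preview so that the camera control APIs can take effect.
// localVideoView is a placeholder for a SurfaceView in your layout.
mAliRtcEngine = AliRtcEngine.getInstance(getApplicationContext());
AliRtcEngine.AliRtcVideoCanvas canvas = new AliRtcEngine.AliRtcVideoCanvas();
canvas.view = localVideoView;
mAliRtcEngine.setLocalViewConfig(canvas, AliRtcEngine.AliRtcVideoTrack.AliRtcVideoTrackCamera);
mAliRtcEngine.startPreview();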
Implementation
Set the zoom level
The ARTC SDK supports the camera zoom feature.
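For example, you can clamp a requested zoom value to the range supported by the current camera before applying it. The following Android sketch assumes mAliRtcEngine is the engine instance from your project; the target value of 2.0 is only an illustration.
// Sketch: clamp the requested zoom to [1.0, max zoom] before applying it.
// mAliRtcEngine is the engine instance; 2.0f is an arbitrary example target.
float requestedZoom = 2.0f;
float maxZoom = mAliRtcEngine.GetCameraMaxZoomFactor();
float zoom = Math.max(1.0f, Math.min(requestedZoom, maxZoom));
if (mAliRtcEngine.setCameraZoom(zoom) != 0) {
    // Handle the failure, for example by logging the return code.
}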
API reference
/**
* @brief Sets the camera zoom level.
* @param zoom The zoom level. The value ranges from 1 to the maximum zoom value supported by the camera.
* @return
* - 0: Success.
* - A value other than 0: Failure.
* @note This API is available only on iOS and Android.
*/
public abstract int setCameraZoom(float zoom);
/**
* @brief Gets the maximum zoom factor of the camera.
* @return The maximum zoom factor of the camera.
*/
public abstract float GetCameraMaxZoomFactor();
/**
* @brief Gets the current zoom factor of the camera.
* @return The zoom factor of the camera.
*/
public abstract float GetCurrentZoom();
Examples
Android
// Get zoom information.
private void initZoomSeekBar() {
if (mAliRtcEngine != null) {
zoomSeekBar.setEnabled(true);
// Get the maximum zoom value.
float maxZoom = mAliRtcEngine.GetCameraMaxZoomFactor();
float currZoom = mAliRtcEngine.GetCurrentZoom();
// Set the SeekBar range (from 1.0 to maxZoom, with a step of 0.1).
if(maxZoom >= 1.0) {
int maxProgress = (int)((maxZoom - 1) * 10);
zoomSeekBar.setMax(maxProgress);
int currProgress = (int)((currZoom - 1) * 10);
zoomSeekBar.setProgress(currProgress);
} else{
zoomSeekBar.setEnabled(false);
}
}
}
// Set the zoom level.
zoomSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
if(mAliRtcEngine != null) {
float newZoom = (float)((i+10) / 10.0);
mAliRtcEngine.setCameraZoom(newZoom);
zoomTextView.setText(String.format("%.1f", newZoom));
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
iOS
// Get zoom information.
self.cameraZoomSlider.isEnabled = true
let maxZoom = rtcEngine.getCameraMaxZoomFactor()
let currZoom = rtcEngine.getCurrentZoom()
"Get maxZoom=\(maxZoom), currZoom=\(currZoom)".printLog()
self.cameraZoomSlider.minimumValue = 1.0
if maxZoom > 1.0 {
self.cameraZoomSlider.maximumValue = maxZoom
} else {
self.cameraZoomSlider.isEnabled = false
}
if currZoom >= 1.0 && currZoom <= maxZoom {
self.cameraZoomSlider.value = currZoom
self.cameraZoomValueLabel.text = String(format: "%.1f", self.cameraZoomSlider.value)
}
else {
self.cameraZoomSlider.value = self.cameraZoomSlider.minimumValue
self.cameraZoomValueLabel.text = "\(self.cameraZoomSlider.minimumValue)"
}
// Set the zoom level.
@IBOutlet weak var cameraZoomSlider: UISlider!
@IBOutlet weak var cameraZoomValueLabel: UILabel!
@IBAction func onCameraZoomChanged(_ sender: UISlider) {
let currValue = sender.value
self.cameraZoomValueLabel.text = String(format: "%.1f", currValue)
self.rtcEngine?.setCameraZoom(currValue)
}
Harmony
private handleZoomChange(value: number): void {
if (!this.rtcEngine) {
return;
}
// Calculate the zoom value based on the progress: zoom = 1.0 + (value / 10.0).
const newZoom = 1.0 + (value / 10.0);
// Limit the zoom value to the range of 1.0 to 10.0.
const clampedZoom = Math.max(1.0, Math.min(10.0, newZoom));
this.rtcEngine.setCameraZoom(clampedZoom);
this.zoomValue = clampedZoom.toFixed(1);
console.info(`Set zoom: ${clampedZoom}`);
}
Set the exposure
The ARTC SDK lets you set the camera exposure to control brightness during capture.
API reference
/**
* @brief Sets the camera exposure.
* @param exposure The exposure value.
* @return
* - 0: Success.
* - A value other than 0: Failure.
*/
public abstract int SetExposure(float exposure);
/**
* @brief Gets the camera exposure.
* @return The camera exposure.
*/
public abstract float GetCurrentExposure();
/**
* @brief Gets the minimum camera exposure.
* @return The minimum camera exposure.
*/
public abstract float GetMinExposure();
/**
* @brief Gets the maximum camera exposure.
* @return The maximum camera exposure.
*/
public abstract float GetMaxExposure();
Examples
Android
// Get exposure information.
private void initExposureSeekBar() {
if (mAliRtcEngine != null) {
exposureSeekBar.setEnabled(true);
// Get the maximum exposure value.
float maxExposure = mAliRtcEngine.GetMaxExposure();
// Get the minimum exposure value.
float minExposure = mAliRtcEngine.GetMinExposure();
float currExposure = mAliRtcEngine.GetCurrentExposure();
if(maxExposure > minExposure) {
// Reset the SeekBar range.
int maxProgress = (int)((maxExposure - minExposure) * 10);
exposureSeekBar.setMax(maxProgress);
int currProgress = (int)((currExposure - minExposure) * 10);
exposureSeekBar.setProgress(currProgress);
} else {
exposureSeekBar.setEnabled(false);
}
}
}
// Set the exposure.
exposureSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
if(mAliRtcEngine != null) {
float minExposure = mAliRtcEngine.GetMinExposure();
float newExposure = minExposure + (float)(i / 10.0);
mAliRtcEngine.SetExposure(newExposure);
exposureTextView.setText(String.format("%.1f", newExposure));
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
iOS
// Get device exposure information.
self.cameraExposureSlider.isEnabled = true
let minExposure = rtcEngine.getMinExposure()
let maxExposure = rtcEngine.getMaxExposure()
let currExposure = rtcEngine.getCurrentExposure()
"Get minExposure=\(minExposure), maxExposure=\(maxExposure), currExposure=\(currExposure)".printLog()
if maxExposure > minExposure {
self.cameraExposureSlider.minimumValue = minExposure
self.cameraExposureSlider.maximumValue = maxExposure
} else {
self.cameraExposureSlider.isEnabled = false
}
if currExposure >= minExposure && currExposure <= maxExposure {
self.cameraExposureSlider.value = currExposure
self.cameraExposureValueLabel.text = String(format: "%.1f", self.cameraExposureSlider.value)
}
else {
self.cameraExposureSlider.value = self.cameraExposureSlider.minimumValue
self.cameraExposureValueLabel.text = "\(self.cameraExposureSlider.minimumValue)"
}
// Set the exposure.
@IBOutlet weak var cameraExposureValueLabel: UILabel!
@IBOutlet weak var cameraExposureSlider: UISlider!
@IBAction func onCameraExposureChanged(_ sender: UISlider) {
let currValue = sender.value
self.cameraExposureValueLabel.text = String(format: "%.1f", currValue)
self.rtcEngine?.setExposure(currValue)
}
Harmony
// Handle exposure changes.
private handleExposureChange(value: number): void {
if (!this.rtcEngine) {
return;
}
// Conversion formula: exposure value = progress value / 10.
const actualValue = value / 10.0;
// Limit the exposure value to the range of -12.0 to 12.0.
const clampedExposure = Math.max(-12.0, Math.min(12.0, actualValue));
// Update the status.
this.exposureSliderValue = value;
this.exposureValue = this.formatExposureValue(clampedExposure);
// Call the RTC engine to set the exposure compensation.
try {
this.rtcEngine.setExposure(clampedExposure);
console.info(`Set exposure compensation: ${clampedExposure}`);
} catch (error) {
console.error('Failed to set exposure compensation:', error);
}
}
Manually set the exposure point
ARTC lets you manually set the camera's exposure point. When a user specifies a position, typically by tapping a point on the screen, the camera adjusts the brightness for that area.
Before you set the exposure point, call the isCameraExposurePointSupported API to check whether the feature is supported. The input parameters are normalized coordinates.
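The Android and HarmonyOS examples in this section call a getNormalizedCoordinates helper that belongs to the sample app, not to the SDK. A minimal Android sketch of such a helper, assuming mLocalVideoView is the view that renders the local preview, might look like the following; the actual sample may additionally account for view rotation or mirroring.
// Sketch: convert a tap position inside the preview view into normalized [0, 1] coordinates.
// Returns {-1, -1} if the view size is unknown or the tap falls outside the view.
// mLocalVideoView is assumed to be the view that renders the local preview.
private float[] getNormalizedCoordinates(float touchX, float touchY) {
    int width = mLocalVideoView.getWidth();
    int height = mLocalVideoView.getHeight();
    if (width <= 0 || height <= 0 || touchX < 0 || touchX > width || touchY < 0 || touchY > height) {
        return new float[]{-1f, -1f};
    }
    return new float[]{touchX / width, touchY / height};
}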
API reference
/**
* @brief Checks whether setting the camera exposure point is supported.
* @return
* - true: Supported.
* - false: Not supported.
* @note This API is available only on iOS and Android. Use this API to check whether you can set an exposure point for the current camera.
*/
public abstract boolean isCameraExposurePointSupported();
/**
* @brief Sets the camera exposure point.
* @param x The value of the x-axis coordinate (normalized). Valid values: [0, 1].
* @param y The value of the y-axis coordinate (normalized). Valid values: [0, 1].
* @return
* - 0: Success.
* - A value other than 0: Failure.
* @note This API is available only on iOS and Android. After you call this API, the camera performs an exposure adjustment on the specified point and maintains this exposure value.
*/
public abstract int setCameraExposurePoint(float x, float y);
Examples
Android
// Manually set the exposure point.
mLocalViewGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onDoubleTap(@NonNull MotionEvent e) {
// Handle double-tap.
// ...
return true;
}
@Override
public boolean onSingleTapConfirmed(@NonNull MotionEvent e) {
// Handle single-tap.
if(mAliRtcEngine != null && mAliRtcEngine.isCameraExposurePointSupported()) {
float[] normalizedCoords = getNormalizedCoordinates(e.getX(), e.getY());
if (normalizedCoords[0] != -1 && normalizedCoords[1] != -1) {
mAliRtcEngine.setCameraExposurePoint(normalizedCoords[0], normalizedCoords[1]);
mCameraExposurePointX.setText(String.format("%.2f", normalizedCoords[0]));
mCameraExposurePointY.setText(String.format("%.2f", normalizedCoords[1]));
}
}
return true;
}
});
iOS
@objc func handleSeatViewTap(_ gesture: UITapGestureRecognizer) {
guard let localSeatView = self.localPreviewSeatView else {
return
}
guard let rtcEngine = self.rtcEngine, rtcEngine.isCameraExposurePointSupported() else { return }
let tapPoint = gesture.location(in: localSeatView)
// Convert the tap coordinates to normalized coordinates (in the range of 0 to 1) of the video frame.
let normalizedX = tapPoint.x / localSeatView.bounds.width
let normalizedY = tapPoint.y / localSeatView.bounds.height
rtcEngine.setCameraExposurePoint(CGPoint(x: normalizedX, y: normalizedY))
self.cameraExposurePointXTextField.text = String(format: "%.2f", normalizedX)
self.cameraExposurePointYTextField.text = String(format: "%.2f", normalizedY)
}
Harmony
private handleSingleTap(touchX: number, touchY: number): void {
if (!this.rtcEngine) {
return;
}
// Get the normalized coordinates.
const normalizedCoords = this.getNormalizedCoordinates(touchX, touchY);
if (normalizedCoords[0] !== -1 && normalizedCoords[1] !== -1) {
this.rtcEngine.setCameraExposurePoint(normalizedCoords[0], normalizedCoords[1]);
this.exposurePointX = normalizedCoords[0].toFixed(2);
this.exposurePointY = normalizedCoords[1].toFixed(2);
console.info('Set exposure point:', this.exposurePointX, this.exposurePointY);
}
}
Manually set the focus point
ARTC lets you set a manual focus point for the camera. This is similar to setting the exposure point, but it adjusts the camera's focus position. Before you use this feature, call isCameraFocusPointSupported to check if it is supported.
After you manually set a focus point, the focus becomes static. It does not continuously track the area or dynamically adjust as the object moves. For dynamic tracking, consider using facial recognition combined with auto-focus.
API reference
/**
* @brief Checks whether the camera supports manual focus.
* @return
* - true: Supported.
* - false: Not supported.
* @note This API is available only on iOS and Android. Use this API to check whether you can set a focus point for the current camera.
*/
public abstract boolean isCameraFocusPointSupported();
/**
* @brief Sets the manual focus point for the camera.
* @param x The value of the x-axis coordinate (normalized). Valid values: [0, 1].
* @param y The value of the y-axis coordinate (normalized). Valid values: [0, 1].
* @return
* - 0: Success.
* - A value other than 0: Failure.
* @note This API is available only on iOS and Android. After you call this API, the camera performs a focus adjustment on the specified point and maintains this focus value.
*/
public abstract int setCameraFocusPoint(float x, float y);
Examples
The sample code shows how to set the exposure point on a single tap and set the focus point on a double tap.
Android
mLocalViewGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onDoubleTap(@NonNull MotionEvent e) {
// Handle double-tap.
if(mAliRtcEngine != null && mAliRtcEngine.isCameraFocusPointSupported()) {
float[] normalizedCoords = getNormalizedCoordinates(e.getX(), e.getY());
if (normalizedCoords[0] != -1 && normalizedCoords[1] != -1) {
mAliRtcEngine.setCameraFocusPoint(normalizedCoords[0], normalizedCoords[1]);
mCameraFocusPointX.setText(String.format("%.2f", normalizedCoords[0]));
mCameraFocusPointY.setText(String.format("%.2f", normalizedCoords[1]));
}
}
return true;
}
@Override
public boolean onSingleTapConfirmed(@NonNull MotionEvent e) {
// Handle single-tap.
// ...
return true;
}
});
iOS
@objc func handleSeatViewDoubleTap(_ gesture: UITapGestureRecognizer) {
guard let localSeatView = self.localPreviewSeatView else {
return
}
guard let rtcEngine = self.rtcEngine, rtcEngine.isCameraFocusPointSupported() else { return }
let tapPoint = gesture.location(in: localSeatView)
// Convert the tap coordinates to normalized coordinates (in the range of 0 to 1) of the video frame.
let normalizedX = tapPoint.x / localSeatView.bounds.width
let normalizedY = tapPoint.y / localSeatView.bounds.height
rtcEngine.setCameraFocus(CGPoint(x: normalizedX, y: normalizedY))
self.cameraFocusPointXTextField.text = String(format: "%.2f", normalizedX)
self.cameraFocusPointYTextField.text = String(format: "%.2f", normalizedY)
}
Harmony
private handleDoubleTap(touchX: number, touchY: number): void {
if (!this.rtcEngine) {
return;
}
// Get the normalized coordinates.
const normalizedCoords = this.getNormalizedCoordinates(touchX, touchY);
if (normalizedCoords[0] !== -1 && normalizedCoords[1] !== -1) {
this.rtcEngine.setCameraFocusPoint(normalizedCoords[0], normalizedCoords[1]);
this.focusPointX = normalizedCoords[0].toFixed(2);
this.focusPointY = normalizedCoords[1].toFixed(2);
console.info('Set focus point:', this.focusPointX, this.focusPointY);
}
}
Auto-focus on faces
ARTC provides APIs to configure the camera's auto-focus on faces feature. If the device supports this feature, you can enable it so that the camera automatically detects faces in the frame and focuses on them. This keeps the facial area in sharp focus.
Scenarios: This feature is used for facial recognition, portrait photography optimization, and visual effect enhancement in video calls.
Before you enable this feature, call isCameraAutoFocusFaceModeSupported to check if the current device supports it.
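For reference, wiring the support check and the toggle to a Switch on Android might look like the following sketch. faceFocusSwitch is a placeholder for a control in your layout.
// Sketch: enable the toggle only when the current camera supports face auto-focus.
// faceFocusSwitch is a placeholder for a Switch in your layout.
boolean supported = mAliRtcEngine != null && mAliRtcEngine.isCameraAutoFocusFaceModeSupported();
faceFocusSwitch.setEnabled(supported);
faceFocusSwitch.setOnCheckedChangeListener((buttonView, isChecked) -> {
    if (mAliRtcEngine != null && mAliRtcEngine.isCameraAutoFocusFaceModeSupported()) {
        mAliRtcEngine.setCameraAutoFocusFaceModeEnabled(isChecked);
    }
});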
API reference
/**
* @brief Checks whether auto-focus on faces is supported.
* @return
* - true: Supported.
* - false: Not supported.
* @note This API is available only on iOS and Android. It returns false if the camera is not turned on.
* If the camera is turned on and supports both facial recognition and focus features, it returns true.
*/
public abstract boolean isCameraAutoFocusFaceModeSupported();
/**
* @brief Sets the camera face focus.
* @param enable
* - true: Enable.
* - false: Disable.
* @return
* - true: Success.
* - false: Failure.
* @note This API is available only on iOS and Android. If {@link AliRtcEngine#isCameraAutoFocusFaceModeSupported} returns true
* and enable is set to true in this call, the camera focuses on the detected faces in real time.
*/
public abstract boolean setCameraAutoFocusFaceModeEnabled(boolean enable);
Examples
Android
if (mAliRtcEngine.isCameraAutoFocusFaceModeSupported()) {
mAliRtcEngine.setCameraAutoFocusFaceModeEnabled(isChecked);
}
iOS
@IBAction func onCameraAudoFocusSwitch(_ sender: UISwitch) {
if self.rtcEngine?.isCameraAutoFocusFaceModeSupported() == true {
self.rtcEngine?.setCameraAutoFocusFaceModeEnabled(sender.isOn)
}
}
Turn the flashlight on or off
The ARTC SDK lets you control the camera's flashlight. This feature is often used in scenarios that require brightness adjustment, such as shooting in low-light conditions, video recording, or for special lighting needs.
This API is supported only on iOS and Android platforms. It can be used only if the device hardware supports a flashlight.
The flashlight feature is typically available only for the rear camera. Front cameras usually do not have a physical flashlight. Some devices may simulate a flash effect using the screen, but this API does not control that functionality.
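For example, on Android you can allow the flashlight only while the rear camera is in use. The following sketch does not query the SDK for the camera direction; mIsUsingBackCamera is a hypothetical flag that your app maintains, for example by toggling it wherever the app switches between the front and rear cameras.
// Sketch: turn the flashlight on only while the rear camera is in use.
// mIsUsingBackCamera is a hypothetical flag maintained by the app.
private void setFlashlight(boolean on) {
    if (mAliRtcEngine == null || !mIsUsingBackCamera) {
        // Front cameras usually have no physical flashlight, so skip the call.
        return;
    }
    if (mAliRtcEngine.setCameraFlash(on) != 0) {
        // Handle the failure, for example by reverting the switch state.
    }
}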
API reference
/**
* @brief Turns the camera flashlight on or off.
* @param flash Specifies whether to turn on the flashlight.
* @return
* - 0: Success.
* - A value other than 0: Failure.
* @note This API is available only on iOS and Android. Generally, only the rear camera has a flashlight.
*/
public abstract int setCameraFlash(boolean flash);
Examples
Android
mCameraFlashSwitch = findViewById(R.id.camera_flash_switch);
mCameraFlashSwitch.setEnabled(false);
mCameraFlashSwitch.setOnCheckedChangeListener((buttonView, isChecked) -> {
if (mAliRtcEngine != null) {
mAliRtcEngine.setCameraFlash(isChecked);
}
});
iOS
@IBOutlet weak var cameraFlashSwitch: UISwitch!
@IBAction func onCameraFlashSwitch(_ sender: UISwitch) {
if self.rtcEngine?.getCurrentCameraDirection() == .back {
self.rtcEngine?.setCameraFlash(sender.isOn)
}
}
Harmony
private handleFlashChange(value: boolean): void {
if (!this.rtcEngine) {
return;
}
this.rtcEngine.setCameraFlash(value);
}