diff --git a/AVOutput/AVOutput.cpp b/AVOutput/AVOutput.cpp
index fc8dd79f..b8a3a229 100644
--- a/AVOutput/AVOutput.cpp
+++ b/AVOutput/AVOutput.cpp
@@ -22,10 +22,27 @@
 #include "UtilsIarm.h"
 #include "UtilsSearchRDKProfile.h"
+#define API_VERSION_NUMBER_MAJOR 1
+#define API_VERSION_NUMBER_MINOR 2
+#define API_VERSION_NUMBER_PATCH 0
+
 namespace WPEFramework {
 namespace Plugin {
-    SERVICE_REGISTRATION(AVOutput,1, 0);
+    static Plugin::Metadata<AVOutput> metadata(
+        // Version (Major, Minor, Patch)
+        API_VERSION_NUMBER_MAJOR, API_VERSION_NUMBER_MINOR, API_VERSION_NUMBER_PATCH,
+        // Preconditions
+        {},
+        // Terminations
+        {},
+        // Controls
+        {}
+    );
+
+
+    SERVICE_REGISTRATION(AVOutput, API_VERSION_NUMBER_MAJOR, API_VERSION_NUMBER_MINOR, API_VERSION_NUMBER_PATCH);
+
     AVOutput::AVOutput()
     {
diff --git a/AVOutput/AVOutputTV.cpp b/AVOutput/AVOutputTV.cpp
index e9bd4fb8..1714b671 100644
--- a/AVOutput/AVOutputTV.cpp
+++ b/AVOutput/AVOutputTV.cpp
@@ -270,73 +270,142 @@ namespace Plugin {
         LOGINFO("CTOR\n");
         AVOutputTV::instance = this;
-        InitializeIARM();
-
-        registerMethod("getBacklight", &AVOutputTV::getBacklight, this);
-        registerMethod("setBacklight", &AVOutputTV::setBacklight, this);
-        registerMethod("resetBacklight", &AVOutputTV::resetBacklight, this);
-        registerMethod("getBacklightCaps", &AVOutputTV::getBacklightCaps, this);
-        registerMethod("getBrightnessCaps", &AVOutputTV::getBrightnessCaps, this);
-        registerMethod("getBrightness", &AVOutputTV::getBrightness, this);
-        registerMethod("setBrightness", &AVOutputTV::setBrightness, this);
-        registerMethod("resetBrightness", &AVOutputTV::resetBrightness, this);
-        registerMethod("getContrast", &AVOutputTV::getContrast, this);
-        registerMethod("setContrast", &AVOutputTV::setContrast, this);
-        registerMethod("resetContrast", &AVOutputTV::resetContrast, this);
-        registerMethod("getContrastCaps", &AVOutputTV::getContrastCaps, this);
-        registerMethod("getSharpness", &AVOutputTV::getSharpness, this);
-        registerMethod("setSharpness", &AVOutputTV::setSharpness, this);
-        registerMethod("resetSharpness", &AVOutputTV::resetSharpness, this);
-        registerMethod("getSharpnessCaps", &AVOutputTV::getSharpnessCaps, this);
-        registerMethod("getSaturation", &AVOutputTV::getSaturation, this);
-        registerMethod("setSaturation", &AVOutputTV::setSaturation, this);
-        registerMethod("resetSaturation", &AVOutputTV::resetSaturation, this);
-        registerMethod("getSaturationCaps", &AVOutputTV::getSaturationCaps, this);
-        registerMethod("getHue", &AVOutputTV::getHue, this);
-        registerMethod("setHue", &AVOutputTV::setHue, this);
-        registerMethod("resetHue", &AVOutputTV::resetHue, this);
-        registerMethod("getHueCaps", &AVOutputTV::getHueCaps, this);
-        registerMethod("getColorTemperature", &AVOutputTV::getColorTemperature, this);
-        registerMethod("setColorTemperature", &AVOutputTV::setColorTemperature, this);
-        registerMethod("resetColorTemperature", &AVOutputTV::resetColorTemperature, this);
-        registerMethod("getColorTemperatureCaps", &AVOutputTV::getColorTemperatureCaps, this);
-
-        registerMethod("getBacklightDimmingMode", &AVOutputTV::getBacklightDimmingMode, this);
-        registerMethod("setBacklightDimmingMode", &AVOutputTV::setBacklightDimmingMode, this);
-        registerMethod("resetBacklightDimmingMode", &AVOutputTV::resetBacklightDimmingMode, this);
-        registerMethod("getBacklightDimmingModeCaps", &AVOutputTV::getBacklightDimmingModeCaps, this);
-
-        registerMethod("getSupportedDolbyVisionModes", &AVOutputTV::getSupportedDolbyVisionModes, this);
-        registerMethod("getDolbyVisionMode",
&AVOutputTV::getDolbyVisionMode, this); - registerMethod("setDolbyVisionMode", &AVOutputTV::setDolbyVisionMode, this); - registerMethod("resetDolbyVisionMode", &AVOutputTV::resetDolbyVisionMode, this); - registerMethod("getDolbyVisionModeCaps", &AVOutputTV::getDolbyVisionModeCaps, this); - registerMethod("getVideoFormat", &AVOutputTV::getVideoFormat, this); - registerMethod("getVideoSource", &AVOutputTV::getVideoSource, this); - registerMethod("getVideoFrameRate", &AVOutputTV::getVideoFrameRate, this); - registerMethod("getVideoResolution", &AVOutputTV::getVideoResolution, this); - registerMethod("getVideoContentType", &AVOutputTV::getVideoContentType, this); - - registerMethod("getZoomMode", &AVOutputTV::getZoomMode, this); - registerMethod("setZoomMode", &AVOutputTV::setZoomMode, this); - registerMethod("resetZoomMode", &AVOutputTV::resetZoomMode, this); - registerMethod("getZoomModeCaps", &AVOutputTV::getZoomModeCaps, this); - - registerMethod("getPictureMode", &AVOutputTV::getPictureMode, this); - registerMethod("setPictureMode", &AVOutputTV::setPictureMode, this); - registerMethod("signalFilmMakerMode", &AVOutputTV::signalFilmMakerMode, this); - registerMethod("resetPictureMode", &AVOutputTV::resetPictureMode, this); - registerMethod("getPictureModeCaps", &AVOutputTV::getPictureModeCaps, this); - registerMethod("getSupportedPictureModes", &AVOutputTV::getSupportedPictureModes, this); - registerMethod("getVideoSourceCaps", &AVOutputTV::getVideoSourceCaps, this); - registerMethod("getVideoFormatCaps", &AVOutputTV::getVideoFormatCaps, this); - registerMethod("getVideoFrameRateCaps", &AVOutputTV::getVideoFrameRateCaps, this); - registerMethod("getVideoResolutionCaps", &AVOutputTV::getVideoResolutionCaps, this); - - registerMethod("getLowLatencyState", &AVOutputTV::getLowLatencyState, this); - registerMethod("setLowLatencyState", &AVOutputTV::setLowLatencyState, this); - registerMethod("resetLowLatencyState", &AVOutputTV::resetLowLatencyState, this); - registerMethod("getLowLatencyStateCaps", &AVOutputTV::getLowLatencyStateCaps, this); + InitializeIARM(); + + registerMethod("getBacklight", &AVOutputTV::getBacklight, this); + registerMethod("setBacklight", &AVOutputTV::setBacklight, this); + registerMethod("resetBacklight", &AVOutputTV::resetBacklight, this); + registerMethod("getBacklightCaps", &AVOutputTV::getBacklightCaps, this); + registerMethod("getBrightnessCaps", &AVOutputTV::getBrightnessCaps, this); + registerMethod("getBrightness", &AVOutputTV::getBrightness, this); + registerMethod("setBrightness", &AVOutputTV::setBrightness, this); + registerMethod("resetBrightness", &AVOutputTV::resetBrightness, this); + registerMethod("getContrast", &AVOutputTV::getContrast, this); + registerMethod("setContrast", &AVOutputTV::setContrast, this); + registerMethod("resetContrast", &AVOutputTV::resetContrast, this); + registerMethod("getContrastCaps", &AVOutputTV::getContrastCaps, this); + registerMethod("getSharpness", &AVOutputTV::getSharpness, this); + registerMethod("setSharpness", &AVOutputTV::setSharpness, this); + registerMethod("resetSharpness", &AVOutputTV::resetSharpness, this); + registerMethod("getSharpnessCaps", &AVOutputTV::getSharpnessCaps, this); + registerMethod("getSaturation", &AVOutputTV::getSaturation, this); + registerMethod("setSaturation", &AVOutputTV::setSaturation, this); + registerMethod("resetSaturation", &AVOutputTV::resetSaturation, this); + registerMethod("getSaturationCaps", &AVOutputTV::getSaturationCaps, this); + registerMethod("getHue", &AVOutputTV::getHue, 
this); + registerMethod("setHue", &AVOutputTV::setHue, this); + registerMethod("resetHue", &AVOutputTV::resetHue, this); + registerMethod("getHueCaps", &AVOutputTV::getHueCaps, this); + registerMethod("getColorTemperature", &AVOutputTV::getColorTemperature, this); + registerMethod("setColorTemperature", &AVOutputTV::setColorTemperature, this); + registerMethod("resetColorTemperature", &AVOutputTV::resetColorTemperature, this); + registerMethod("getColorTemperatureCaps", &AVOutputTV::getColorTemperatureCaps, this); + + registerMethod("getBacklightDimmingMode", &AVOutputTV::getBacklightDimmingMode, this); + registerMethod("setBacklightDimmingMode", &AVOutputTV::setBacklightDimmingMode, this); + registerMethod("resetBacklightDimmingMode", &AVOutputTV::resetBacklightDimmingMode, this); + registerMethod("getBacklightDimmingModeCaps", &AVOutputTV::getBacklightDimmingModeCaps, this); + + registerMethod("getSupportedDolbyVisionModes", &AVOutputTV::getSupportedDolbyVisionModes, this); + registerMethod("getDolbyVisionMode", &AVOutputTV::getDolbyVisionMode, this); + registerMethod("setDolbyVisionMode", &AVOutputTV::setDolbyVisionMode, this); + registerMethod("resetDolbyVisionMode", &AVOutputTV::resetDolbyVisionMode, this); + registerMethod("getDolbyVisionModeCaps", &AVOutputTV::getDolbyVisionModeCaps, this); + registerMethod("getVideoFormat", &AVOutputTV::getVideoFormat, this); + registerMethod("getVideoSource", &AVOutputTV::getVideoSource, this); + registerMethod("getVideoFrameRate", &AVOutputTV::getVideoFrameRate, this); + registerMethod("getVideoResolution", &AVOutputTV::getVideoResolution, this); + registerMethod("getVideoContentType", &AVOutputTV::getVideoContentType, this); + + registerMethod("getZoomMode", &AVOutputTV::getZoomMode, this); + registerMethod("setZoomMode", &AVOutputTV::setZoomMode, this); + registerMethod("resetZoomMode", &AVOutputTV::resetZoomMode, this); + registerMethod("getZoomModeCaps", &AVOutputTV::getZoomModeCaps, this); + + registerMethod("getPictureMode", &AVOutputTV::getPictureMode, this); + registerMethod("setPictureMode", &AVOutputTV::setPictureMode, this); + registerMethod("signalFilmMakerMode", &AVOutputTV::signalFilmMakerMode, this); + registerMethod("resetPictureMode", &AVOutputTV::resetPictureMode, this); + registerMethod("getPictureModeCaps", &AVOutputTV::getPictureModeCaps, this); + registerMethod("getSupportedPictureModes", &AVOutputTV::getSupportedPictureModes, this); + registerMethod("getVideoSourceCaps", &AVOutputTV::getVideoSourceCaps, this); + registerMethod("getVideoFormatCaps", &AVOutputTV::getVideoFormatCaps, this); + registerMethod("getVideoFrameRateCaps", &AVOutputTV::getVideoFrameRateCaps, this); + registerMethod("getVideoResolutionCaps", &AVOutputTV::getVideoResolutionCaps, this); + + registerMethod("getLowLatencyState", &AVOutputTV::getLowLatencyState, this); + registerMethod("setLowLatencyState", &AVOutputTV::setLowLatencyState, this); + registerMethod("resetLowLatencyState", &AVOutputTV::resetLowLatencyState, this); + registerMethod("getLowLatencyStateCaps", &AVOutputTV::getLowLatencyStateCaps, this); + + registerMethod("getCMS", &AVOutputTV::getCMS, this); + registerMethod("setCMS", &AVOutputTV::setCMS, this); + registerMethod("resetCMS", &AVOutputTV::resetCMS, this); + registerMethod("getCMSCaps", &AVOutputTV::getCMSCaps, this); + + registerMethod("get2PointWB", &AVOutputTV::get2PointWB, this); + registerMethod("set2PointWB", &AVOutputTV::set2PointWB, this); + registerMethod("reset2PointWB", &AVOutputTV::reset2PointWB, this); + 
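+        // Illustrative example (not part of this patch): the CMS handlers registered above are
+        // backed by setCMSParam()/getCMS further down; a set request carries parameters of
+        // roughly this shape (the accepted "color" strings are whatever
+        // getCMSColorEnumFromString() resolves, so "Red" is only an assumed example):
+        //   { "color": "Red", "component": "Saturation", "level": 50 }
+        // "component" must be one of "Hue", "Saturation" or "Luma", and "level" is range-checked
+        // against m_maxCmsHue / m_maxCmsSaturation / m_maxCmsLuma.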
registerMethod("get2PointWBCaps", &AVOutputTV::get2PointWBCaps, this); + + registerMethod("getHDRMode", &AVOutputTV::getHDRMode, this); + registerMethod("setHDRMode", &AVOutputTV::setHDRMode, this); + registerMethod("resetHDRMode", &AVOutputTV::resetHDRMode, this); + registerMethod("getHDRModeCaps", &AVOutputTV::getHDRModeCaps, this); + + registerMethod("getAutoBacklightMode", &AVOutputTV::getAutoBacklightMode, this); + registerMethod("setAutoBacklightMode", &AVOutputTV::setAutoBacklightMode, this); + registerMethod("resetAutoBacklightMode", &AVOutputTV::resetAutoBacklightMode, this); + registerMethod("getAutoBacklightModeCaps", &AVOutputTV::getAutoBacklightModeCaps, this); + + registerMethod("getBacklightCapsV2", &AVOutputTV::getBacklightCapsV2, this); + registerMethod("getBrightnessCapsV2", &AVOutputTV::getBrightnessCapsV2, this); + registerMethod("getContrastCapsV2", &AVOutputTV::getContrastCapsV2, this); + registerMethod("getSharpnessCapsV2", &AVOutputTV::getSharpnessCapsV2, this); + registerMethod("getSaturationCapsV2", &AVOutputTV::getSaturationCapsV2, this); + registerMethod("getHueCapsV2", &AVOutputTV::getHueCapsV2, this); + registerMethod("getLowLatencyStateCapsV2", &AVOutputTV::getLowLatencyStateCapsV2, this); + registerMethod("getColorTemperatureCapsV2", &AVOutputTV::getColorTemperatureCapsV2, this); + registerMethod("getBacklightDimmingModeCapsV2", &AVOutputTV::getBacklightDimmingModeCapsV2, this); + registerMethod("getZoomModeCapsV2", &AVOutputTV::getZoomModeCapsV2, this); + registerMethod("getDolbyVisionCalibrationCaps", &AVOutputTV::getDolbyVisionCalibrationCaps, this); + registerMethod("getPictureModeCapsV2", &AVOutputTV::getPictureModeCapsV2, this); + registerMethod("getAutoBacklightModeCapsV2", &AVOutputTV::getAutoBacklightModeCapsV2, this); + registerMethod("getCMSCapsV2", &AVOutputTV::getCMSCapsV2, this); + registerMethod("get2PointWBCapsV2", &AVOutputTV::get2PointWBCapsV2, this); + registerMethod("getSDRGammaCaps", &AVOutputTV::getSDRGammaCaps, this); + + registerMethod("getPrecisionDetailCaps", &AVOutputTV::getPrecisionDetailCaps, this); + registerMethod("getPrecisionDetail", &AVOutputTV::getPrecisionDetail, this); + registerMethod("setPrecisionDetail", &AVOutputTV::setPrecisionDetail, this); + registerMethod("resetPrecisionDetail", &AVOutputTV::resetPrecisionDetail, this); + + registerMethod("getLocalContrastEnhancementCaps", &AVOutputTV::getLocalContrastEnhancementCaps, this); + registerMethod("getLocalContrastEnhancement", &AVOutputTV::getLocalContrastEnhancement, this); + registerMethod("setLocalContrastEnhancement", &AVOutputTV::setLocalContrastEnhancement, this); + registerMethod("resetLocalContrastEnhancement", &AVOutputTV::resetLocalContrastEnhancement, this); + + registerMethod("getMPEGNoiseReductionCaps", &AVOutputTV::getMPEGNoiseReductionCaps, this); + registerMethod("getMPEGNoiseReduction", &AVOutputTV::getMPEGNoiseReduction, this); + registerMethod("setMPEGNoiseReduction", &AVOutputTV::setMPEGNoiseReduction, this); + registerMethod("resetMPEGNoiseReduction", &AVOutputTV::resetMPEGNoiseReduction, this); + + registerMethod("getDigitalNoiseReductionCaps", &AVOutputTV::getDigitalNoiseReductionCaps, this); + registerMethod("getDigitalNoiseReduction", &AVOutputTV::getDigitalNoiseReduction, this); + registerMethod("setDigitalNoiseReduction", &AVOutputTV::setDigitalNoiseReduction, this); + registerMethod("resetDigitalNoiseReduction", &AVOutputTV::resetDigitalNoiseReduction, this); + + registerMethod("getMEMCCaps", &AVOutputTV::getMEMCCaps, this); + 
registerMethod("getMEMC", &AVOutputTV::getMEMC, this); + registerMethod("setMEMC", &AVOutputTV::setMEMC, this); + registerMethod("resetMEMC", &AVOutputTV::resetMEMC, this); + + registerMethod("getAISuperResolutionCaps", &AVOutputTV::getAISuperResolutionCaps, this); + registerMethod("getAISuperResolution", &AVOutputTV::getAISuperResolution, this); + registerMethod("setAISuperResolution", &AVOutputTV::setAISuperResolution, this); + registerMethod("resetAISuperResolution", &AVOutputTV::resetAISuperResolution, this); + + registerMethod("getMultiPointWBCaps", &AVOutputTV::getMultiPointWBCaps, this); LOGINFO("Exit\n"); } @@ -403,8 +472,8 @@ namespace Plugin { locatePQSettingsFile(); - // Get Index from PQ capabailites - if (getPqParamIndex() != 0) { + // Get Index from PQ capabailites + if (getPqParamIndex() != 0) { LOGWARN("Failed to get the supported index from capability \n"); } @@ -439,13 +508,2365 @@ namespace Plugin { LOGINFO("Exit\n"); } + // Shared zoom mode mappings + static const std::unordered_map zoomModeReverseMap = { + {tvDisplayMode_16x9, "TV 16X9 STRETCH"}, + {tvDisplayMode_4x3, "TV 4X3 PILLARBOX"}, + {tvDisplayMode_NORMAL, "TV NORMAL"}, + {tvDisplayMode_DIRECT, "TV DIRECT"}, + {tvDisplayMode_AUTO, "TV AUTO"}, + {tvDisplayMode_ZOOM, "TV ZOOM"}, + {tvDisplayMode_FULL, "TV FULL"} + }; + static const std::unordered_map zoomModeMap = { + {"TV 16X9 STRETCH", tvDisplayMode_16x9}, + {"TV 4X3 PILLARBOX", tvDisplayMode_4x3}, + {"TV NORMAL", tvDisplayMode_NORMAL}, + {"TV DIRECT", tvDisplayMode_DIRECT}, + {"TV AUTO", tvDisplayMode_AUTO}, + {"TV ZOOM", tvDisplayMode_ZOOM}, + {"TV FULL", tvDisplayMode_FULL} + }; + static const std::unordered_map dimmingModeReverseMap = { + { tvDimmingMode_Fixed, "Fixed" }, + { tvDimmingMode_Local, "Local" }, + { tvDimmingMode_Global, "Global" } + }; + static const std::unordered_map dimmingModeMap = { + { "Fixed", tvDimmingMode_Fixed }, + { "Local", tvDimmingMode_Local }, + { "Global", tvDimmingMode_Global } + }; + + bool AVOutputTV::getPQParamFromContext(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t paramType, + int& outValue) + { + tvConfigContext_t validContext = getValidContextFromGetParameters(parameters, paramName); + if ((validContext.videoSrcType == VIDEO_SOURCE_ALL && + validContext.videoFormatType == VIDEO_FORMAT_NONE && + validContext.pq_mode == PQ_MODE_INVALID)) + { + LOGWARN("No Valid context for get %s", paramName.c_str()); + return false; + } + + paramIndex_t indexInfo + { + .sourceIndex = static_cast(validContext.videoSrcType), + .pqmodeIndex = static_cast(validContext.pq_mode), + .formatIndex = static_cast(validContext.videoFormatType) + }; + + int value = 0; + tvError_t err = static_cast(getLocalparam(paramName.c_str(), indexInfo, value, paramType)); + if (err == tvERROR_NONE) { + outValue = value; + return true; + } + + LOGERR("getLocalparam failed for %s with error code %d", paramName.c_str(), err); + return false; + } + + bool AVOutputTV::getEnumPQParamString( + const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqType, + const std::unordered_map& enumToStrMap, + std::string& outStr) + { + LOGINFO("getEnumPQParamString Entry for %s\n", paramName.c_str()); + + tvConfigContext_t validContext = getValidContextFromGetParameters(parameters, paramName); + if ((validContext.videoSrcType == VIDEO_SOURCE_ALL && + validContext.videoFormatType == VIDEO_FORMAT_NONE && + validContext.pq_mode == PQ_MODE_INVALID)) + { + LOGWARN("No valid context for get %s", paramName.c_str()); + 
return false; + } + + paramIndex_t indexInfo { + .sourceIndex = static_cast(validContext.videoSrcType), + .pqmodeIndex = static_cast(validContext.pq_mode), + .formatIndex = static_cast(validContext.videoFormatType) + }; + + int paramValue = 0; + int err = getLocalparam(paramName, indexInfo, paramValue, pqType); + if (err != 0) { + LOGERR("Failed to get %s from localparam", paramName.c_str()); + return false; + } + + auto it = enumToStrMap.find(paramValue); + if (it != enumToStrMap.end()) { + outStr = it->second; + LOGINFO("%s = %s", paramName.c_str(), outStr.c_str()); + return true; + } else { + LOGERR("Enum value %d not found in map for %s", paramValue, paramName.c_str()); + return false; + } + } + bool AVOutputTV::setCMSParam(const JsonObject& parameters) + { + LOGINFO("Entry: setCMSParam"); + + std::string colorStr = parameters.HasLabel("color") ? parameters["color"].String() : ""; + std::string componentStr = parameters.HasLabel("component") ? parameters["component"].String() : ""; + std::string levelStr = parameters.HasLabel("level") ? parameters["level"].String() : ""; + + if (colorStr.empty() || componentStr.empty() || levelStr.empty()) { + LOGERR("Missing color/component/level"); + return false; + } + + int level = 0; + try { + level = std::stoi(levelStr); + } catch (...) { + LOGERR("Invalid level value: %s", levelStr.c_str()); + return false; + } + + int maxCap = 0; + if (componentStr == "Hue") + maxCap = m_maxCmsHue; + else if (componentStr == "Saturation") + maxCap = m_maxCmsSaturation; + else if (componentStr == "Luma") + maxCap = m_maxCmsLuma; + else { + LOGERR("Invalid component: %s", componentStr.c_str()); + return false; + } + + if (level < 0 || level > maxCap) { + LOGERR("Level out of range: %d (0-%d)", level, maxCap); + return false; + } + + tvDataComponentColor_t colorEnum; + if (getCMSColorEnumFromString(colorStr, colorEnum) != 0) { + LOGERR("Invalid color: %s", colorStr.c_str()); + return false; + } + + + if( isSetRequiredForParam(parameters, "CMS") ) { + LOGINFO("Proceed with SetCMSState \n"); + tvError_t ret = SetCMSState(true); + if(ret != tvERROR_NONE) { + LOGWARN("CMS enable failed\n"); + return false; + } + if (componentStr == "Hue") + ret = SetCurrentComponentHue(colorEnum, level); + else if (componentStr == "Saturation") + ret = SetCurrentComponentSaturation(colorEnum, level); + else if (componentStr == "Luma") + ret = SetCurrentComponentLuma(colorEnum, level); + + if (ret != tvERROR_NONE) { + LOGERR("HAL set failed for %s", componentStr.c_str()); + return false; + } + } + + try { + int retVal = updateAVoutputTVParamV2("set", "CMS", parameters, PQ_PARAM_CMS, level); + if (retVal < 0) { + LOGERR("setCMSParam: Failed to save CMS param, return code: %d", retVal); + return false; + } + } catch (const std::exception& e) { + LOGERR("Exception in updateAVoutputTVParamV2: %s", e.what()); + return false; + } catch (...) 
{ + LOGERR("Unknown exception in updateAVoutputTVParamV2"); + return false; + } + + LOGINFO("Exit: setCMSParam success"); + return true; + } + + + bool AVOutputTV::setEnumPQParam(const JsonObject& parameters, + const std::string& inputKey, + const std::string& paramName, + const std::unordered_map& valueMap, + tvPQParameterIndex_t paramType, + std::function halSetter) + { + if (!parameters.HasLabel(inputKey.c_str())) { + LOGERR("Missing input field: %s", inputKey.c_str()); + return false; + } + + std::string value = parameters[inputKey.c_str()].String(); + auto it = valueMap.find(value); + if (it == valueMap.end()) { + LOGERR("Invalid value '%s' for parameter: %s", value.c_str(), inputKey.c_str()); + return false; + } + + int intVal = it->second; + + // Only call HAL for current system context + if (isSetRequiredForParam(parameters, paramName)) { + LOGINFO("Calling HAL for %s = %s intVal %d", paramName.c_str(), value.c_str(), intVal); + tvError_t ret = halSetter(intVal); + if (ret != tvERROR_NONE) { + LOGERR("HAL setter failed for %s", paramName.c_str()); + return false; + } + } + + // Persist the parameter contextually + int result = updateAVoutputTVParamV2("set", paramName, parameters, paramType, intVal); + if (result != 0) { + LOGERR("Persistence failed for %s", paramName.c_str()); + return false; + } + + LOGINFO("setEnumPQParam successful: %s = %s", paramName.c_str(), value.c_str()); + return true; + } + + bool AVOutputTV::setIntPQParam(const JsonObject& parameters, const std::string& paramName, + tvPQParameterIndex_t pqType, tvSetFunction halSetter, int maxCap) + { + LOGINFO("Entry: %s\n", paramName.c_str()); + int paramValue = 0; + tvError_t ret = tvERROR_NONE; + std::string value = ""; + std::string lowerParamName = paramName; + std::transform(lowerParamName.begin(), lowerParamName.end(), lowerParamName.begin(), ::tolower); + + if (!parameters.HasLabel(lowerParamName.c_str())) { + LOGERR("%s: Missing parameter: %s", __FUNCTION__, lowerParamName.c_str()); + return false; + } + + value = parameters[lowerParamName.c_str()].String(); + + try { + paramValue = std::stoi(value); + } catch (const std::exception& e) { + LOGERR("Invalid %s value: %s. Exception: %s", paramName.c_str(), value.c_str(), e.what()); + return false; + } + + if (paramValue < 0 || paramValue > maxCap) { + LOGERR("Input value %d is out of range (0 - %d) for %s", paramValue, maxCap, paramName.c_str()); + return false; + } + + if (isSetRequiredForParam(parameters, paramName)) { + LOGINFO("Proceed with set%s\n", paramName.c_str()); + ret = halSetter(paramValue); + LOGINFO("halsetter ret %d \n", ret); + if (ret != tvERROR_NONE){ + LOGERR("Failed to set %s\n", paramName.c_str()); + return false; + } + } + LOGINFO("Calling updateAVOutputTVParamV2 \n"); + int retval = updateAVoutputTVParamV2("set", paramName, parameters, pqType, paramValue); + if (retval != 0) { + LOGERR("Failed to Save %s to ssm_data. 
retval: %d\n", paramName.c_str(), retval); + return false; + } + + LOGINFO("Exit: set%s successful to value: %d\n", paramName.c_str(), paramValue); + return true; + } + + uint32_t AVOutputTV::getPQCapabilityWithContext( + const std::function& getCapsFunc, + const JsonObject& parameters, + JsonObject& response) + { + int max_value = 0; + tvContextCaps_t* context_caps = nullptr; + + // Call the HAL function + tvError_t result = getCapsFunc(&context_caps, &max_value); + LOGWARN("AVOutputPlugins: %s: result: %d", __FUNCTION__, result); + + if (result != tvERROR_NONE) { + returnResponse(false); + } + + response["platformSupport"] = true; + + if (max_value > 0) { + JsonObject rangeInfo; + rangeInfo["from"] = 0; + rangeInfo["to"] = max_value; + response["rangeInfo"] = rangeInfo; + } + + response["context"] = parseContextCaps(context_caps); + + returnResponse(true); + } + + + JsonObject AVOutputTV::parseContextCaps(tvContextCaps_t* context_caps) { + JsonObject contextObj; + if (context_caps && context_caps->num_contexts > 0) { + for (size_t i = 0; i < context_caps->num_contexts; ++i) { + int pqMode = context_caps->contexts[i].pq_mode; + int videoFormat = context_caps->contexts[i].videoFormatType; + int videoSource = context_caps->contexts[i].videoSrcType; + + auto pqModeIt = AVOutputTV::pqModeMap.find(pqMode); + auto videoFormatIt = AVOutputTV::videoFormatMap.find(videoFormat); + auto videoSrcIt = AVOutputTV::videoSrcMap.find(videoSource); + + if (pqModeIt != AVOutputTV::pqModeMap.end() && + videoFormatIt != AVOutputTV::videoFormatMap.end() && + videoSrcIt != AVOutputTV::videoSrcMap.end()) { + + const char* pqModeStr = pqModeIt->second.c_str(); + const char* videoFormatStr = videoFormatIt->second.c_str(); + const char* videoSrcStr = videoSrcIt->second.c_str(); + + if (!contextObj.HasLabel(pqModeStr)) { + contextObj[pqModeStr] = JsonObject(); + } + JsonObject pqModeObj = contextObj[pqModeStr].Object(); + + if (!pqModeObj.HasLabel(videoFormatStr)) { + pqModeObj[videoFormatStr] = JsonArray(); + } + JsonArray formatArray = pqModeObj[videoFormatStr].Array(); + // **Manually check for existence before adding** + bool exists = false; + for (size_t j = 0; j < formatArray.Length(); ++j) { + if (strcmp(formatArray[j].String().c_str(), videoSrcStr) == 0) { + exists = true; + break; + } + } + if (!exists) { + formatArray.Add(videoSrcStr); + } + // Update objects + pqModeObj[videoFormatStr] = formatArray; + contextObj[pqModeStr] = pqModeObj; + } + } + } + return contextObj; + } + + uint32_t AVOutputTV::getBacklightCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this]( tvContextCaps_t** context_caps, int* max_backlight) { +#if HAL_NOT_READY + return this->GetBacklightCaps(max_backlight, context_caps); +#else + return GetBacklightCaps(max_backlight, context_caps); +#endif + }, parameters, response); + } + + uint32_t AVOutputTV::getBrightnessCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this]( tvContextCaps_t** context_caps, int* max_brightness) { +#if HAL_NOT_READY + return this->GetBrightnessCaps(max_brightness, context_caps); +#else + return GetBrightnessCaps(max_brightness, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getContrastCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_contrast) { +#if HAL_NOT_READY + return this->GetContrastCaps(max_contrast, context_caps); +#else + 
return GetContrastCaps(max_contrast, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getSharpnessCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_sharpness) { +#if HAL_NOT_READY + return this->GetSharpnessCaps(max_sharpness, context_caps); +#else + return GetSharpnessCaps(max_sharpness, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getSaturationCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_saturation) { +#if HAL_NOT_READY + return this->GetSaturationCaps(max_saturation, context_caps); +#else + return GetSaturationCaps(max_saturation, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getHueCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this]( tvContextCaps_t** context_caps, int* max_hue) { +#if HAL_NOT_READY + return this->GetHueCaps(max_hue, context_caps); +#else + return GetHueCaps(max_hue, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getPrecisionDetailCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_precision) { +#if HAL_NOT_READY + return this->GetPrecisionDetailCaps(max_precision, context_caps); +#else + return GetPrecisionDetailCaps(max_precision, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getLocalContrastEnhancementCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetLocalContrastEnhancementCaps(max_val, context_caps); +#else + return GetLocalContrastEnhancementCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getMPEGNoiseReductionCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetMPEGNoiseReductionCaps(max_val, context_caps); +#else + return GetMPEGNoiseReductionCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getDigitalNoiseReductionCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetDigitalNoiseReductionCaps(max_val, context_caps); +#else + return GetDigitalNoiseReductionCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getAISuperResolutionCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetAISuperResolutionCaps(max_val, context_caps); +#else + return GetAISuperResolutionCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getMultiPointWBCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + + int num_hal_matrix_points = 0; + int rgb_min = 0, rgb_max = 0; + int num_ui_matrix_points = 0; + double* ui_matrix_positions = nullptr; + tvContextCaps_t* context_caps = nullptr; + + tvError_t ret = GetMultiPointWBCaps( + &num_hal_matrix_points, + 
&rgb_min, + &rgb_max, + &num_ui_matrix_points, + &ui_matrix_positions, + &context_caps + ); + + if (ret != tvERROR_NONE) { + LOGWARN("GetMultiPointWBCaps failed: %s", getErrorString(ret).c_str()); + returnResponse(false); + } + response["platformSupport"] = true; + + response["numHalMatrixPoints"] = num_hal_matrix_points; + response["rgbMin"] = rgb_min; + response["rgbMax"] = rgb_max; + response["numUiMatrixPoints"] = num_ui_matrix_points; + + // Add UI matrix positions + JsonArray uiPosArray; + for (int i = 0; i < num_ui_matrix_points; ++i) { + uiPosArray.Add(ui_matrix_positions[i]); + } + response["uiMatrixPositions"] = uiPosArray; + response["context"] = parseContextCaps(context_caps); +#if HAL_NOT_READY + // TODO:: Review cleanup once HAL is available, as memory will be allocated in HAL. + delete[] ui_matrix_positions; +#endif + LOGINFO("Exit\n"); + returnResponse(true); + } + + uint32_t AVOutputTV::getMEMCCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetMEMCCaps(max_val, context_caps); +#else + return GetMEMCCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getLowLatencyStateCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_latency) { +#if HAL_NOT_READY + return this->GetLowLatencyStateCaps(max_latency, context_caps); +#else + return GetLowLatencyStateCaps(max_latency, context_caps); +#endif + }, + parameters, response); + } + + // Forward lookup: string → enum + const std::unordered_map colorTempMap = { + {"Standard", tvColorTemp_STANDARD}, + {"Warm", tvColorTemp_WARM}, + {"Cold", tvColorTemp_COLD}, + {"UserDefined", tvColorTemp_USER}, + {"Supercold", tvColorTemp_SUPERCOLD}, + {"BoostStandard", tvColorTemp_BOOST_STANDARD}, + {"BoostWarm", tvColorTemp_BOOST_WARM}, + {"BoostCold", tvColorTemp_BOOST_COLD}, + {"BoostUserDefined", tvColorTemp_BOOST_USER}, + {"BoostSupercold", tvColorTemp_BOOST_SUPERCOLD} + }; + + // Reverse lookup: enum → string + const std::unordered_map colorTempReverseMap = { + {tvColorTemp_STANDARD, "Standard"}, + {tvColorTemp_WARM, "Warm"}, + {tvColorTemp_COLD, "Cold"}, + {tvColorTemp_USER, "UserDefined"}, + {tvColorTemp_SUPERCOLD, "Supercold"}, + {tvColorTemp_BOOST_STANDARD, "BoostStandard"}, + {tvColorTemp_BOOST_WARM, "BoostWarm"}, + {tvColorTemp_BOOST_COLD, "BoostCold"}, + {tvColorTemp_BOOST_USER, "BoostUserDefined"}, + {tvColorTemp_BOOST_SUPERCOLD, "BoostSupercold"} + }; + + uint32_t AVOutputTV::getColorTemperatureCapsV2(const JsonObject& parameters, JsonObject& response) { + tvColorTemp_t* color_temp = nullptr; + size_t num_color_temp = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t err = GetColorTemperatureCaps(&color_temp, &num_color_temp, &context_caps); + if (err != tvERROR_NONE) { + return err; + } + + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < num_color_temp; ++i) { + auto it = colorTempReverseMap.find(color_temp[i]); + if (it != colorTempReverseMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + response["context"] = parseContextCaps(context_caps); + + #if HAL_NOT_READY + free(color_temp); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getSDRGammaCaps(const JsonObject& parameters, JsonObject& response) + { + tvSdrGamma_t* sdr_gamma = nullptr; + size_t num_sdr_gamma = 
0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t err = GetSdrGammaCaps(&sdr_gamma, &num_sdr_gamma, &context_caps); + if (err != tvERROR_NONE) { + return err; + } + + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < num_sdr_gamma; ++i) { + switch (sdr_gamma[i]) { + case tvSdrGamma_1_8: optionsArray.Add("1.8"); break; + case tvSdrGamma_1_9: optionsArray.Add("1.9"); break; + case tvSdrGamma_2_0: optionsArray.Add("2.0"); break; + case tvSdrGamma_2_1: optionsArray.Add("2.1"); break; + case tvSdrGamma_2_2: optionsArray.Add("2.2"); break; + case tvSdrGamma_2_3: optionsArray.Add("2.3"); break; + case tvSdrGamma_2_4: optionsArray.Add("2.4"); break; + case tvSdrGamma_BT_1886: optionsArray.Add("BT.1886"); break; + default: break; + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(context_caps); + + #if HAL_NOT_READY + free(sdr_gamma); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getBacklightDimmingModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + tvDimmingMode_t* dimming_mode = nullptr; + size_t num_dimming_mode = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t err = GetTVDimmingModeCaps(&dimming_mode, &num_dimming_mode, &context_caps); + if (err != tvERROR_NONE) { + return err; + } + + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < num_dimming_mode; ++i) { + auto it = dimmingModeReverseMap.find(dimming_mode[i]); + if (it != dimmingModeReverseMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(context_caps); + + #if HAL_NOT_READY + free(dimming_mode); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getZoomModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < m_numAspectRatio; ++i) { + auto it = zoomModeReverseMap.find(m_aspectRatio[i]); + if (it != zoomModeReverseMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(m_aspectRatioCaps); + + #if HAL_NOT_READY + free(m_aspectRatio); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getPictureModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < m_numPictureModes; ++i) { + auto it = pqModeMap.find(m_pictureModes[i]); + if (it != pqModeMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(m_pictureModeCaps); + + #if HAL_NOT_READY + free(m_pictureModes); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getAutoBacklightModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < m_numBacklightModes; ++i) { + switch (m_backlightModes[i]) { + case tvBacklightMode_MANUAL: + optionsArray.Add("Manual"); + break; + case tvBacklightMode_AMBIENT: + optionsArray.Add("Ambient"); + break; + case tvBacklightMode_ECO: + optionsArray.Add("Eco"); + break; + default: + LOGINFO("Unknown backlightMode option\n"); + break; + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(m_backlightModeCaps); + + #if HAL_NOT_READY + // TODO: Review cleanup once HAL is available, as memory will 
be allocated in HAL. + free(m_backlightModes); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getDolbyVisionCalibrationCaps(const JsonObject& parameters, JsonObject& response) + { + tvDVCalibrationSettings_t* min_values = nullptr; + tvDVCalibrationSettings_t* max_values = nullptr; + tvContextCaps_t* context_caps = nullptr; + + if (GetDVCalibrationCaps(&min_values, &max_values, &context_caps) != tvERROR_NONE) { + returnResponse(false); + } + + // Set platform support + response["platformSupport"] = true; + + // Add all range fields (flattened as per expected JSON) + response["rangeTmax"] = JsonObject({{"from", min_values->Tmax}, {"to", max_values->Tmax}}); + response["rangeTmin"] = JsonObject({{"from", min_values->Tmin}, {"to", max_values->Tmin}}); + response["rangeTgamma"] = JsonObject({{"from", min_values->Tgamma}, {"to", max_values->Tgamma}}); + response["rangeRx"] = JsonObject({{"from", min_values->Rx}, {"to", max_values->Rx}}); + response["rangeRy"] = JsonObject({{"from", min_values->Ry}, {"to", max_values->Ry}}); + response["rangeGx"] = JsonObject({{"from", min_values->Gx}, {"to", max_values->Gx}}); + response["rangeGy"] = JsonObject({{"from", min_values->Gy}, {"to", max_values->Gy}}); + response["rangeBx"] = JsonObject({{"from", min_values->Bx}, {"to", max_values->Bx}}); + response["rangeBy"] = JsonObject({{"from", min_values->By}, {"to", max_values->By}}); + response["rangeWx"] = JsonObject({{"from", min_values->Wx}, {"to", max_values->Wx}}); + response["rangeWy"] = JsonObject({{"from", min_values->Wy}, {"to", max_values->Wy}}); + + // Add context list + response["context"] = parseContextCaps(context_caps); + + // Indicate success + response["success"] = true; + +#if HAL_NOT_READY + // TODO: Clean up when HAL handles memory + delete min_values; + delete max_values; +#endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getZoomModeCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; + + JsonArray rangeArray; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray sourceArray; + + unsigned int index = 0; + + tvError_t ret = getParamsCaps("AspectRatio",info); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); + } + + response["options"]=rangeArray; + + if (info.pqmodeVector.front().compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); + } + response["pictureModeInfo"]=pqmodeArray; + } + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); + } + response["videoSourceInfo"]=sourceArray; + } + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; + } + LOGINFO("Exit\n"); + returnResponse(true); + } + } + + uint32_t AVOutputTV::setZoomMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_aspectRatioStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + tvDisplayMode_t mode = tvDisplayMode_16x9; + capDetails_t inputInfo; + + + value = parameters.HasLabel("zoomMode") ? 
parameters["zoomMode"].String() : ""; + returnIfParamNotFound(parameters,"zoomMode"); + + if (validateInputParameter("AspectRatio",value) != 0) { + LOGERR("%s: Range validation failed for AspectRatio\n", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters,"AspectRatio",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "AspectRatio",inputInfo )) { + LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); + returnResponse(false); + } + + if(!value.compare("TV 16X9 STRETCH")) { + mode = tvDisplayMode_16x9; + } + else if (!value.compare("TV 4X3 PILLARBOX")) { + mode = tvDisplayMode_4x3; + } + else if (!value.compare("TV NORMAL")) { + mode = tvDisplayMode_NORMAL; + } + else if (!value.compare("TV DIRECT")) { + mode = tvDisplayMode_DIRECT; + } + else if (!value.compare("TV AUTO")) { + mode = tvDisplayMode_AUTO; + } + else if (!value.compare("TV ZOOM")) { + mode = tvDisplayMode_ZOOM; + } + else { + returnResponse(false); + } + m_videoZoomMode = mode; + tvError_t ret = setAspectRatioZoomSettings (mode); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + //Save DisplayMode to localstore and ssm_data + int retval=updateAVoutputTVParam("set","AspectRatio",inputInfo,PQ_PARAM_ASPECT_RATIO,mode); + + if(retval != 0) { + LOGERR("Failed to Save DisplayMode to ssm_data\n"); + returnResponse(false); + } + + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + if ( err != tr181Success ) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + } + LOGINFO("Exit : SetAspectRatio() value : %s\n",value.c_str()); + returnResponse(true); + } + } + else + { + std::string value = parameters.HasLabel("zoomMode") ? parameters["zoomMode"].String() : ""; + returnIfParamNotFound(parameters, "zoomMode"); + + auto it = zoomModeMap.find(value); + if (it == zoomModeMap.end()) { + LOGERR("Invalid zoom mode: %s. 
Not in supported options.", value.c_str()); + returnResponse(false); + } + tvDisplayMode_t mode = it->second; + tvError_t ret = setAspectRatioZoomSettings(mode); + if (ret != tvERROR_NONE) { + returnResponse(false); + } + else + { + // Save DisplayMode to local store and ssm_data + int retval = updateAVoutputTVParamV2("set", "AspectRatio", parameters, PQ_PARAM_ASPECT_RATIO, mode); + if (retval != 0) { + LOGERR("Failed to Save DisplayMode to ssm_data\n"); + returnResponse(false); + } + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + if (err != tr181Success) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + } + LOGINFO("Exit : SetAspectRatio() value : %s\n", value.c_str()); + returnResponse(true); + } + } + } + + uint32_t AVOutputTV::getZoomMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvDisplayMode_t mode; + + tvError_t ret = getUserSelectedAspectRatio (&mode); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + switch(mode) { + case tvDisplayMode_16x9: + LOGINFO("Aspect Ratio: TV 16X9 STRETCH\n"); + response["zoomMode"] = "TV 16X9 STRETCH"; + break; + + case tvDisplayMode_4x3: + LOGINFO("Aspect Ratio: TV 4X3 PILLARBOX\n"); + response["zoomMode"] = "TV 4X3 PILLARBOX"; + break; + + case tvDisplayMode_NORMAL: + LOGINFO("Aspect Ratio: TV Normal\n"); + response["zoomMode"] = "TV NORMAL"; + break; + + case tvDisplayMode_AUTO: + LOGINFO("Aspect Ratio: TV AUTO\n"); + response["zoomMode"] = "TV AUTO"; + break; + + case tvDisplayMode_DIRECT: + LOGINFO("Aspect Ratio: TV DIRECT\n"); + response["zoomMode"] = "TV DIRECT"; + break; + + case tvDisplayMode_ZOOM: + LOGINFO("Aspect Ratio: TV ZOOM\n"); + response["zoomMode"] = "TV ZOOM"; + break; + + default: + LOGINFO("Aspect Ratio: TV AUTO\n"); + response["zoomMode"] = "TV AUTO"; + break; + } + returnResponse(true); + } + } + + uint32_t AVOutputTV::resetZoomMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + capDetails_t inputInfo; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "AspectRatio",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "AspectRatio",inputInfo )) { + LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); + returnResponse(false); + } + + tr181ErrorCode_t err = clearLocalParam(rfc_caller_id,AVOUTPUT_ASPECTRATIO_RFC_PARAM); + if ( err != tr181Success ) { + LOGERR("clearLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); + ret = tvERROR_GENERAL; + } + else { + ret = setDefaultAspectRatio(inputInfo.pqmode,inputInfo.source,inputInfo.format); + } + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetDefaultAspectRatio()\n"); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getVideoFormat(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvVideoFormatType_t videoFormat; + tvError_t ret = GetCurrentVideoFormat(&videoFormat); + if(ret != tvERROR_NONE) { + response["currentVideoFormat"] = "NONE"; + returnResponse(false); + } + else { + response["currentVideoFormat"] = getVideoFormatTypeToString(videoFormat); + LOGINFO("Exit: getVideoFormat :%d success 
\n",videoFormat); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getVideoResolution(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvResolutionParam_t videoResolution; + tvError_t ret = GetCurrentVideoResolution(&videoResolution); + if(ret != tvERROR_NONE) { + response["currentVideoResolution"] = "NONE"; + returnResponse(false); + } + else { + response["currentVideoResolution"] = getVideoResolutionTypeToString(videoResolution); + LOGINFO("Exit: getVideoResolution :%d success \n",videoResolution.resolutionValue); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getVideoFrameRate(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvVideoFrameRate_t videoFramerate; + tvError_t ret = GetCurrentVideoFrameRate(&videoFramerate); + if(ret != tvERROR_NONE) { + response["currentVideoFrameRate"] = "NONE"; + returnResponse(false); + } + else { + response["currentVideoFrameRate"] = getVideoFrameRateTypeToString(videoFramerate); + LOGINFO("Exit: videoFramerate :%d success \n",videoFramerate); + returnResponse(true); + } + } + + uint32_t AVOutputTV::resetPrecisionDetail(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + bool success = resetPQParamToDefault(parameters, "PrecisionDetail", + PQ_PARAM_PRECISION_DETAIL, SetPrecisionDetail); + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetLocalContrastEnhancement(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + #if ENABLE_PQ_PARAM + bool success = resetPQParamToDefault(parameters, "LocalContrastEnhancement", + PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, SetLocalContrastEnhancement); + #else + bool success = true; + #endif + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetMPEGNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + #if ENABLE_PQ_PARAM + bool success = resetPQParamToDefault(parameters, "MPEGNoiseReduction", + PQ_PARAM_MPEG_NOISE_REDUCTION, SetMPEGNoiseReduction); + #else + bool success = true; + #endif + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetDigitalNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + #if ENABLE_PQ_PARAM + bool success = resetPQParamToDefault(parameters, "DigitalNoiseReduction", + PQ_PARAM_DIGITAL_NOISE_REDUCTION, SetDigitalNoiseReduction); + #else + bool success = true; + #endif + + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetMEMC(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + bool success = resetPQParamToDefault(parameters, "MEMC", + PQ_PARAM_MEMC, SetMEMC); + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetAISuperResolution(const JsonObject& parameters, JsonObject& response) + { +#if HAL_NOT_READY + bool success= false; +#else + bool success= resetPQParamToDefault(parameters,"AISuperResolution", + PQ_PARAM_AI_SUPER_RESOLUTION, SetAISuperResolution); +#endif + returnResponse(success); + } + + uint32_t AVOutputTV::getPrecisionDetail(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int precisionDetail = 0; + bool success = getPQParamFromContext(parameters, + "PrecisionDetail", + PQ_PARAM_PRECISION_DETAIL, + precisionDetail); + if (success) { + response["precisionDetail"] = precisionDetail; + } + 
returnResponse(success); + } + + uint32_t AVOutputTV::getLocalContrastEnhancement(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int localContraseEnhancement = 0; + bool success = getPQParamFromContext(parameters, + "LocalContrastEnhancement", + PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, + localContraseEnhancement); + if (success) { + response["localContrastEnhancement"] = localContraseEnhancement; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getMPEGNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int MPEGNoiseReduction = 0; + bool success = getPQParamFromContext(parameters, + "MPEGNoiseReduction", + PQ_PARAM_MPEG_NOISE_REDUCTION, + MPEGNoiseReduction); + if (success) { + response["mpegNoiseReduction"] = MPEGNoiseReduction; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getDigitalNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int digitalNoiseReduction = 0; + bool success = getPQParamFromContext(parameters, + "DigitalNoiseReduction", + PQ_PARAM_DIGITAL_NOISE_REDUCTION, + digitalNoiseReduction); + if (success) { + response["digitalNoiseReduction"] = digitalNoiseReduction; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getMEMC(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int MEMC = 0; + bool success = getPQParamFromContext(parameters, + "MEMC", + PQ_PARAM_MEMC, + MEMC); + if (success) { + response["memc"] = MEMC; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getAISuperResolution(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int aiSuperResolution = 0; + bool success = getPQParamFromContext(parameters, + "AISuperResolution", + PQ_PARAM_AI_SUPER_RESOLUTION, + aiSuperResolution); + if (success) { + response["aiSuperResolution"] = aiSuperResolution; + } + returnResponse(success); + } + + uint32_t AVOutputTV::setContextPQParam(const JsonObject& parameters, JsonObject& response, + const std::string& inputParamName, + const std::string& tr181ParamName, + int maxAllowedValue, + tvPQParameterIndex_t pqParamType, + std::function halSetter) + { + LOGINFO("Entry"); + + if (!parameters.HasLabel(inputParamName.c_str())) { + LOGERR("Missing parameter: %s", inputParamName.c_str()); + returnResponse(false); + } + + std::string valueStr = parameters[inputParamName.c_str()].String(); + int value = std::stoi(valueStr); + + if (value < 0 || value > maxAllowedValue) { + LOGERR("Input value %d is out of range for %s", value, inputParamName.c_str()); + returnResponse(false); + } + + // Get current context + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + tvPQModeIndex_t currentPQMode = PQ_MODE_STANDARD; + + GetCurrentVideoSource(¤tSrc); + GetCurrentVideoFormat(¤tFmt); + if (currentFmt == VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + char picMode[PIC_MODE_NAME_MAX] = {0}; + if (getCurrentPictureMode(picMode)) + { + auto it = pqModeReverseMap.find(picMode); + if (it != pqModeReverseMap.end()) + { + currentPQMode = static_cast(it->second); + } + else + { + LOGERR("Unknown picture mode"); + } + } + else + { + LOGERR("Failed to get current picture mode"); + } + + LOGINFO("currentPQMode: %d, currentFmt: %d, currentSrc: %d", currentPQMode, currentFmt, currentSrc); + + if (isSetRequiredForParam(parameters, tr181ParamName)) { + #if HAL_NOT_READY + #else + tvError_t ret = halSetter(currentSrc, currentPQMode, currentFmt, value); + if 
(ret != tvERROR_NONE) { + LOGERR("HAL setter failed for %s", inputParamName.c_str()); + returnResponse(false); + } + #endif + } + + // Persist + int retval = updateAVoutputTVParamV2("set", tr181ParamName, parameters, pqParamType, value); + if (retval != 0) { + LOGERR("Failed to save %s to ssm_data", inputParamName.c_str()); + returnResponse(false); + } + + LOGINFO("Exit: %s set successfully to %d", inputParamName.c_str(), value); + returnResponse(true); + } + + uint32_t AVOutputTV::setAISuperResolution(const JsonObject& parameters, JsonObject& response) + { + return setContextPQParam( + parameters, response, + "aiSuperResolution", + "AISuperResolution", + m_maxAISuperResolution, + PQ_PARAM_AI_SUPER_RESOLUTION, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetAISuperResolution(src, mode, fmt, val); + } + ); + } + + uint32_t AVOutputTV::setMEMC(const JsonObject& parameters, JsonObject& response) + { + return setContextPQParam( + parameters, response, + "memc", "MEMC", + m_maxMEMC, + PQ_PARAM_MEMC, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetMEMC(src, mode, fmt, val); + } + ); + } + + uint32_t AVOutputTV::setPrecisionDetail(const JsonObject& parameters, JsonObject& response) + { + return setContextPQParam( + parameters, response, + "precisionDetail", "PrecisionDetail", + m_maxPrecisionDetail, + PQ_PARAM_PRECISION_DETAIL, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetPrecisionDetail(src, mode, fmt, val); + } + ); + } + + uint32_t AVOutputTV::setLocalContrastEnhancement(const JsonObject& parameters, JsonObject& response) + { +#if ENABLE_PQ_PARAM + return setContextPQParam( + parameters, response, + "localContrastEnhancement", "LocalContrastEnhancement", + m_maxLocalContrastEnhancement, + PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetLocalContrastEnhancement(src, mode, fmt, val); + } + ); +#else + returnResponse(true); +#endif + } + + uint32_t AVOutputTV::setMPEGNoiseReduction(const JsonObject& parameters, JsonObject& response) + { +#if ENABLE_PQ_PARAM + return setContextPQParam( + parameters, response, + "mpegNoiseReduction", "MPEGNoiseReduction", + m_maxMPEGNoiseReduction, + PQ_PARAM_MPEG_NOISE_REDUCTION, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetMPEGNoiseReduction(src, mode, fmt, val); + } + ); +#else + returnResponse(true); +#endif + } + + uint32_t AVOutputTV::setDigitalNoiseReduction(const JsonObject& parameters, JsonObject& response) + { +#if ENABLE_PQ_PARAM + return setContextPQParam( + parameters, response, + "digitalNoiseReduction", "DigitalNoiseReduction", + m_maxDigitalNoiseReduction, + PQ_PARAM_DIGITAL_NOISE_REDUCTION, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetDigitalNoiseReduction(src, mode, fmt, val); + } + ); +#else + returnResponse(true); +#endif + } + + uint32_t AVOutputTV::getBacklight(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + if(m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + std::string key; + paramIndex_t indexInfo; + int backlight = 0,err = 0; + + if (parsingGetInputArgument(parameters, "Backlight",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } + + if (isPlatformSupport("Backlight") != 0) { + 
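+            // Illustrative note (not part of this patch): when "Backlight" is not in the platform
+            // capability profile, the request is rejected right here and, assuming the standard
+            // returnResponse() macro semantics, the caller simply sees { "success": false }.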
returnResponse(false);
+            }
+
+            if (getParamIndex("Backlight", inputInfo,indexInfo) == -1) {
+                LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__);
+                returnResponse(false);
+            }
+
+            err = getLocalparam("Backlight",indexInfo,backlight, PQ_PARAM_BACKLIGHT);
+            if( err == 0 ) {
+                response["backlight"] = backlight;
+                LOGINFO("Exit : Backlight Value: %d \n", backlight);
+                returnResponse(true);
+            }
+            else {
+                returnResponse(false);
+            }
+        }
+        else
+        {
+            int backlight = 0;
+            bool success = getPQParamFromContext(parameters,
+                                                 "Backlight",
+                                                 PQ_PARAM_BACKLIGHT,
+                                                 backlight);
+            if (success) {
+                response["backlight"] = backlight;
+            }
+            returnResponse(success);
+
+        }
+    }
+
+    uint32_t AVOutputTV::setBacklight(const JsonObject& parameters, JsonObject& response)
+    {
+        LOGINFO("Entry\n");
+        if(m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED)
+        {
+            std::string value;
+            capDetails_t inputInfo;
+            int backlight = 0;
+            tvError_t ret = tvERROR_NONE;
+
+            value = parameters.HasLabel("backlight") ? parameters["backlight"].String() : "";
+            returnIfParamNotFound(parameters,"backlight");
+            backlight = std::stoi(value);
+
+            if (validateIntegerInputParameter("Backlight",backlight) != 0) {
+                LOGERR("Failed in Backlight range validation:%s", __FUNCTION__);
+                returnResponse(false);
+            }
+
+            if (parsingSetInputArgument(parameters,"Backlight",inputInfo) != 0) {
+                LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__);
+                returnResponse(false);
+            }
+
+            if (isPlatformSupport("Backlight") != 0 ) {
+                returnResponse(false);
+            }
+
+            if( !isCapablityCheckPassed( "Backlight" , inputInfo )) {
+                LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__);
+                returnResponse(false);
+            }
+
+            if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) {
+                LOGINFO("Proceed with setBacklight\n");
+                ret = SetBacklight(backlight);
+            }
+
+            if(ret != tvERROR_NONE) {
+                LOGERR("Failed to set Backlight\n");
+                returnResponse(false);
+            }
+            else {
+                int retval= updateAVoutputTVParam("set","Backlight",inputInfo,PQ_PARAM_BACKLIGHT,backlight);
+                if(retval != 0 ) {
+                    LOGERR("Failed to Save Backlight to ssm_data\n");
+                    returnResponse(false);
+                }
+                LOGINFO("Exit : setBacklight successful to value: %d\n", backlight);
+                returnResponse(true);
+            }
+        }
+        else
+        {
+            bool success = setIntPQParam(parameters, "Backlight", PQ_PARAM_BACKLIGHT, SetBacklight, m_maxBacklight);
+            returnResponse(success);
+        }
+
+    }
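+    // The reset helpers below factor out the "reset to default" flow that the individual
+    // handlers repeat. A typical caller (see resetBacklight()/resetBrightness() further down)
+    // does roughly:
+    //
+    //     bool ok = resetPQParamToDefault(parameters, "Backlight", PQ_PARAM_BACKLIGHT, SetBacklight);
+    //
+    // i.e. persist the reset via updateAVoutputTVParamV2(), re-read the stored default with
+    // getLocalparam() and, when the current picture-mode/source/format context requires it,
+    // push that value to the supplied HAL setter. resetEnumPQParamToDefault() is the
+    // enum-valued variant (e.g. ColorTemp, DimmingMode); it validates the persisted value
+    // against the supplied value map before calling the HAL.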
+    bool AVOutputTV::resetEnumPQParamToDefault(
+        const JsonObject& parameters,
+        const std::string& paramName,
+        tvPQParameterIndex_t pqIndex,
+        const std::unordered_map<int, std::string>& valueMap,
+        std::function<tvError_t(int, const std::unordered_map<int, std::string>&)> halSetter)
+    {
+        LOGINFO("Entry: %s\n", paramName.c_str());
+
+        capDetails_t inputInfo;
+        paramIndex_t indexInfo;
+        int intVal = 0;
+        tvError_t ret = tvERROR_NONE;
+
+        // Step 1: Save reset state using V2 persistence
+        LOGINFO("Updating AVOutputTVParamV2 for: %s\n", paramName.c_str());
+        int retval = updateAVoutputTVParamV2("reset", paramName, parameters, pqIndex, intVal);
+        if (retval != 0) {
+            LOGERR("Failed to reset %s via updateAVoutputTVParamV2. retval: %d\n", paramName.c_str(), retval);
+            return false;
+        }
+
+        // Step 2: Apply value from persisted config to HAL if needed
+        if (isSetRequiredForParam(parameters, paramName)) {
+            inputInfo.pqmode = "Current";
+            inputInfo.source = "Current";
+            inputInfo.format = "Current";
+
+            if (getParamIndex(paramName, inputInfo, indexInfo) == 0 &&
+                getLocalparam(paramName, indexInfo, intVal, pqIndex) == 0)
+            {
+                LOGINFO("%s: getLocalparam success for %s [format=%d, source=%d, mode=%d] → value=%d\n",
+                        __FUNCTION__, paramName.c_str(), indexInfo.formatIndex,
+                        indexInfo.sourceIndex, indexInfo.pqmodeIndex, intVal);
+
+                if (valueMap.find(intVal) == valueMap.end()) {
+                    LOGERR("%s: Invalid enum value %d for %s\n", __FUNCTION__, intVal, paramName.c_str());
+                    return false;
+                }
+
+                ret = halSetter(intVal, valueMap);
+                if (ret != tvERROR_NONE) {
+                    LOGERR("%s: HAL setter failed for value %d\n", paramName.c_str(), intVal);
+                    return false;
+                }
+            }
+            else {
+                LOGERR("%s: Failed to get local param for %s\n", __FUNCTION__, paramName.c_str());
+                return false;
+            }
+        }
+
+        LOGINFO("Exit: resetEnumPQParamToDefault for %s successful (value: %d)\n", paramName.c_str(), intVal);
+        return true;
+    }
+
+    bool AVOutputTV::resetPQParamToDefault(const JsonObject& parameters,
+                                           const std::string& paramName,
+                                           tvPQParameterIndex_t pqIndex,
+                                           tvSetFunctionV2 halSetter)
+    {
+        LOGINFO("Entry: %s\n", paramName.c_str());
+
+        capDetails_t inputInfo;
+        paramIndex_t indexInfo;
+        int level = 0;
+        tvError_t ret = tvERROR_NONE;
+
+        // Save reset state using V2 path
+        LOGINFO("Updating AVOutputTVParamV2 for: %s\n", paramName.c_str());
+        int retval = updateAVoutputTVParamV2("reset", paramName, parameters, pqIndex, level);
+        if (retval != 0)
+        {
+            LOGERR("Failed to update %s via updateAVoutputTVParamV2. retval: %d\n", paramName.c_str(), retval);
+            return false;
+        }
+
+        // If update succeeded, apply value from local config to HAL
+        if (isSetRequiredForParam(parameters, paramName))
+        {
+            inputInfo.pqmode = "Current";
+            inputInfo.source = "Current";
+            inputInfo.format = "Current";
+
+            if (getParamIndex(paramName, inputInfo, indexInfo) == 0 &&
+                getLocalparam(paramName, indexInfo, level, pqIndex) == 0)
+            {
+                LOGINFO("%s: getLocalparam success for %s: format=%d, source=%d, mode=%d, value=%d\n",
+                        __FUNCTION__, paramName.c_str(), indexInfo.formatIndex,
+                        indexInfo.sourceIndex, indexInfo.pqmodeIndex, level);
+                if (halSetter) {
+                    ret = halSetter(
+                        static_cast<tvVideoSrcType_t>(indexInfo.sourceIndex),
+                        static_cast<tvPQModeIndex_t>(indexInfo.pqmodeIndex),
+                        static_cast<tvVideoFormatType_t>(indexInfo.formatIndex),
+                        level);
+                    LOGINFO("%s halSetter return value: %d\n", paramName.c_str(), ret);
+                } else {
+                    LOGERR("halSetter is null for %s\n", paramName.c_str());
+                    return false;
+                }
+            }
+            else
+            {
+                LOGERR("%s: Failed to get local param for %s\n", __FUNCTION__, paramName.c_str());
+                return false;
+            }
+        }
+
+        LOGINFO("Exit: reset%s successful to value: %d\n", paramName.c_str(), level);
+        return true;
+    }
+
+    bool AVOutputTV::resetPQParamToDefault(const JsonObject& parameters,
+                                           const std::string& paramName,
+                                           tvPQParameterIndex_t pqIndex,
+                                           tvSetFunction halSetter)
+    {
+        LOGINFO("Entry: %s\n", paramName.c_str());
+
+        capDetails_t inputInfo;
+        paramIndex_t indexInfo;
+        int level = 0;
+        tvError_t ret = tvERROR_NONE;
+
+        // Save reset state using V2 path
+        LOGINFO("Updating AVOutputTVParamV2 for: %s\n", paramName.c_str());
+        int retval = updateAVoutputTVParamV2("reset", paramName, parameters, pqIndex, level);
+        if (retval != 0)
+        {
+            LOGERR("Failed to update %s via updateAVoutputTVParamV2.
retval: %d\n", paramName.c_str(), retval); + return false; + } + + // If update succeeded, apply value from local config to HAL + if (isSetRequiredForParam(parameters, paramName)) + { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + + if (getParamIndex(paramName, inputInfo, indexInfo) == 0 && + getLocalparam(paramName, indexInfo, level, pqIndex) == 0) + { + LOGINFO("%s: getLocalparam success for %s: format=%d, source=%d, mode=%d, value=%d\n", + __FUNCTION__, paramName.c_str(), indexInfo.formatIndex, + indexInfo.sourceIndex, indexInfo.pqmodeIndex, level); + ret = halSetter(level); + LOGINFO("%s halSetter return value: %d\n", paramName.c_str(), ret); + } + else + { + LOGERR("%s: Failed to get local param for %s\n", __FUNCTION__, paramName.c_str()); + return false; + } + } + + LOGINFO("Exit: reset%s successful to value: %d\n", paramName.c_str(), level); + return true; + } + + uint32_t AVOutputTV::resetBacklight(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int backlight=0; + paramIndex_t indexInfo; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "Backlight",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if (isPlatformSupport("Backlight") != 0) { + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Backlight",inputInfo )) { + LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__); + returnResponse(false); + } + + int retval= updateAVoutputTVParam("reset","Backlight",inputInfo,PQ_PARAM_BACKLIGHT,backlight); + if(retval != 0 ) { + LOGERR("Failed to reset Backlight\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Backlight", inputInfo,indexInfo); + int err = getLocalparam("Backlight",indexInfo,backlight, PQ_PARAM_BACKLIGHT); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,backlight); + ret = SetBacklight(backlight); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } + } + } + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetBacklight Successful to value : %d \n",backlight); + returnResponse(true); + } + } + else + { + bool success= resetPQParamToDefault(parameters, "Backlight", PQ_PARAM_BACKLIGHT, SetBacklight); + returnResponse(success); + } + } + + uint32_t AVOutputTV::getBacklightCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + capVectors_t vectorInfo; + JsonObject rangeObj; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray sourceArray; + + unsigned int index = 0; + + tvError_t ret = getParamsCaps("Backlight", vectorInfo ); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + response["platformSupport"] = (vectorInfo.isPlatformSupportVector[0].compare("true") == 0) ? 
true : false; + + rangeObj["from"] = std::stoi(vectorInfo.rangeVector[0]); + rangeObj["to"] = std::stoi(vectorInfo.rangeVector[1]); + response["rangeInfo"]=rangeObj; + + if ((vectorInfo.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < vectorInfo.pqmodeVector.size(); index++) { + pqmodeArray.Add(vectorInfo.pqmodeVector[index]); + } + response["pictureModeInfo"]=pqmodeArray; + } + if ((vectorInfo.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < vectorInfo.sourceVector.size(); index++) { + sourceArray.Add(vectorInfo.sourceVector[index]); + } + response["videoSourceInfo"]=sourceArray; + } + if ((vectorInfo.formatVector.front()).compare("none") != 0) { + for (index = 0; index < vectorInfo.formatVector.size(); index++) { + formatArray.Add(vectorInfo.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; + } + LOGINFO("Exit\n"); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getBrightness(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + if(m_brightnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int brightness = 0; + + if (parsingGetInputArgument(parameters, "Brightness",inputInfo) != 0) { + LOGERR("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } + + if (getParamIndex("Brightness", inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } + + int err = getLocalparam("Brightness",indexInfo,brightness, PQ_PARAM_BRIGHTNESS); + if( err == 0 ) { + response["brightness"] = brightness; + LOGINFO("Exit : Brightness Value: %d \n", brightness); + returnResponse(true); + } + else { + returnResponse(false); + } + } + else + { + int brightness = 0; + bool success = getPQParamFromContext(parameters, + "Brightness", + PQ_PARAM_BRIGHTNESS, + brightness); + if (success) { + response["brightness"] = brightness; + } + returnResponse(success); + } + } + + uint32_t AVOutputTV::setBrightness(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_brightnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + capDetails_t inputInfo; + int brightness = 0; + tvError_t ret = tvERROR_NONE; + + value = parameters.HasLabel("brightness") ? 
parameters["brightness"].String() : ""; + returnIfParamNotFound(parameters,"brightness"); + brightness = stoi(value); + + if (validateIntegerInputParameter("Brightness",brightness) != 0) { + LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters, "Brightness",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Brightness",inputInfo )) { + LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); + returnResponse(false); + } + + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s \n",__FUNCTION__); + ret = SetBrightness(brightness); + } + + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Brightness\n"); + returnResponse(false); + } + else { + int retval= updateAVoutputTVParam("set","Brightness",inputInfo,PQ_PARAM_BRIGHTNESS,brightness); + if(retval != 0 ) { + LOGERR("Failed to Save Brightness to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setBrightness successful to value: %d\n", brightness); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Brightness", PQ_PARAM_BRIGHTNESS, SetBrightness, m_maxBrightness); + returnResponse(success); + } + } + + + uint32_t AVOutputTV::resetBrightness(const JsonObject& parameters, JsonObject& response) + { + + LOGINFO("Entry\n"); + if(m_brightnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + capDetails_t inputInfo; + paramIndex_t indexInfo; + int brightness=0; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "Brightness",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Brightness",inputInfo )) { + LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); + returnResponse(false); + } + + int retval= updateAVoutputTVParam("reset","Brightness",inputInfo,PQ_PARAM_BRIGHTNESS,brightness); + if(retval != 0 ) { + LOGWARN("Failed to reset Brightness\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Brightness", inputInfo,indexInfo); + int err = getLocalparam("Brightness",indexInfo,brightness, PQ_PARAM_BRIGHTNESS); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,brightness); + ret = SetBrightness(brightness); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } + } + } + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetBrightness Successful to value : %d \n",brightness); + returnResponse(true); + } + } + else + { + bool success = resetPQParamToDefault(parameters, "Brightness", PQ_PARAM_BRIGHTNESS, SetBrightness); + returnResponse(success); + } + } + + uint32_t AVOutputTV::getBrightnessCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + capVectors_t info; + + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray sourceArray; + JsonObject rangeObj; + + unsigned int index = 0; + + tvError_t ret = getParamsCaps("Brightness",info); + + if(ret != tvERROR_NONE) { + returnResponse(false); + 
} + else { + rangeObj["from"] = stoi(info.rangeVector[0]); + rangeObj["to"] = stoi(info.rangeVector[1]); + response["rangeInfo"]=rangeObj; + + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); + } + response["pictureModeInfo"]=pqmodeArray; + } + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); + } + response["videoSourceInfo"]=sourceArray; + } + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; + } + LOGINFO("Exit\n"); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getContrast(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + if(m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int contrast = 0; + + if (parsingGetInputArgument(parameters, "Contrast",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } + + if (getParamIndex("Contrast",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } + + int err = getLocalparam("Contrast",indexInfo,contrast, PQ_PARAM_CONTRAST); + if( err == 0 ) { + response["contrast"] = contrast; + LOGINFO("Exit : Contrast Value: %d \n", contrast); + returnResponse(true); + } + else { + returnResponse(false); + } + } + else + { + int contrast = 0; + bool success = getPQParamFromContext(parameters, + "Contrast", + PQ_PARAM_CONTRAST, + contrast); + if (success) { + response["contrast"] = contrast; + } + returnResponse(success); + + } + } + + uint32_t AVOutputTV::setContrast(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int contrast = 0; + tvError_t ret = tvERROR_NONE; + std::string value; + + value = parameters.HasLabel("contrast") ? 
parameters["contrast"].String() : ""; + returnIfParamNotFound(parameters,"contrast"); + contrast = std::stoi(value); + + if (validateIntegerInputParameter("Contrast", contrast) != 0) { + LOGERR("Failed in contrast range validation:%s", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters, "Contrast",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Contrast" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); + returnResponse(false); + } + + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s \n",__FUNCTION__); + ret = SetContrast(contrast); + } + + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Contrast\n"); + returnResponse(false); + } + else { + int retval= updateAVoutputTVParam("set","Contrast",inputInfo,PQ_PARAM_CONTRAST,contrast); + if(retval != 0 ) { + LOGERR("Failed to Save Contrast to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setContrast successful to value: %d\n", contrast); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Contrast", PQ_PARAM_CONTRAST, SetContrast, m_maxContrast); + returnResponse(success); + } + } + + uint32_t AVOutputTV::resetContrast(const JsonObject& parameters, JsonObject& response) + { + + LOGINFO("Entry\n"); + if(m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int contrast=0; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "Contrast",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Contrast" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); + returnResponse(false); + } + + int retval= updateAVoutputTVParam("reset","Contrast",inputInfo,PQ_PARAM_CONTRAST,contrast); + + if(retval != 0 ) { + LOGWARN("Failed to reset Contrast\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Contrast", inputInfo,indexInfo); + int err = getLocalparam("Contrast",indexInfo,contrast, PQ_PARAM_CONTRAST); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,contrast); + ret = SetContrast(contrast); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } + } + } + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetContrast Successful to value : %d \n",contrast); + returnResponse(true); + } + } + else + { + bool success= resetPQParamToDefault(parameters, "Contrast", PQ_PARAM_CONTRAST, SetContrast); + returnResponse(success); + + } + } + + uint32_t AVOutputTV::getContrastCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + capVectors_t info; + + JsonArray rangeArray; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray sourceArray; + + JsonObject rangeObj; + unsigned int index = 0; + + tvError_t ret = getParamsCaps("Contrast",info); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + rangeObj["from"] = stoi(info.rangeVector[0]); + rangeObj["to"] = 
stoi(info.rangeVector[1]); + response["rangeInfo"]=rangeObj; + + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); + } + response["pictureModeInfo"]=pqmodeArray; + } + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); + } + response["videoSourceInfo"]=sourceArray; + } + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; + } + LOGINFO("Exit\n"); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getSaturation(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + if(m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int saturation = 0; + + if (parsingGetInputArgument(parameters, "Saturation",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } + + if (getParamIndex("Saturation", inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } + + int err = getLocalparam("Saturation",indexInfo,saturation, PQ_PARAM_SATURATION); + if( err == 0 ) { + response["saturation"] = saturation; + LOGINFO("Exit : Saturation Value: %d \n", saturation); + returnResponse(true); + } + else { + returnResponse(false); + } + } + else + { + int saturation = 0; + bool success = getPQParamFromContext(parameters, + "Saturation", + PQ_PARAM_SATURATION, + saturation); + if (success) { + response["saturation"] = saturation; + } + returnResponse(success); + + } + } + + uint32_t AVOutputTV::setSaturation(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + std::string value; + int saturation = 0; + tvError_t ret = tvERROR_NONE; + + value = parameters.HasLabel("saturation") ? 
parameters["saturation"].String() : ""; + returnIfParamNotFound(parameters,"saturation"); + saturation = std::stoi(value); + + if (validateIntegerInputParameter("Saturation",saturation) != 0) { + LOGERR("Failed in saturation range validation:%s", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters, "Saturation",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Saturation" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Saturation\n", __FUNCTION__); + returnResponse(false); + } + + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetSaturation(saturation); + } + + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Saturation\n"); + returnResponse(false); + } + else { + int retval= updateAVoutputTVParam("set","Saturation",inputInfo,PQ_PARAM_SATURATION,saturation); + if(retval != 0 ) { + LOGERR("Failed to Save Saturation to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setSaturation successful to value: %d\n", saturation); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Saturation", PQ_PARAM_SATURATION, SetSaturation, m_maxSaturation); + returnResponse(success); + } + } + + uint32_t AVOutputTV::resetSaturation(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int saturation=0; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "Saturation", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Saturation", inputInfo )) { + LOGERR("%s: CapablityCheck failed for Saturation\n", __FUNCTION__); + returnResponse(false); + } + + int retval= updateAVoutputTVParam("reset","Saturation",inputInfo,PQ_PARAM_SATURATION,saturation); + + if(retval != 0 ) { + LOGERR("Failed to reset Saturation\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Saturation",inputInfo,indexInfo); + int err = getLocalparam("Saturation",indexInfo, saturation, PQ_PARAM_SATURATION); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,saturation); + ret = SetSaturation(saturation); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } + } + } + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetSaturation Successful to value : %d \n",saturation); + returnResponse(true); + } + } + else + { + bool success= resetPQParamToDefault(parameters, "Saturation", PQ_PARAM_SATURATION, SetSaturation); + returnResponse(success); + + } + } + + uint32_t AVOutputTV::getSaturationCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + capVectors_t info; JsonArray rangeArray; JsonArray pqmodeArray; @@ -453,34 +2874,34 @@ namespace Plugin { JsonArray sourceArray; unsigned int index = 0; + JsonObject rangeObj; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"AspectRatio"); + tvError_t ret = 
getParamsCaps("Saturation",info); if(ret != tvERROR_NONE) { returnResponse(false); } - else { - for (index = 0; index < range.size(); index++) { - rangeArray.Add(range[index]); - } + else { + rangeObj["from"] = stoi(info.rangeVector[0]); + rangeObj["to"] = stoi(info.rangeVector[1]); + response["rangeInfo"]=rangeObj; - response["options"]=rangeArray; - if (pqmode.front().compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -489,413 +2910,412 @@ namespace Plugin { } } - uint32_t AVOutputTV::setZoomMode(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getSharpness(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); - std::string value; - tvDisplayMode_t mode = tvDisplayMode_16x9; - std::string pqmode; - std::string source; - std::string format; + LOGINFO("Entry"); + if(m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int sharpness = 0; + if (parsingGetInputArgument(parameters, "Sharpness",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - value = parameters.HasLabel("zoomMode") ? 
parameters["zoomMode"].String() : ""; - returnIfParamNotFound(parameters,"zoomMode"); + if (getParamIndex("Sharpness",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (validateInputParameter("AspectRatio",value) != 0) { - LOGERR("%s: Range validation failed for AspectRatio\n", __FUNCTION__); - returnResponse(false); + int err = getLocalparam("Sharpness",indexInfo,sharpness, PQ_PARAM_SHARPNESS); + if( err == 0 ) { + response["sharpness"] = sharpness; + LOGINFO("Exit : Sharpness Value: %d \n", sharpness); + returnResponse(true); + } + else { + returnResponse(false); + } } + else + { + int sharpness = 0; + bool success = getPQParamFromContext(parameters, + "Sharpness", + PQ_PARAM_SHARPNESS, + sharpness); + if (success) { + response["sharpness"] = sharpness; + } + returnResponse(success); - if (parsingSetInputArgument(parameters, "AspectRatio",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); } + } - if( !isCapablityCheckPassed( pqmode, source, format, "AspectRatio" )) { - LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); - returnResponse(false); - } + uint32_t AVOutputTV::setSharpness(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int sharpness = 0; + tvError_t ret = tvERROR_NONE; + std::string value; - if(!value.compare("TV 16X9 STRETCH")) { - mode = tvDisplayMode_16x9; - } - else if (!value.compare("TV 4X3 PILLARBOX")) { - mode = tvDisplayMode_4x3; - } - else if (!value.compare("TV NORMAL")) { - mode = tvDisplayMode_NORMAL; - } - else if (!value.compare("TV DIRECT")) { - mode = tvDisplayMode_DIRECT; - } - else if (!value.compare("TV AUTO")) { - mode = tvDisplayMode_AUTO; - } - else if (!value.compare("TV ZOOM")) { - mode = tvDisplayMode_ZOOM; - } - else { - returnResponse(false); - } - m_videoZoomMode = mode; - tvError_t ret = setAspectRatioZoomSettings (mode); + value = parameters.HasLabel("sharpness") ? 
parameters["sharpness"].String() : ""; + returnIfParamNotFound(parameters,"sharpness"); + sharpness = std::stoi(value); - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - //Save DisplayMode to localstore and ssm_data - int params[3]={0}; - params[0]=mode; - int retval=updateAVoutputTVParam("set","AspectRatio",pqmode,source,format,PQ_PARAM_ASPECT_RATIO,params); + if (validateIntegerInputParameter("Sharpness",sharpness) != 0) { + LOGERR("Failed in sharpness range validation:%s", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters, "Sharpness", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if(retval != 0) { - LOGERR("Failed to Save DisplayMode to ssm_data\n"); - returnResponse(false); + if( !isCapablityCheckPassed( "Sharpness", inputInfo )) { + LOGERR("%s: CapablityCheck failed for Sharpness\n", __FUNCTION__); + returnResponse(false); } - tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); - if ( err != tr181Success ) { - LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); - returnResponse(false); + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetSharpness(sharpness); + } + + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Sharpness\n"); + returnResponse(false); } else { - LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + int retval= updateAVoutputTVParam("set","Sharpness",inputInfo,PQ_PARAM_SHARPNESS,sharpness); + if(retval != 0 ) { + LOGERR("Failed to Save Sharpness to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setSharpness successful to value: %d\n", sharpness); + returnResponse(true); } - LOGINFO("Exit : SetAspectRatio() value : %s\n",value.c_str()); - returnResponse(true); + } + else + { + bool success = setIntPQParam(parameters, "Sharpness", PQ_PARAM_SHARPNESS, SetSharpness, m_maxSharpness); + returnResponse(success); } } - uint32_t AVOutputTV::getZoomMode(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::resetSharpness(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); - tvDisplayMode_t mode; - - tvError_t ret = getUserSelectedAspectRatio (&mode); - - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - switch(mode) { - case tvDisplayMode_16x9: - LOGINFO("Aspect Ratio: TV 16X9 STRETCH\n"); - response["zoomMode"] = "TV 16X9 STRETCH"; - break; - - case tvDisplayMode_4x3: - LOGINFO("Aspect Ratio: TV 4X3 PILLARBOX\n"); - response["zoomMode"] = "TV 4X3 PILLARBOX"; - break; - - case tvDisplayMode_NORMAL: - LOGINFO("Aspect Ratio: TV Normal\n"); - response["zoomMode"] = "TV NORMAL"; - break; + if(m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int sharpness=0; + tvError_t ret = tvERROR_NONE; - case tvDisplayMode_AUTO: - LOGINFO("Aspect Ratio: TV AUTO\n"); - response["zoomMode"] = "TV AUTO"; - break; + if (parsingSetInputArgument(parameters, "Sharpness",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - case tvDisplayMode_DIRECT: - LOGINFO("Aspect Ratio: TV DIRECT\n"); - response["zoomMode"] = "TV DIRECT"; - break; + if( !isCapablityCheckPassed( "Sharpness" , inputInfo)) { + LOGERR("%s: CapablityCheck failed for Sharpness\n", 
__FUNCTION__); + returnResponse(false); + } - case tvDisplayMode_ZOOM: - LOGINFO("Aspect Ratio: TV ZOOM\n"); - response["zoomMode"] = "TV ZOOM"; - break; + int retval= updateAVoutputTVParam("reset","Sharpness", inputInfo,PQ_PARAM_SHARPNESS,sharpness); - default: - LOGINFO("Aspect Ratio: TV AUTO\n"); - response["zoomMode"] = "TV AUTO"; - break; + if(retval != 0 ) { + LOGERR("Failed to reset Sharpness\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Sharpness",inputInfo,indexInfo); + int err = getLocalparam("Sharpness",indexInfo, sharpness, PQ_PARAM_SHARPNESS); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,sharpness); + ret = SetSharpness(sharpness); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } + } } - returnResponse(true); - } - } - - uint32_t AVOutputTV::resetZoomMode(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - std::string pqmode; - std::string source; - std::string format; - tvError_t ret = tvERROR_NONE; - if (parsingSetInputArgument(parameters, "AspectRatio",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetSharpness Successful to value : %d \n",sharpness); + returnResponse(true); + } } + else + { + bool success= resetPQParamToDefault(parameters, "Sharpness", PQ_PARAM_SHARPNESS, SetSharpness); + returnResponse(success); - if( !isCapablityCheckPassed( pqmode, source, format, "AspectRatio" )) { - LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); - returnResponse(false); } - tr181ErrorCode_t err = clearLocalParam(rfc_caller_id,AVOUTPUT_ASPECTRATIO_RFC_PARAM); - if ( err != tr181Success ) { - LOGERR("clearLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); - ret = tvERROR_GENERAL; - } - else { - ret = setDefaultAspectRatio(pqmode,source,format); - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetDefaultAspectRatio()\n"); - returnResponse(true); - } } - uint32_t AVOutputTV::getVideoFormat(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getSharpnessCaps(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); - tvVideoFormatType_t videoFormat; - tvError_t ret = GetCurrentVideoFormat(&videoFormat); - if(ret != tvERROR_NONE) { - response["currentVideoFormat"] = "NONE"; - returnResponse(false); - } - else { - response["currentVideoFormat"] = getVideoFormatTypeToString(videoFormat); - LOGINFO("Exit: getVideoFormat :%d success \n",videoFormat); - returnResponse(true); - } - } + LOGINFO("Entry"); + capVectors_t info; - uint32_t AVOutputTV::getVideoResolution(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - tvResolutionParam_t videoResolution; - tvError_t ret = GetCurrentVideoResolution(&videoResolution); - if(ret != tvERROR_NONE) { - response["currentVideoResolution"] = "NONE"; - returnResponse(false); - } - else { - response["currentVideoResolution"] = getVideoResolutionTypeToString(videoResolution); - LOGINFO("Exit: getVideoResolution :%d success 
\n",videoResolution.resolutionValue); - returnResponse(true); - } - } + JsonArray rangeArray; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray sourceArray; + + JsonObject rangeObj; + unsigned int index = 0; + + tvError_t ret = getParamsCaps("Sharpness",info); - uint32_t AVOutputTV::getVideoFrameRate(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - tvVideoFrameRate_t videoFramerate; - tvError_t ret = GetCurrentVideoFrameRate(&videoFramerate); if(ret != tvERROR_NONE) { - response["currentVideoFrameRate"] = "NONE"; returnResponse(false); } else { - response["currentVideoFrameRate"] = getVideoFrameRateTypeToString(videoFramerate); - LOGINFO("Exit: videoFramerate :%d success \n",videoFramerate); + rangeObj["from"] = stoi(info.rangeVector[0]); + rangeObj["to"] = stoi(info.rangeVector[1]); + response["rangeInfo"]=rangeObj; + + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); + } + response["pictureModeInfo"]=pqmodeArray; + } + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); + } + response["videoSourceInfo"]=sourceArray; + } + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; + } + LOGINFO("Exit\n"); returnResponse(true); } } - uint32_t AVOutputTV::getBacklight(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getHue(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int hue = 0; - std::string pqmode; - std::string source; - std::string format; - std::string key; - - int sourceIndex=0,pqIndex=0,formatIndex=0; - int backlight = 0,err = 0; - - if (parsingGetInputArgument(parameters, "Backlight",source, pqmode, format) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if (parsingGetInputArgument(parameters, "Hue", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - if (isPlatformSupport("Backlight") != 0) { - returnResponse(false); - } + if (getParamIndex("Hue",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + int err = getLocalparam("Hue",indexInfo,hue, PQ_PARAM_HUE); + if( err == 0 ) { + response["hue"] = hue; + LOGINFO("Exit : Hue Value: %d \n", hue); + returnResponse(true); + } + else { + returnResponse(false); + } } + else + { + int hue = 0; + bool success = getPQParamFromContext(parameters, + "Hue", + PQ_PARAM_HUE, + hue); + if (success) { + response["hue"] = hue; + } + returnResponse(success); - err = getLocalparam("Backlight",formatIndex,pqIndex,sourceIndex,backlight, PQ_PARAM_BACKLIGHT); - if( err == 0 ) { - response["backlight"] = backlight; - LOGINFO("Exit : Backlight Value: %d \n", backlight); - returnResponse(true); - } - else { - returnResponse(false); } } - uint32_t AVOutputTV::setBacklight(const JsonObject& parameters, JsonObject& response) + uint32_t 
AVOutputTV::setHue(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int hue = 0; + tvError_t ret = tvERROR_NONE; + std::string value; - std::string value; - std::string pqmode; - std::string source; - std::string format; - int backlight = 0; - tvError_t ret = tvERROR_NONE; - - value = parameters.HasLabel("backlight") ? parameters["backlight"].String() : ""; - returnIfParamNotFound(parameters,"backlight"); - backlight = std::stoi(value); - - if (validateIntegerInputParameter("Backlight",backlight) != 0) { - LOGERR("Failed in Backlight range validation:%s", __FUNCTION__); - returnResponse(false); - } + value = parameters.HasLabel("hue") ? parameters["hue"].String() : ""; + returnIfParamNotFound(parameters,"hue"); + hue = std::stoi(value); - if (parsingSetInputArgument(parameters, "Backlight",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (validateIntegerInputParameter("Hue",hue) != 0) { + LOGERR("Failed in hue range validation:%s", __FUNCTION__); + returnResponse(false); + } - if (isPlatformSupport("Backlight") != 0 ) { - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Hue",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "Backlight" )) { - LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Hue", inputInfo )) { + LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with setBacklight\n"); - ret = SetBacklight(backlight); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetHue(hue); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Backlight\n"); - returnResponse(false); - } - else { - int params[3]={0}; - params[0]=backlight; - int retval= updateAVoutputTVParam("set","Backlight",pqmode,source,format,PQ_PARAM_BACKLIGHT,params); - if(retval != 0 ) { - LOGERR("Failed to Save Backlight to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Hue\n"); returnResponse(false); } - LOGINFO("Exit : setBacklight successful to value: %d\n", backlight); - returnResponse(true); + else { + int retval= updateAVoutputTVParam("set","Hue",inputInfo,PQ_PARAM_HUE,hue); + if(retval != 0 ) { + LOGERR("Failed to Save Hue to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setHue successful to value: %d\n", hue); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Hue", PQ_PARAM_HUE, SetHue, m_maxHue); + returnResponse(success); } - } - uint32_t AVOutputTV::resetBacklight(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::resetHue(const JsonObject& parameters, JsonObject& response) { + LOGINFO("Entry\n"); - std::string value; - std::string pqmode; - std::string source; - std::string format; - int sourceIndex=0,pqIndex=0,formatIndex=0,backlight=0; - int params[3]={0}; - tvError_t ret = tvERROR_NONE; + if(m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int hue=0; + tvError_t ret = tvERROR_NONE; - if (parsingSetInputArgument(parameters, "Backlight",source, pqmode, format) != 0) { 
- LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Hue",inputInfo)!= 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if (isPlatformSupport("Backlight") != 0) { - returnResponse(false); - } + if( !isCapablityCheckPassed( "Hue" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "Backlight" )) { - LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__); - returnResponse(false); - } + int retval= updateAVoutputTVParam("reset","Hue", inputInfo,PQ_PARAM_HUE,hue); - int retval= updateAVoutputTVParam("reset","Backlight",pqmode,source,format,PQ_PARAM_BACKLIGHT,params); - if(retval != 0 ) { - LOGERR("Failed to reset Backlight\n"); - returnResponse(false); - } - else { - if (isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("Backlight",formatIndex,pqIndex,sourceIndex,backlight, PQ_PARAM_BACKLIGHT); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, pqIndex,backlight); - ret = SetBacklight(backlight); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGERR("Failed to reset Hue\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Hue",inputInfo,indexInfo); + int err = getLocalparam("Hue",indexInfo, hue, PQ_PARAM_HUE); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,hue); + ret = SetHue(hue); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetHue Successful to value : %d \n",hue); + returnResponse(true); + } } - else { - LOGINFO("Exit : resetBacklight Successful to value : %d \n",backlight); - returnResponse(true); + else + { + bool success= resetPQParamToDefault(parameters, "Hue", PQ_PARAM_HUE, SetHue); + returnResponse(success); + } + } - uint32_t AVOutputTV::getBacklightCaps(const JsonObject& parameters, JsonObject& response) - { + uint32_t AVOutputTV::getHueCaps(const JsonObject& parameters, JsonObject& response) + { LOGINFO("Entry"); - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; - std::string isPlatformSupport; - std::vector indexInfo; - - JsonObject rangeObj; + JsonArray rangeArray; JsonArray pqmodeArray; JsonArray formatArray; JsonArray sourceArray; + JsonObject rangeObj; unsigned int index = 0; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"Backlight", isPlatformSupport, indexInfo); + tvError_t ret = getParamsCaps("Hue",info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - response["platformSupport"] = (isPlatformSupport.compare("true") == 0) ? 
true : false; - - rangeObj["from"] = stoi(range[0]); - rangeObj["to"] = stoi(range[1]); + rangeObj["from"] = stoi(info.rangeVector[0]); + rangeObj["to"] = stoi(info.rangeVector[1]); response["rangeInfo"]=rangeObj; - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -904,185 +3324,260 @@ namespace Plugin { } } - uint32_t AVOutputTV::getBrightness(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getColorTemperature(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int colortemp = 0; - std::string pqmode; - std::string source; - std::string format; - std::string key; - int sourceIndex=0,pqIndex=0,formatIndex=0; - int brightness = 0; + if (parsingGetInputArgument(parameters, "ColorTemperature", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - if (parsingGetInputArgument(parameters, "Brightness",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("ColorTemperature",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + int err = getLocalparam("ColorTemp",indexInfo,colortemp,PQ_PARAM_COLOR_TEMPERATURE); + if( err == 0 ) { + switch(colortemp) { + case tvColorTemp_STANDARD: + LOGINFO("Color Temp Value: Standard\n"); + response["colorTemperature"] = "Standard"; + break; + + case tvColorTemp_WARM: + LOGINFO("Color Temp Value: Warm\n"); + response["colorTemperature"] = "Warm"; + break; + + case tvColorTemp_COLD: + LOGINFO("Color Temp Value: Cold\n"); + response["colorTemperature"] = "Cold"; + break; + + case tvColorTemp_USER: + LOGINFO("Color Temp Value: User Defined\n"); + response["colorTemperature"] = "UserDefined"; + break; + + default: + LOGINFO("Color Temp Value: Standard\n"); + response["colorTemperature"] = "Standard"; + break; + } + LOGINFO("Exit : ColorTemperature Value: %d \n", colortemp); + returnResponse(true); + } + else { + returnResponse(false); + } } + else + { + std::string outMode; + if (getEnumPQParamString(parameters, "ColorTemp", + PQ_PARAM_COLOR_TEMPERATURE, colorTempReverseMap, outMode)) { + response["colorTemperature"] = outMode; + returnResponse(true); + } else { + 
returnResponse(false); + } - int err = getLocalparam("Brightness",formatIndex,pqIndex,sourceIndex,brightness, PQ_PARAM_BRIGHTNESS); - if( err == 0 ) { - response["brightness"] = brightness; - LOGINFO("Exit : Brightness Value: %d \n", brightness); - returnResponse(true); - } - else { - returnResponse(false); } } - uint32_t AVOutputTV::setBrightness(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::setColorTemperature(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + std::string value; + tvColorTemp_t colortemp = tvColorTemp_MAX; + tvError_t ret = tvERROR_NONE; + + value = parameters.HasLabel("colorTemperature") ? parameters["colorTemperature"].String() : ""; + returnIfParamNotFound(parameters,"colorTemperature"); + if(!value.compare("Standard")) { + colortemp = tvColorTemp_STANDARD; + } + else if (!value.compare("Warm")) { + colortemp = tvColorTemp_WARM; + } + else if (!value.compare("Cold")) { + colortemp = tvColorTemp_COLD; + } + else if (!value.compare("UserDefined")) { + colortemp = tvColorTemp_USER; + } + else { + returnResponse(false); + } - std::string value; - std::string pqmode; - std::string source; - std::string format; - int brightness = 0; - tvError_t ret = tvERROR_NONE; - - value = parameters.HasLabel("brightness") ? parameters["brightness"].String() : ""; - returnIfParamNotFound(parameters,"brightness"); - brightness = stoi(value); - - if (validateIntegerInputParameter("Brightness",brightness) != 0) { - LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); - returnResponse(false); - } - - if (parsingSetInputArgument(parameters, "Brightness",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "ColorTemperature",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "Brightness" )) { - LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "ColorTemperature", inputInfo )) { + LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with %s \n",__FUNCTION__); - ret = SetBrightness(brightness); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetColorTemperature((tvColorTemp_t)colortemp); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Brightness\n"); - returnResponse(false); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set ColorTemperature\n"); + returnResponse(false); + } + else { + int retval= updateAVoutputTVParam("set","ColorTemp", inputInfo,PQ_PARAM_COLOR_TEMPERATURE,(int)colortemp); + if(retval != 0 ) { + LOGERR("Failed to Save ColorTemperature to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setColorTemperature successful to value: %d\n", colortemp); + returnResponse(true); + } } - else { - int params[3]={0}; - params[0]=brightness; - int retval= updateAVoutputTVParam("set","Brightness",pqmode,source,format,PQ_PARAM_BRIGHTNESS,params); - if(retval != 0 ) { - LOGERR("Failed to Save Brightness to ssm_data\n"); + else + { + bool success = setEnumPQParam( + parameters, + "colorTemperature", + 
"ColorTemp", + colorTempMap, + PQ_PARAM_COLOR_TEMPERATURE, + [](int val) { + return SetColorTemperature(static_cast(val)); + }); + + if (!success) { + LOGERR("setColorTemperature failed"); returnResponse(false); } - LOGINFO("Exit : setBrightness successful to value: %d\n", brightness); + + LOGINFO("setColorTemperature: Success"); returnResponse(true); } - } - - uint32_t AVOutputTV::resetBrightness(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::resetColorTemperature(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int colortemp=0; + tvError_t ret = tvERROR_NONE; - std::string value; - std::string pqmode; - std::string source; - std::string format; - int sourceIndex=0,pqIndex=0,formatIndex=0,brightness=0; - int params[3]={0}; - tvError_t ret = tvERROR_NONE; + if (parsingSetInputArgument(parameters, "ColorTemperature", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "Brightness",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "ColorTemperature", inputInfo )) { + LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "Brightness" )) { - LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); - returnResponse(false); - } + int retval= updateAVoutputTVParam("reset","ColorTemp", inputInfo,PQ_PARAM_COLOR_TEMPERATURE,colortemp); - int retval= updateAVoutputTVParam("reset","Brightness",pqmode,source,format,PQ_PARAM_BRIGHTNESS,params); - if(retval != 0 ) { - LOGWARN("Failed to reset Brightness\n"); - returnResponse(false); - } - else { - if (isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("Brightness",formatIndex,pqIndex,sourceIndex,brightness, PQ_PARAM_BRIGHTNESS); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, pqIndex,brightness); - ret = SetBrightness(brightness); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGERR("Failed to reset ColorTemperature\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("ColorTemperature",inputInfo,indexInfo); + int err = getLocalparam("ColorTemp",indexInfo, colortemp, PQ_PARAM_COLOR_TEMPERATURE); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex, colortemp); + ret = SetColorTemperature((tvColorTemp_t)colortemp); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetBrightness Successful to value : %d \n",brightness); - returnResponse(true); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetColorTemperature Successful to 
value : %d \n",colortemp); + returnResponse(true); + } } + else + { + bool success = resetEnumPQParamToDefault( + parameters, + "ColorTemp", + PQ_PARAM_COLOR_TEMPERATURE, + colorTempReverseMap, + [](int val, const std::unordered_map&) { + return SetColorTemperature(static_cast(val)); + }); + returnResponse(success); + } } - uint32_t AVOutputTV::getBrightnessCaps(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getColorTemperatureCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; + JsonArray rangeArray; JsonArray pqmodeArray; JsonArray formatArray; JsonArray sourceArray; - JsonObject rangeObj; unsigned int index = 0; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"Brightness"); + tvError_t ret = getParamsCaps("ColorTemperature",info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - rangeObj["from"] = stoi(range[0]); - rangeObj["to"] = stoi(range[1]); - response["rangeInfo"]=rangeObj; + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); + } + + response["options"]=rangeArray; - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + if (((info.pqmodeVector.front()).compare("none") != 0)) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -1091,186 +3586,267 @@ namespace Plugin { } } - uint32_t AVOutputTV::getContrast(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int dimmingMode = 0; + if (parsingGetInputArgument(parameters, "DimmingMode", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - std::string pqmode; - std::string source; - std::string format; - std::string key; - int sourceIndex=0,pqIndex=0,formatIndex=0; - int contrast = 0; + if (getParamIndex("DimmingMode",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (parsingGetInputArgument(parameters, "Contrast",source, pqmode, format) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } + int err = getLocalparam("DimmingMode",indexInfo,dimmingMode, 
PQ_PARAM_DIMMINGMODE); + if( err == 0 ) { + switch(dimmingMode) { + case tvDimmingMode_Fixed: + LOGINFO("DimmingMode Value: Fixed\n"); + response["dimmingMode"] = "Fixed"; + break; - int err = getLocalparam("Contrast",formatIndex,pqIndex,sourceIndex,contrast, PQ_PARAM_CONTRAST); - if( err == 0 ) { - response["contrast"] = contrast; - LOGINFO("Exit : Contrast Value: %d \n", contrast); - returnResponse(true); + case tvDimmingMode_Local: + LOGINFO("DimmingMode Value: Local\n"); + response["dimmingMode"] = "Local"; + break; + + case tvDimmingMode_Global: + LOGINFO("DimmingMode Value: Global\n"); + response["dimmingMode"] = "Global"; + break; + + } + LOGINFO("Exit : DimmingMode Value: %d \n", dimmingMode); + returnResponse(true); + } + else { + returnResponse(false); + } } - else { - returnResponse(false); + else + { + std::string mode; + if (getEnumPQParamString(parameters, "DimmingMode", + PQ_PARAM_DIMMINGMODE, dimmingModeReverseMap, mode)) { + response["dimmingMode"] = mode; + returnResponse(true); + } else { + returnResponse(false); + } } } - uint32_t AVOutputTV::setContrast(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::setBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { - std::string value; - std::string pqmode; - std::string source; - std::string format; - int contrast = 0; - tvError_t ret = tvERROR_NONE; + capDetails_t inputInfo; + int dimmingMode = 0; + tvError_t ret = tvERROR_NONE; + std::string value; - value = parameters.HasLabel("contrast") ? parameters["contrast"].String() : ""; - returnIfParamNotFound(parameters,"contrast"); - contrast = stoi(value); + value = parameters.HasLabel("dimmingMode") ? parameters["dimmingMode"].String() : ""; + returnIfParamNotFound(parameters,"dimmingMode"); - if (validateIntegerInputParameter("Contrast", contrast) != 0) { - LOGERR("Failed in contrast range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (validateInputParameter("DimmingMode",value) != 0) { + LOGERR("%s: Range validation failed for DimmingMode\n", __FUNCTION__); + returnResponse(false); + } + dimmingMode = getDimmingModeIndex(value); - if (parsingSetInputArgument(parameters, "Contrast",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "DimmingMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "Contrast" )) { - LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "DimmingMode" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with %s \n",__FUNCTION__); - ret = SetContrast(contrast); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetTVDimmingMode(value.c_str()); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Contrast\n"); - returnResponse(false); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set DimmingMode\n"); + returnResponse(false); + } + else { + int retval= updateAVoutputTVParam("set","DimmingMode",inputInfo,PQ_PARAM_DIMMINGMODE,(int)dimmingMode); + if(retval != 0 ) { + LOGERR("Failed 
to Save DimmingMode to ssm_data\n"); + returnResponse(false); + } + + LOGINFO("Exit : setDimmingMode successful to value: %d\n", dimmingMode); + returnResponse(true); + } } - else { - int params[3]={0}; - params[0]=contrast; - int retval= updateAVoutputTVParam("set","Contrast",pqmode,source,format,PQ_PARAM_CONTRAST,params); - if(retval != 0 ) { - LOGERR("Failed to Save Contrast to ssm_data\n"); + else + { + int dimmingMode = 0; + tvError_t ret = tvERROR_NONE; + std::string value; + + value = parameters.HasLabel("dimmingMode") ? parameters["dimmingMode"].String() : ""; + returnIfParamNotFound(parameters,"dimmingMode"); + + dimmingMode = getDimmingModeIndex(value); + if (dimmingMode < 0 || dimmingMode > tvDimmingMode_MAX) { + LOGERR("Input value %d is out of range (0 - %d) for DimmingMode", dimmingMode, tvDimmingMode_MAX); returnResponse(false); } - LOGINFO("Exit : setContrast successful to value: %d\n", contrast); - returnResponse(true); + if( isSetRequiredForParam(parameters, "DimmingMode" ) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetTVDimmingMode(value.c_str()); + } + if(ret != tvERROR_NONE) { + LOGERR("Failed to set DimmingMode\n"); + returnResponse(false); + } + else + { + // Update the TV parameter + int retval = updateAVoutputTVParamV2("set", "DimmingMode", parameters, PQ_PARAM_DIMMINGMODE, (int)dimmingMode); + if (retval != 0) { + LOGERR("Failed to Save DimmingMode to ssm_data. retval: %d \n", retval); + returnResponse(false); + } + LOGINFO("Exit : setDimmingMode successful to value: %d \n", dimmingMode); + returnResponse(true); + } } - } - uint32_t AVOutputTV::resetContrast(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::resetBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); + if(m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { - std::string value; - std::string pqmode; - std::string source; - std::string format; - int sourceIndex=0,pqIndex=0,formatIndex=0,contrast=0; - int params[3]={0}; - tvError_t ret = tvERROR_NONE; + capDetails_t inputInfo; + paramIndex_t indexInfo; + std::string dimmingMode; + int dMode=0; + tvError_t ret = tvERROR_NONE; - if (parsingSetInputArgument(parameters, "Contrast",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "DimmingMode", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "Contrast" )) { - LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "DimmingMode" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","Contrast",pqmode,source,format,PQ_PARAM_CONTRAST,params); + int retval= updateAVoutputTVParam("reset","DimmingMode", inputInfo,PQ_PARAM_DIMMINGMODE,dMode); - if(retval != 0 ) { - LOGWARN("Failed to reset Contrast\n"); - returnResponse(false); - } - else { - if (isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("Contrast",formatIndex,pqIndex,sourceIndex,contrast, PQ_PARAM_CONTRAST); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, 
pqIndex,contrast); - ret = SetContrast(contrast); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGERR("Failed to reset ldim\n"); + returnResponse(false); + } + + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("DimmingMode",inputInfo,indexInfo); + int err = getLocalparam("DimmingMode",indexInfo, dMode, PQ_PARAM_DIMMINGMODE); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex, dMode); + getDimmingModeStringFromEnum(dMode,dimmingMode); + ret = SetTVDimmingMode(dimmingMode.c_str()); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetContrast Successful to value : %d \n",contrast); - returnResponse(true); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetBacklightDimmingMode Successful to value : %s \n",dimmingMode.c_str()); + returnResponse(true); + } } + else + { + bool success = resetEnumPQParamToDefault( + parameters, + "DimmingMode", + PQ_PARAM_DIMMINGMODE, + dimmingModeReverseMap, + [](int val, const std::unordered_map<int, std::string>& enumMap) -> tvError_t { + auto it = enumMap.find(val); + if (it != enumMap.end()) { + return SetTVDimmingMode(it->second.c_str()); + } else { + LOGERR("Invalid enum value: %d for DimmingMode\n", val); + return tvERROR_GENERAL; + } + }); + + returnResponse(success); + } } - uint32_t AVOutputTV::getContrastCaps(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getBacklightDimmingModeCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); - std::vector<std::string> range; - std::vector<std::string> pqmode; - std::vector<std::string> source; - std::vector<std::string> format; + capVectors_t info; - JsonArray rangeArray; + JsonArray supportedDimmingModeArray; JsonArray pqmodeArray; JsonArray formatArray; JsonArray sourceArray; - JsonObject rangeObj; unsigned int index = 0; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"Contrast"); + tvError_t ret = getParamsCaps("DimmingMode",info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - rangeObj["from"] = stoi(range[0]); - rangeObj["to"] = stoi(range[1]); - response["rangeInfo"]=rangeObj; + for (index = 0; index < info.rangeVector.size(); index++) { + supportedDimmingModeArray.Add(info.rangeVector[index]); + } + + response["options"]=supportedDimmingModeArray; - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + if (((info.pqmodeVector.front()).compare("none") != 0)) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if 
((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -1279,127 +3855,163 @@ namespace Plugin { } } - uint32_t AVOutputTV::getSaturation(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getSupportedDolbyVisionModes(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry"); + LOGINFO("Entry\n"); + tvDolbyMode_t dvModes[tvMode_Max] = { tvDolbyMode_Invalid }; + tvDolbyMode_t *dvModesPtr[tvMode_Max] = { 0 }; + unsigned short totalAvailable = 0; + for (int i = 0; i < tvMode_Max; i++) + { + dvModesPtr[i] = &dvModes[i]; + } + // Set an initial value to indicate the mode type + dvModes[0] = tvDolbyMode_Dark; + + tvError_t ret = GetTVSupportedDolbyVisionModes(dvModesPtr, &totalAvailable); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + JsonArray SupportedDVModes; + + for(int count = 0;count range; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; JsonArray rangeArray; JsonArray pqmodeArray; @@ -1432,34 +4040,36 @@ namespace Plugin { JsonArray sourceArray; unsigned int index = 0; - JsonObject rangeObj; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"Saturation"); + tvError_t ret = getParamsCaps("DolbyVisionMode", info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - rangeObj["from"] = stoi(range[0]); - rangeObj["to"] = stoi(range[1]); - response["rangeInfo"]=rangeObj; + response["platformSupport"] = (info.isPlatformSupportVector[0].compare("true") == 0 ) ? true : false; + + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); + } - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + response["options"]=rangeArray; + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -1468,186 +4078,135 @@ namespace Plugin { } } - uint32_t AVOutputTV::getSharpness(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getSupportedPictureModes(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry"); - - std::string pqmode; - std::string source; - std::string format; - std::string key; - int sourceIndex=0,pqIndex=0,formatIndex=0; - int sharpness = 0; - - if (parsingGetInputArgument(parameters, "Sharpness",source, pqmode, format) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); + LOGINFO("Entry\n"); + pic_modes_t pictureModes[PIC_MODES_SUPPORTED_MAX]; + pic_modes_t 
*pictureModesPtr[PIC_MODES_SUPPORTED_MAX]={0}; + unsigned short totalAvailable = 0; + for (int i = 0; i < PIC_MODES_SUPPORTED_MAX; i++) + { + pictureModesPtr[i] = &pictureModes[i]; } - - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + tvError_t ret = GetTVSupportedPictureModes(pictureModesPtr,&totalAvailable); + if(ret != tvERROR_NONE) { returnResponse(false); } + else { + JsonArray SupportedPicModes; - int err = getLocalparam("Sharpness",formatIndex,pqIndex,sourceIndex,sharpness, PQ_PARAM_SHARPNESS); - if( err == 0 ) { - response["sharpness"] = sharpness; - LOGINFO("Exit : Sharpness Value: %d \n", sharpness); + for(int count = 0;count range; + std::vector pqmode; + std::vector source; + std::vector format; - if (validateIntegerInputParameter("Sharpness",sharpness) != 0) { - LOGERR("Failed in sharpness range validation:%s", __FUNCTION__); + if (getCapabilitySource(rangeArray) != 0) { returnResponse(false); } + response["options"]=rangeArray; + LOGINFO("Exit\n"); + returnResponse(true); + } - if (parsingSetInputArgument(parameters, "Sharpness",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + uint32_t AVOutputTV::getVideoFormatCaps(const JsonObject& parameters, JsonObject& response) + { - if( !isCapablityCheckPassed( pqmode, source, format, "Sharpness" )) { - LOGERR("%s: CapablityCheck failed for Sharpness\n", __FUNCTION__); - returnResponse(false); - } + JsonArray rangeArray; - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetSharpness(sharpness); - } + capVectors_t info; + + tvError_t ret = getParamsCaps("VideoFormat",info); if(ret != tvERROR_NONE) { - LOGERR("Failed to set Sharpness\n"); returnResponse(false); } else { - int params[3]={0}; - params[0]=sharpness; - int retval= updateAVoutputTVParam("set","Sharpness",pqmode,source,format,PQ_PARAM_SHARPNESS,params); - if(retval != 0 ) { - LOGERR("Failed to Save Sharpness to ssm_data\n"); - returnResponse(false); + if ((info.rangeVector.front()).compare("none") != 0) { + for (unsigned int index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); + } + response["options"]=rangeArray; } - LOGINFO("Exit : setSharpness successful to value: %d\n", sharpness); - returnResponse(true); } - + LOGINFO("Exit\n"); + returnResponse(true); } - uint32_t AVOutputTV::resetSharpness(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getVideoFrameRateCaps(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); - std::string value; - std::string pqmode; - std::string source; - std::string format; - int sourceIndex=0,pqIndex=0,formatIndex=0,sharpness=0; - int params[3]={0}; - tvError_t ret = tvERROR_NONE; - - if (parsingSetInputArgument(parameters, "Sharpness",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + std::vector rangeInfo; + JsonArray rangeArray; - if( !isCapablityCheckPassed( pqmode, source, format, "Sharpness" )) { - LOGERR("%s: CapablityCheck failed for Sharpness\n", __FUNCTION__); + if ( getRangeCapability("VideoFrameRate", rangeInfo) != 0 ) { returnResponse(false); } - int retval= updateAVoutputTVParam("reset","Sharpness",pqmode,source,format,PQ_PARAM_SHARPNESS,params); - - if(retval != 0 ) { - LOGERR("Failed to reset Sharpness\n"); - returnResponse(false); 
- } - else { - if (isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("Sharpness",formatIndex,pqIndex,sourceIndex, sharpness, PQ_PARAM_SHARPNESS); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, pqIndex,sharpness); - ret = SetSharpness(sharpness); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; - } - } + for (unsigned int index = 0; index < rangeInfo.size(); index++) { + rangeArray.Add(std::stof(rangeInfo[index])); } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetSharpness Successful to value : %d \n",sharpness); - returnResponse(true); - } + response["videoFrameRates"] = rangeArray; + returnResponse(true); + } + uint32_t AVOutputTV::getVideoResolutionCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + response["maxResolution"] = "4096*2160p"; + returnResponse(true); } - uint32_t AVOutputTV::getSharpnessCaps(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getPictureModeCaps(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry"); - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; - JsonArray rangeArray; - JsonArray pqmodeArray; - JsonArray formatArray; JsonArray sourceArray; + JsonArray formatArray; + JsonArray rangeArray; - JsonObject rangeObj; - unsigned int index = 0; + capVectors_t info; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"Sharpness"); + unsigned int index = 0; + tvError_t ret = getParamsCaps("PictureMode",info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - rangeObj["from"] = stoi(range[0]); - rangeObj["to"] = stoi(range[1]); - response["rangeInfo"]=rangeObj; - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + if ((info.rangeVector.front()).compare("none") != 0) { + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); } - response["pictureModeInfo"]=pqmodeArray; + response["options"]=rangeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -1656,348 +4215,734 @@ namespace Plugin { } } - uint32_t AVOutputTV::getHue(const JsonObject& parameters, JsonObject& response) + bool AVOutputTV::getPictureModeV2(const JsonObject& parameters, std::string& outMode) { LOGINFO("Entry"); - std::string pqmode; - std::string source; - std::string format; - std::string key; - int sourceIndex=0,pqIndex=0,formatIndex=0; - int hue = 0; + tvVideoSrcType_t source = VIDEO_SOURCE_IP; + tvVideoFormatType_t format = VIDEO_FORMAT_SDR; - if (parsingGetInputArgument(parameters, "Hue",source, pqmode, format) != 0) { - 
LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); + // Parse videoSource + if (!parameters.HasLabel("videoSource") || parameters["videoSource"].String() == "Current") { + GetCurrentVideoSource(&source); + } else { + std::string srcStr = parameters["videoSource"].String(); + if (videoSrcReverseMap.count(srcStr)) { + source = static_cast(videoSrcReverseMap.at(srcStr)); + } else { + LOGERR("Invalid videoSource: %s", srcStr.c_str()); + return false; + } } - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + // Parse videoFormat + if (!parameters.HasLabel("videoFormat") || parameters["videoFormat"].String() == "Current") { + GetCurrentVideoFormat(&format); + if (format == VIDEO_FORMAT_NONE) format = VIDEO_FORMAT_SDR; + } else { + std::string fmtStr = parameters["videoFormat"].String(); + if (videoFormatReverseMap.count(fmtStr)) { + format = static_cast(videoFormatReverseMap.at(fmtStr)); + } else { + LOGERR("Invalid videoFormat: %s", fmtStr.c_str()); + return false; + } } - int err = getLocalparam("Hue",formatIndex,pqIndex,sourceIndex,hue, PQ_PARAM_HUE); - if( err == 0 ) { - response["hue"] = hue; - LOGINFO("Exit : Hue Value: %d \n", hue); - returnResponse(true); - } - else { - returnResponse(false); + // Directly use TR-181 to fetch active picture mode + std::string tr181_param_name = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + + "." + convertSourceIndexToStringV2(source) + + ".Format." + convertVideoFormatToStringV2(format) + + ".PictureModeString"; + + LOGINFO("TR181 Param Name = %s", tr181_param_name.c_str()); + + TR181_ParamData_t param = {0}; + tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); + if (err != tr181Success) { + LOGERR("getLocalParam failed: %d", err); + return false; } + + outMode = param.value; + LOGINFO("Exit: PictureMode = %s", outMode.c_str()); + return true; } - uint32_t AVOutputTV::setHue(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getPictureMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + std::string pictureModeStr; + if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + TR181_ParamData_t param = {0}; - std::string value; - std::string pqmode; - std::string source; - std::string format; - int hue = 0; - tvError_t ret = tvERROR_NONE; - - value = parameters.HasLabel("hue") ? parameters["hue"].String() : ""; - returnIfParamNotFound(parameters,"hue"); - hue = stoi(value); - - if (validateIntegerInputParameter("Hue",hue) != 0) { - LOGERR("Failed in hue range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (parsingGetInputArgument(parameters, "PictureMode", inputInfo) != 0) { + LOGERR("%s: Failed to parse input argument", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "Hue",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("PictureMode", inputInfo, indexInfo) == -1) { + LOGERR("%s: getParamIndex failed", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "Hue" )) { - LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); - returnResponse(false); - } + std::string tr181_param_name = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + + "." 
+ convertSourceIndexToString(indexInfo.sourceIndex) + + ".Format." + convertVideoFormatToString(indexInfo.formatIndex) + + ".PictureModeString"; - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetHue(hue); - } + tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); + if (err != tr181Success) { + returnResponse(false); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Hue\n"); - returnResponse(false); + pictureModeStr = param.value; } - else { - int params[3]={0}; - params[0]=hue; - int retval= updateAVoutputTVParam("set","Hue",pqmode,source,format,PQ_PARAM_HUE,params); - if(retval != 0 ) { - LOGERR("Failed to Save Hue to ssm_data\n"); + else + { + if (!getPictureModeV2(parameters, pictureModeStr)) { returnResponse(false); } - LOGINFO("Exit : setHue successful to value: %d\n", hue); - returnResponse(true); } - + response["pictureMode"] = pictureModeStr; + LOGINFO("Exit: getPictureMode() : %s", pictureModeStr.c_str()); + returnResponse(true); } - uint32_t AVOutputTV::resetHue(const JsonObject& parameters, JsonObject& response) + bool AVOutputTV::setPictureModeV2(const JsonObject& parameters) { + LOGINFO("Entry %s", __FUNCTION__); - LOGINFO("Entry\n"); + if (!parameters.HasLabel("pictureMode")) { + LOGERR("Missing 'pictureMode' in parameters."); + return false; + } - std::string value; - std::string pqmode; - std::string source; - std::string format; - int sourceIndex=0,pqIndex=0,formatIndex=0,hue=0; - int params[3]={0}; - tvError_t ret = tvERROR_NONE; + std::string mode = parameters["pictureMode"].String(); - if (parsingSetInputArgument(parameters, "Hue",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); + // Validate against m_pictureModes + int modeIndex = -1; + for (size_t i = 0; i < m_numPictureModes; ++i) { + auto it = pqModeMap.find(m_pictureModes[i]); + if (it != pqModeMap.end()) { + if (it->second == mode) { + modeIndex = static_cast(i); + LOGINFO("Matched pictureMode '%s' at index %d", mode.c_str(), modeIndex); + break; + } + } else { + LOGERR("pqModeMap does not contain m_pictureModes[%zu] = %d", i, m_pictureModes[i]); + } } - if( !isCapablityCheckPassed( pqmode, source, format, "Hue" )) { - LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); - returnResponse(false); + if (modeIndex == -1) { + LOGERR("Invalid pictureMode: %s", mode.c_str()); + return false; } - int retval= updateAVoutputTVParam("reset","Hue",pqmode,source,format,PQ_PARAM_HUE,params); - - if(retval != 0 ) { - LOGERR("Failed to reset Hue\n"); - returnResponse(false); + // Extract videoSource + std::vector sources; + if (parameters.HasLabel("videoSource")) { + const JsonArray& sourceParam = parameters["videoSource"].Array(); + for (uint32_t i = 0; i < sourceParam.Length(); ++i) { + std::string source = sourceParam[i].Value(); + if (!source.empty()) { + sources.push_back(source); + } + } + } else { + sources.push_back("Global"); + LOGINFO("videoSource not provided, defaulting to 'Global'"); + } + + // Extract videoFormat + std::vector formats; + if (parameters.HasLabel("videoFormat")) { + const JsonArray& formatParam = parameters["videoFormat"].Array(); + for (uint32_t i = 0; i < formatParam.Length(); ++i) { + std::string format = formatParam[i].Value(); + if (!format.empty()) { + formats.push_back(format); + } + } + } else { + formats.push_back("Global"); + LOGINFO("videoFormat not provided, defaulting to 'Global'"); } - else { - if 
(isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("Hue",formatIndex,pqIndex,sourceIndex, hue, PQ_PARAM_HUE); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, pqIndex,hue); - ret = SetHue(hue); + + // Expand 'Global' sources + if (std::find(sources.begin(), sources.end(), "Global") != sources.end()) { + std::unordered_set sourceSet; + for (size_t j = 0; j < m_pictureModeCaps->num_contexts; ++j) { + if (m_pictureModeCaps->contexts[j].pq_mode == m_pictureModes[modeIndex]) { + std::string srcStr = convertSourceIndexToStringV2(m_pictureModeCaps->contexts[j].videoSrcType); + sourceSet.insert(srcStr); } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + } + sources.insert(sources.end(), sourceSet.begin(), sourceSet.end()); + } + + // Expand 'Global' formats + if (std::find(formats.begin(), formats.end(), "Global") != formats.end()) { + std::unordered_set formatSet; + for (size_t j = 0; j < m_pictureModeCaps->num_contexts; ++j) { + if (m_pictureModeCaps->contexts[j].pq_mode == m_pictureModes[modeIndex]) { + std::string fmtStr = convertVideoFormatToStringV2(m_pictureModeCaps->contexts[j].videoFormatType); + formatSet.insert(fmtStr); } } + formats.insert(formats.end(), formatSet.begin(), formatSet.end()); } - if(ret != tvERROR_NONE) { - returnResponse(false); + // Get current context + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + GetCurrentVideoSource(¤tSrc); + GetCurrentVideoFormat(¤tFmt); + if (currentFmt == VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + LOGINFO("Current video source: %s, format: %s", + convertSourceIndexToStringV2(currentSrc).c_str(), + convertVideoFormatToStringV2(currentFmt).c_str()); + + bool contextHandled = false; + + // Iterate through contexts and apply mode + for (size_t i = 0; i < m_pictureModeCaps->num_contexts; ++i) { + const tvConfigContext_t& ctx = m_pictureModeCaps->contexts[i]; + + if (ctx.pq_mode != m_pictureModes[modeIndex]) + continue; + + if (!isValidFormat(formats, ctx.videoFormatType)) + continue; + + if (!isValidSource(sources, ctx.videoSrcType)) + continue; + + std::string srcStr = convertSourceIndexToStringV2(ctx.videoSrcType); + std::string fmtStr = convertVideoFormatToStringV2(ctx.videoFormatType); + + if (ctx.videoSrcType == currentSrc && ctx.videoFormatType == currentFmt) { + if (SetTVPictureMode(mode.c_str()) != tvERROR_NONE) { + LOGERR("SetTVPictureMode failed for mode: %s", mode.c_str()); + continue; + } + } +//TODO:: Revisit this logic. Have to revert if HAL call fails. + std::string tr181Param = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + "." + + srcStr + ".Format." 
+ fmtStr + ".PictureModeString"; + + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, tr181Param.c_str(), mode.c_str()); + if (err != tr181Success) { + LOGERR("setLocalParam failed: %s => %s", tr181Param.c_str(), getTR181ErrorString(err)); + continue; + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, mode.c_str()); + int pqmodeindex = (int)getPictureModeIndex(mode); + SaveSourcePictureMode(ctx.videoSrcType, ctx.videoFormatType, pqmodeindex); + } + + contextHandled = true; } - else { - LOGINFO("Exit : resetHue Successful to value : %d \n",hue); - returnResponse(true); + + if (!contextHandled) { + LOGERR("No valid context found to apply pictureMode: %s", mode.c_str()); + return false; } + LOGINFO("Exit %s: PictureMode '%s' applied successfully.", __FUNCTION__, mode.c_str()); + return true; } - uint32_t AVOutputTV::getHueCaps(const JsonObject& parameters, JsonObject& response) + + uint32_t AVOutputTV::setPictureMode(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry"); - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; + LOGINFO("Entry\n"); + if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + char prevmode[PIC_MODE_NAME_MAX]={0}; + std::string value; + GetTVPictureMode(prevmode); + + tvError_t ret = tvERROR_NONE; + value = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].String() : ""; + returnIfParamNotFound(parameters,"pictureMode"); + + // As only source need to validate, so pqmode and formate passing as currrent + if (parsingSetInputArgument(parameters, "PictureMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - JsonArray rangeArray; - JsonArray pqmodeArray; - JsonArray formatArray; - JsonArray sourceArray; + if (validateInputParameter("PictureMode",value) != 0) { + LOGERR("%s: Range validation failed for PictureMode\n", __FUNCTION__); + returnResponse(false); + } + if( !isCapablityCheckPassed( "PictureMode" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); + returnResponse(false); + } - JsonObject rangeObj; - unsigned int index = 0; + if( isSetRequired("Current",inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with SetTVPictureMode\n"); + ret = SetTVPictureMode(value.c_str()); + } + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + valueVectors_t values; + inputInfo.pqmode = "Current"; + + getSaveConfig("PictureMode" ,inputInfo, values); + + for (int sourceType : values.sourceValues) { + tvVideoSrcType_t source = (tvVideoSrcType_t)sourceType; + for (int formatType : values.formatValues) { + tvVideoFormatType_t format = (tvVideoFormatType_t)formatType; + std::string tr181_param_name = ""; + tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + // framing Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.AVOutput.Source.source_index[x].Format.format_index[x].PictureModeString.value + tr181_param_name += "."+convertSourceIndexToString(source)+"."+"Format."+ + convertVideoFormatToString(format)+"."+"PictureModeString"; + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, tr181_param_name.c_str(), value.c_str()); + if ( err != tr181Success ) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", 
AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, value.c_str()); + int pqmodeindex = (int)getPictureModeIndex(value); + SaveSourcePictureMode(source, format, pqmodeindex); + } + } + } + + //Filmmaker mode telemetry + if(!strncmp(value.c_str(),"filmmaker",strlen(value.c_str())) && strncmp(prevmode,"filmmaker",strlen(prevmode))) { + LOGINFO("%s mode has been enabled",value.c_str()); + } + else if(!strncmp(prevmode,"filmmaker",strlen(prevmode)) && strncmp(value.c_str(),"filmmaker",strlen(value.c_str()))) { + LOGINFO("%s mode has been disabled",prevmode); + } - tvError_t ret = getParamsCaps(range,pqmode,source,format,"Hue"); + LOGINFO("Broadcasting the low latency change event \n"); - if(ret != tvERROR_NONE) { - returnResponse(false); + if(m_isDalsEnabled) { + //GameModebroadcast + if(!strncmp(value.c_str(),"game",strlen(value.c_str())) && strncmp(prevmode,"game",strlen(prevmode))) { + broadcastLowLatencyModeChangeEvent(1); + } + else if(!strncmp(prevmode,"game",strlen(prevmode)) && strncmp(value.c_str(),"game",strlen(value.c_str()))) { + broadcastLowLatencyModeChangeEvent(0); + } + } + + LOGINFO("Exit : Value : %s \n",value.c_str()); + returnResponse(true); + } } else { - rangeObj["from"] = stoi(range[0]); - rangeObj["to"] = stoi(range[1]); - response["rangeInfo"]=rangeObj; - - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + bool success = false; + try { + success = setPictureModeV2(parameters); + } catch (const std::exception& e) { + LOGERR("Exception in setPictureModeV2: %s", e.what()); + } catch (...) { + LOGERR("Unknown exception in setPictureModeV2"); + } + returnResponse(success); + } + } + bool AVOutputTV::resetPictureModeV2(const JsonObject& parameters) + { + LOGINFO("Entry %s\n", __FUNCTION__); + + auto extractList = [](const JsonObject& params, const std::string& key) -> std::vector<std::string> { + std::vector<std::string> result; + if (params.HasLabel(key.c_str())) { + const JsonArray& array = params[key.c_str()].Array(); + for (uint32_t i = 0; i < array.Length(); ++i) { + result.push_back(array[i].Value()); } - response["pictureModeInfo"]=pqmodeArray; + } else { + result.push_back("Global"); + } + return result; + }; + + std::vector<std::string> sources = extractList(parameters, "videoSource"); + std::vector<std::string> formats = extractList(parameters, "videoFormat"); + + auto expandGlobal = [](std::vector<std::string>& vec, const std::unordered_set<std::string>& fullSet) { + if (std::find(vec.begin(), vec.end(), "Global") != vec.end()) { + vec.erase(std::remove(vec.begin(), vec.end(), "Global"), vec.end()); + vec.insert(vec.end(), fullSet.begin(), fullSet.end()); + } + std::unordered_set<std::string> unique(vec.begin(), vec.end()); + vec.assign(unique.begin(), unique.end()); + }; + + // Expand "Global" values + std::unordered_set<std::string> allSources, allFormats; + for (size_t j = 0; j < m_pictureModeCaps->num_contexts; ++j) { + allSources.insert(convertSourceIndexToStringV2(m_pictureModeCaps->contexts[j].videoSrcType)); + allFormats.insert(convertVideoFormatToStringV2(m_pictureModeCaps->contexts[j].videoFormatType)); + } + expandGlobal(sources, allSources); + expandGlobal(formats, allFormats); + + // Get current source & format + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + GetCurrentVideoSource(&currentSrc); + GetCurrentVideoFormat(&currentFmt); + if (currentFmt == VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + bool contextHandled = false; + + for (size_t i = 0; i < m_pictureModeCaps->num_contexts; ++i) { + const tvConfigContext_t& ctx = 
m_pictureModeCaps->contexts[i]; + + if (!isValidSource(sources, ctx.videoSrcType) || !isValidFormat(formats, ctx.videoFormatType)) + continue; + + std::string tr181Param = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + "." + + convertSourceIndexToStringV2(ctx.videoSrcType) + ".Format." + + convertVideoFormatToStringV2(ctx.videoFormatType) + ".PictureModeString"; + + // Clear override + tr181ErrorCode_t err = clearLocalParam(rfc_caller_id, tr181Param.c_str()); + if (err != tr181Success) { + LOGERR("clearLocalParam failed for %s: %s", tr181Param.c_str(), getTR181ErrorString(err)); + continue; + } + + // Read saved TR-181 value + TR181_ParamData_t param = {0}; + err = getLocalParam(rfc_caller_id, tr181Param.c_str(), &param); + if (err != tr181Success || strlen(param.value) == 0) { + LOGWARN("getLocalParam failed or empty for %s", tr181Param.c_str()); + continue; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + + // Apply to hardware if current context matches + if (ctx.videoSrcType == currentSrc && ctx.videoFormatType == currentFmt) { + + tvError_t ret = SetTVPictureMode(param.value); + if (ret != tvERROR_NONE) { + LOGERR("SetTVPictureMode failed for %s", param.value); + continue; } - response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + + // Save to internal config + int pqmodeIndex = static_cast<int>(getPictureModeIndex(param.value)); + SaveSourcePictureMode(ctx.videoSrcType, ctx.videoFormatType, pqmodeIndex); + contextHandled = true; + } + + if (!contextHandled) { + LOGERR("No valid pictureMode context matched to reset.\n"); + return false; + } + + LOGINFO("resetPictureModeV2: Exit - PictureMode reset successfully.\n"); + return true; + } + + uint32_t AVOutputTV::resetPictureMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + tr181ErrorCode_t err = tr181Success; + TR181_ParamData_t param = {0}; + + valueVectors_t values; + capDetails_t inputInfo; + + // As only source needs to be validated, pqmode and format are passed as Current + if (parsingSetInputArgument(parameters, "PictureMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "PictureMode",inputInfo )) { + LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); + returnResponse(false); + } + inputInfo.pqmode = "Current"; + getSaveConfig("PictureMode", inputInfo, values); + + for (int source : values.sourceValues) { + tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; + for (int format : values.formatValues) { + tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; + std::string tr181_param_name = ""; + tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ + convertVideoFormatToString(formatType)+"."+"PictureModeString"; + + err = clearLocalParam(rfc_caller_id, tr181_param_name.c_str()); + if ( err != tr181Success ) { + LOGWARN("clearLocalParam for %s Failed : %s\n", tr181_param_name.c_str(), getTR181ErrorString(err)); + returnResponse(false); + } + else { + err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), &param); + if ( tr181Success == err ) { + //get current source and if matches save for 
that alone + tvVideoSrcType_t current_source = VIDEO_SOURCE_IP; + GetCurrentVideoSource(¤t_source); + + tvVideoFormatType_t current_format = VIDEO_FORMAT_NONE; + GetCurrentVideoFormat(¤t_format); + if( current_format == VIDEO_FORMAT_NONE) { + current_format = VIDEO_FORMAT_SDR; + } + + if (current_source == sourceType && current_format == formatType) { + + tvError_t ret = SetTVPictureMode(param.value); + if(ret != tvERROR_NONE) { + LOGWARN("Picture Mode set failed: %s\n",getErrorString(ret).c_str()); + returnResponse(false); + } + else { + LOGINFO("Exit : Picture Mode reset successfully, value: %s\n", param.value); + } + } + int pqmodeindex = (int)getPictureModeIndex(param.value); + SaveSourcePictureMode(sourceType, formatType, pqmodeindex); + } + else { + LOGWARN("getLocalParam for %s failed\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + returnResponse(false); + } + } } - response["videoFormatInfo"]=formatArray; } - LOGINFO("Exit\n"); returnResponse(true); } + else + { + bool success = resetPictureModeV2(parameters); + returnResponse(success); + } } - uint32_t AVOutputTV::getColorTemperature(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::signalFilmMakerMode(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry"); + LOGINFO("Entry\n"); + std::string value; + char prevmode[PIC_MODE_NAME_MAX]={0}; + GetTVPictureMode(prevmode); - std::string pqmode; - std::string source; - std::string format; - std::string key; - int sourceIndex=0,pqIndex=0,formatIndex=0; - int colortemp = 0; + value = parameters.HasLabel("signalFilmMakerMode") ? parameters["signalFilmMakerMode"].String() : ""; + returnIfParamNotFound(parameters, "signalFilmMakerMode"); - if (parsingGetInputArgument(parameters, "ColorTemperature",source, pqmode, format) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); + if(strncmp(value.c_str(),"ON",strlen(value.c_str())) == 0) { + NotifyFilmMakerModeChange(tvContentType_FMM); + LOGINFO(" enabling Film makermode \n"); } - - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + else { + LOGINFO(" disabling Film makermode \n"); + NotifyFilmMakerModeChange(tvContentType_NONE); } + returnResponse(true); + } - int err = getLocalparam("ColorTemp",formatIndex,pqIndex,sourceIndex,colortemp,PQ_PARAM_COLOR_TEMPERATURE); - if( err == 0 ) { - switch(colortemp) { - case tvColorTemp_STANDARD: - LOGINFO("Color Temp Value: Standard\n"); - response["colorTemperature"] = "Standard"; - break; + uint32_t AVOutputTV::setLowLatencyState(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_lowLatencyStateStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + capDetails_t inputInfo; + int lowLatencyIndex = 0,prevLowLatencyIndex = 0; + tvError_t ret = tvERROR_NONE; - case tvColorTemp_WARM: - LOGINFO("Color Temp Value: Warm\n"); - response["colorTemperature"] = "Warm"; - break; + ret = GetLowLatencyState(&prevLowLatencyIndex); + if(ret != tvERROR_NONE) { + LOGERR("Get previous low latency state failed\n"); + returnResponse(false); + } - case tvColorTemp_COLD: - LOGINFO("Color Temp Value: Cold\n"); - response["colorTemperature"] = "Cold"; - break; + value = parameters.HasLabel("LowLatencyState") ? 
parameters["LowLatencyState"].String() : ""; + returnIfParamNotFound(parameters,"LowLatencyState"); + lowLatencyIndex = std::stoi(value); + + if (validateIntegerInputParameter("LowLatencyState",lowLatencyIndex) != 0) { + LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters, "LowLatencyState",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "LowLatencyState" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for LowLatencyState\n", __FUNCTION__); + returnResponse(false); + } + + int retval= updateAVoutputTVParam("set","LowLatencyState",inputInfo,PQ_PARAM_LOWLATENCY_STATE,lowLatencyIndex); + if(retval != 0 ) { + LOGERR("Failed to SaveLowLatency to ssm_data\n"); + returnResponse(false); + } else { + + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with setLowLatencyState\n"); + ret = SetLowLatencyState( lowLatencyIndex ); + } - case tvColorTemp_USER: - LOGINFO("Color Temp Value: User Defined\n"); - response["colorTemperature"] = "User Defined"; - break; + if(ret != tvERROR_NONE) { + LOGERR("Failed to set low latency. Fallback to previous state %d\n", prevLowLatencyIndex); + retval=updateAVoutputTVParam("set","LowLatencyState",inputInfo,PQ_PARAM_LOWLATENCY_STATE,prevLowLatencyIndex); + if(retval != 0 ){ + LOGERR("Fallback to previous low latency state %d failed.\n", prevLowLatencyIndex); + } + returnResponse(false); + } - default: - LOGINFO("Color Temp Value: Standard\n"); - response["colorTemperature"] = "Standard"; - break; + LOGINFO("Exit : setLowLatency successful to value: %d\n", lowLatencyIndex); + returnResponse(true); } - LOGINFO("Exit : ColorTemperature Value: %d \n", colortemp); - returnResponse(true); - } - else { - returnResponse(false); } - } + else + { + std::string value; + int lowLatencyIndex = 0,prevLowLatencyIndex = 0; + tvError_t ret = tvERROR_NONE; - uint32_t AVOutputTV::setColorTemperature(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); + ret = GetLowLatencyState(&prevLowLatencyIndex); + if(ret != tvERROR_NONE) { + LOGERR("Get previous low latency state failed\n"); + returnResponse(false); + } - std::string value; - std::string pqmode; - std::string source; - std::string format; - tvColorTemp_t colortemp = tvColorTemp_MAX; - tvError_t ret = tvERROR_NONE; + value = parameters.HasLabel("LowLatencyState") ? parameters["LowLatencyState"].String() : ""; + returnIfParamNotFound(parameters,"LowLatencyState"); + lowLatencyIndex = std::stoi(value); + if (lowLatencyIndex < 0 || lowLatencyIndex > m_maxlowLatencyState) { + LOGERR("Input value %d is out of range (0 - %d) for LowLatencyState", lowLatencyIndex, m_maxlowLatencyState); + returnResponse(false); + } - value = parameters.HasLabel("colorTemperature") ? 
parameters["colorTemperature"].String() : ""; - returnIfParamNotFound(parameters,"colorTemperature"); - if(!value.compare("Standard")) { - colortemp = tvColorTemp_STANDARD; - } - else if (!value.compare("Warm")) { - colortemp = tvColorTemp_WARM; - } - else if (!value.compare("Cold")) { - colortemp = tvColorTemp_COLD; - } - else if (!value.compare("User Defined")) { - colortemp = tvColorTemp_USER; - } - else { - returnResponse(false); - } + int retval= updateAVoutputTVParamV2("set","LowLatencyState",parameters,PQ_PARAM_LOWLATENCY_STATE,lowLatencyIndex); + if(retval != 0 ) { + LOGERR("Failed to SaveLowLatency to ssm_data\n"); + returnResponse(false); + } + else + { + if(isSetRequiredForParam(parameters, "LowLatencyState")) + { + LOGINFO("Proceed with setLowLatencyState\n"); + ret = SetLowLatencyState( lowLatencyIndex ); + } + if(ret != tvERROR_NONE) { + LOGERR("Failed to set low latency. Fallback to previous state %d\n", prevLowLatencyIndex); + retval=updateAVoutputTVParamV2("set","LowLatencyState",parameters,PQ_PARAM_LOWLATENCY_STATE, prevLowLatencyIndex); + if(retval != 0 ){ + LOGERR("Fallback to previous low latency state %d failed.\n", prevLowLatencyIndex); + } + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "ColorTemperature",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); + LOGINFO("Exit : setLowLatency successful to value: %d\n", lowLatencyIndex); + returnResponse(true); + } } + } - if( !isCapablityCheckPassed( pqmode, source, format, "ColorTemperature" )) { - LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); - returnResponse(false); - } + uint32_t AVOutputTV::getLowLatencyState(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + if(m_lowLatencyStateStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int lowlatencystate = 0; - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetColorTemperature((tvColorTemp_t)colortemp); - } + if (parsingGetInputArgument(parameters, "LowLatencyState",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } + if (getParamIndex("LowLatencyState",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set ColorTemperature\n"); - returnResponse(false); + int err = getLocalparam("LowLatencyState", indexInfo ,lowlatencystate, PQ_PARAM_LOWLATENCY_STATE); + if( err == 0 ) { + response["lowLatencyState"] = std::to_string(lowlatencystate); + LOGINFO("Exit : LowLatencyState Value: %d \n", lowlatencystate); + returnResponse(true); + } + else { + returnResponse(false); + } } - else { - int params[3]={0}; - params[0]=(int)colortemp; - int retval= updateAVoutputTVParam("set","ColorTemp",pqmode,source,format,PQ_PARAM_COLOR_TEMPERATURE,params); - if(retval != 0 ) { - LOGERR("Failed to Save ColorTemperature to ssm_data\n"); + else + { + int lowlatencystate = 0; + if (getPQParamFromContext(parameters, "LowLatencyState", PQ_PARAM_LOWLATENCY_STATE, lowlatencystate)) { + response["lowLatencyState"] = std::to_string(lowlatencystate); + LOGINFO("Exit : LowLatencyState Value: %d", lowlatencystate); + returnResponse(true); + } else { + LOGERR("Failed to get LowLatencyState"); returnResponse(false); } - LOGINFO("Exit : setColorTemperature successful to value: %d\n", 
colortemp); - returnResponse(true); } } - uint32_t AVOutputTV::resetColorTemperature(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::resetLowLatencyState(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); - std::string value; - std::string pqmode; - std::string source; - std::string format; - int sourceIndex=0,pqIndex=0,formatIndex=0,colortemp=0; - int params[3]={0}; + capDetails_t inputInfo; + paramIndex_t indexInfo; + int lowlatencystate=0; tvError_t ret = tvERROR_NONE; - if (parsingSetInputArgument(parameters, "ColorTemperature",source, pqmode, format) != 0) { + if (parsingSetInputArgument(parameters, "LowLatencyState", inputInfo) != 0) { LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); returnResponse(false); } - if( !isCapablityCheckPassed( pqmode, source, format, "ColorTemperature" )) { - LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); + if( !isCapablityCheckPassed( "LowLatencyState" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for LowLatencyState\n", __FUNCTION__); returnResponse(false); } - int retval= updateAVoutputTVParam("reset","ColorTemp",pqmode,source,format,PQ_PARAM_COLOR_TEMPERATURE,params); - + int retval= updateAVoutputTVParam("reset","LowLatencyState", inputInfo,PQ_PARAM_LOWLATENCY_STATE,lowlatencystate); if(retval != 0 ) { - LOGERR("Failed to reset ColorTemperature\n"); + LOGERR("Failed to clear Lowlatency from ssmdata and localstore\n"); returnResponse(false); } else { - if (isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("ColorTemp",formatIndex,pqIndex,sourceIndex, colortemp, PQ_PARAM_COLOR_TEMPERATURE); + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("LowLatencyState",inputInfo, indexInfo); + int err = getLocalparam("LowLatencyState",indexInfo, lowlatencystate, PQ_PARAM_LOWLATENCY_STATE); if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, pqIndex, colortemp); - ret = SetColorTemperature((tvColorTemp_t)colortemp); + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex, lowlatencystate); + ret = SetLowLatencyState(lowlatencystate); } else { LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); @@ -2010,18 +4955,15 @@ namespace Plugin { returnResponse(false); } else { - LOGINFO("Exit : resetColorTemperature Successful to value : %d \n",colortemp); + LOGINFO("Exit : resetLowLatency Successful to value : %d \n",lowlatencystate); returnResponse(true); } } - uint32_t AVOutputTV::getColorTemperatureCaps(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getLowLatencyStateCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; JsonArray rangeArray; JsonArray pqmodeArray; @@ -2030,33 +4972,32 @@ namespace Plugin { unsigned int index = 0; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"ColorTemperature"); + tvError_t ret = getParamsCaps("LowLatencyState", info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - for (index = 0; index < range.size(); index++) { - 
rangeArray.Add(range[index]); + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(stoi(info.rangeVector[index])); } - response["options"]=rangeArray; - - if (((pqmode.front()).compare("none") != 0)) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + response["LowLatencyInfo"]=rangeArray; + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -2065,273 +5006,486 @@ namespace Plugin { } } - uint32_t AVOutputTV::getBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getCMS(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int level = 0; + tvPQParameterIndex_t tvPQEnum; - std::string pqmode; - std::string source; - std::string format; - std::string key; - int sourceIndex=0,pqIndex=0,formatIndex=0; - int dimmingMode = 0; + inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + inputInfo.component = parameters.HasLabel("component") ? 
parameters["component"].String() : ""; - if (parsingGetInputArgument(parameters, "DimmingMode",source, pqmode, format) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if( inputInfo.color.empty() || inputInfo.component.empty() ) { + LOGERR("%s : Color/Component param not found!!!\n",__FUNCTION__); + returnResponse(false); + } - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } + if (isPlatformSupport("CMS") != 0) { + returnResponse(false); + } - int err = getLocalparam("DimmingMode",formatIndex,pqIndex,sourceIndex,dimmingMode, PQ_PARAM_DIMMINGMODE); - if( err == 0 ) { - switch(dimmingMode) { - case tvDimmingMode_Fixed: - LOGINFO("DimmingMode Value: Fixed\n"); - response["DimmingMode"] = "fixed"; - break; + if (parsingGetInputArgument(parameters, "CMS", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - case tvDimmingMode_Local: - LOGINFO("DimmingMode Value: Local\n"); - response["DimmingMode"] = "local"; - break; + if (getParamIndex("CMS",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - case tvDimmingMode_Global: - LOGINFO("DimmingMode Value: Global\n"); - response["DimmingMode"] = "global"; - break; + if ( convertCMSParamToPQEnum(inputInfo.component,inputInfo.color,tvPQEnum) != 0 ) { + LOGINFO("%s: Component/Color Param Not Found \n",__FUNCTION__); + returnResponse(false); + } + int err = getLocalparam("CMS",indexInfo,level,tvPQEnum); + if( err == 0 ) { + response["level"] = level; + LOGINFO("Exit : params Value: %d \n", level); + returnResponse(true); + } + else { + returnResponse(false); } - LOGINFO("Exit : DimmingMode Value: %d \n", dimmingMode); - returnResponse(true); } - else { - returnResponse(false); + else + { + // Extract color and component from input parameters + std::string color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + std::string component = parameters.HasLabel("component") ? 
parameters["component"].String() : ""; + + if (color.empty() || component.empty()) { + LOGERR("%s: Missing color/component parameter", __FUNCTION__); + returnResponse(false); + } + + tvPQParameterIndex_t pqEnum; + if (convertCMSParamToPQEnum(component, color, pqEnum) != 0) { + LOGERR("%s: Invalid color/component combination", __FUNCTION__); + returnResponse(false); + } + + // Get valid context from parameters using your existing context helper + tvConfigContext_t validContext = getValidContextFromGetParameters(parameters, "CMS"); + + if ((validContext.videoSrcType == VIDEO_SOURCE_ALL && + validContext.videoFormatType == VIDEO_FORMAT_NONE && + validContext.pq_mode == PQ_MODE_INVALID)) + { + LOGERR("No valid context found for CMS get"); + returnResponse(false); + } + + // Prepare paramIndex from context + paramIndex_t indexInfo = { + .sourceIndex = static_cast(validContext.videoSrcType), + .pqmodeIndex = static_cast(validContext.pq_mode), + .formatIndex = static_cast(validContext.videoFormatType) + }; + + int level = 0; + int err = getLocalparam("CMS", indexInfo, level, pqEnum); + if (err == 0) { + response["level"] = level; + LOGINFO("Exit: getCMS success, value: %d", level); + returnResponse(true); + } else { + LOGERR("Failed to get CMS param from local storage"); + returnResponse(false); + } } } - uint32_t AVOutputTV::setBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::setCMS(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int level = 0,retVal = 0; + tvPQParameterIndex_t tvPQEnum; + tvDataComponentColor_t colorEnum=tvDataColor_NONE; + std::string color,component; + tvError_t ret = tvERROR_NONE; + std::string value; + + inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + inputInfo.component = parameters.HasLabel("component") ? parameters["component"].String() : ""; + if( inputInfo.color.empty() || inputInfo.component.empty() ) { + LOGERR("%s : Color/Component param not found!!!\n",__FUNCTION__); + returnResponse(false); + } - std::string value; - std::string pqmode; - std::string source; - std::string format; - int dimmingMode = 0; - tvError_t ret = tvERROR_NONE; + if (isPlatformSupport("CMS") != 0) { + returnResponse(false); + } - value = parameters.HasLabel("DimmingMode") ? parameters["DimmingMode"].String() : ""; - returnIfParamNotFound(parameters,"DimmingMode"); + value = parameters.HasLabel("level") ? 
parameters["level"].String() : ""; + returnIfParamNotFound(parameters,"level"); + level = std::stoi(value); - if (validateInputParameter("DimmingMode",value) != 0) { - LOGERR("%s: Range validation failed for DimmingMode\n", __FUNCTION__); - returnResponse(false); - } - dimmingMode = getDimmingModeIndex(value); + if (validateCMSParameter(inputInfo.component,level) != 0) { + LOGERR("%s: CMS Failed in range validation", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "DimmingMode",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters,"CMS",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( pqmode, source, format, "DimmingMode" )) { - LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "CMS",inputInfo )) { + LOGERR("%s: CapablityCheck failed for CMS\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetTVDimmingMode(value.c_str()); - } + if ( convertCMSParamToPQEnum(inputInfo.component,inputInfo.color,tvPQEnum) != 0 ) { + LOGERR("%s: %s/%s Param Not Found \n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str()); + returnResponse(false); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set DimmingMode\n"); - returnResponse(false); - } - else { - int params[3]={0}; - params[0]=(int)dimmingMode; - int retval= updateAVoutputTVParam("set","DimmingMode",pqmode,source,format,PQ_PARAM_DIMMINGMODE,params); - if(retval != 0 ) { - LOGERR("Failed to Save DimmingMode to ssm_data\n"); + retVal = getCMSColorEnumFromString(inputInfo.color,colorEnum); + if( retVal == -1) { + LOGERR("%s: Invalid Color : %s\n",__FUNCTION__,inputInfo.color.c_str()); returnResponse(false); } - LOGINFO("Exit : setDimmingMode successful to value: %d\n", dimmingMode); - returnResponse(true); + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + tvError_t ret = SetCMSState(true); + if(ret != tvERROR_NONE) { + LOGWARN("CMS enable failed\n"); + returnResponse(false); + } + + if(inputInfo.component.compare("Saturation") == 0) + ret = SetCurrentComponentSaturation(colorEnum, level); + else if(inputInfo.component.compare("Hue") == 0 ) + ret = SetCurrentComponentHue(colorEnum,level); + else if( inputInfo.component.compare("Luma") == 0 ) + ret = SetCurrentComponentLuma(colorEnum,level); + } + + if(ret != tvERROR_NONE) { + LOGERR("Failed to set CMS\n"); + returnResponse(false); + } + else { + std::string cmsParam; + cmsParam = inputInfo.color+"."+inputInfo.component; + + retVal= updateAVoutputTVParam("set","CMS",inputInfo,tvPQEnum,level); + if(retVal != 0 ) { + LOGERR("%s : Failed to Save CMS %s/%s(%s) to ssm_data\n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str(),cmsParam.c_str()); + returnResponse(false); + } + LOGINFO("Exit : setCMS %s/%s successful to value: %d\n", inputInfo.component.c_str(),inputInfo.color.c_str(),level); + returnResponse(true); + } + } + else + { + bool status = setCMSParam(parameters); + if (status) { + LOGINFO("setCMS success"); + returnResponse(true); + } else { + LOGERR("setCMS failed"); + returnResponse(false); + } } } - uint32_t AVOutputTV::resetBacklightDimmingMode(const JsonObject& 
parameters, JsonObject& response) + uint32_t AVOutputTV::resetCMS(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int retVal = 0; + std::string color,component; + tvError_t ret = tvERROR_NONE; + JsonArray sourceArray; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray colorArray; + JsonArray componentArray; + + if (isPlatformSupport("CMS") != 0) { + returnResponse(false); + } - std::string value; - std::string pqmode; - std::string source; - std::string format;std::string dimmingMode; - int sourceIndex=0,pqIndex=0,formatIndex=0,dMode=0; - int params[3]={0}; - tvError_t ret = tvERROR_NONE; + pqmodeArray = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].Array() : JsonArray(); + for (int i = 0; i < pqmodeArray.Length(); ++i) { + inputInfo.pqmode += pqmodeArray[i].String(); + if (i != (pqmodeArray.Length() - 1) ) { + inputInfo.pqmode += ","; + } + } - if (parsingSetInputArgument(parameters, "DimmingMode",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + sourceArray = parameters.HasLabel("videoSource") ? parameters["videoSource"].Array() : JsonArray(); + for (int i = 0; i < sourceArray.Length(); ++i) { + inputInfo.source += sourceArray[i].String(); + if (i != (sourceArray.Length() - 1) ) { + inputInfo.source += ","; + } + } - if( !isCapablityCheckPassed( pqmode, source, format, "DimmingMode" )) { - LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); - returnResponse(false); - } + formatArray = parameters.HasLabel("videoFormat") ? parameters["videoFormat"].Array() : JsonArray(); + for (int i = 0; i < formatArray.Length(); ++i) { + inputInfo.format += formatArray[i].String(); + if (i != (formatArray.Length() - 1) ) { + inputInfo.format += ","; + } + } + colorArray = parameters.HasLabel("color") ? parameters["color"].Array() : JsonArray(); + for (int i = 0; i < colorArray.Length(); ++i) { + inputInfo.color += colorArray[i].String(); + if (i != (colorArray.Length() - 1) ) { + inputInfo.color += ","; + } + } + componentArray = parameters.HasLabel("component") ? parameters["component"].Array() : JsonArray(); + for (int i = 0; i < componentArray.Length(); ++i) { + inputInfo.component += componentArray[i].String(); + if (i != (componentArray.Length() - 1) ) { + inputInfo.component += ","; + } + } + if (inputInfo.source.empty()) { + inputInfo.source = "Global"; + } + if (inputInfo.pqmode.empty()) { + inputInfo.pqmode = "Global"; + } + if (inputInfo.format.empty()) { + inputInfo.format = "Global"; + } + if (inputInfo.color.empty()) { + inputInfo.color = "Global"; + } + if (inputInfo.component.empty()) { + inputInfo.component = "Global"; + } - int retval= updateAVoutputTVParam("reset","DimmingMode",pqmode,source,format,PQ_PARAM_DIMMINGMODE,params); + if (convertToValidInputParameter("CMS", inputInfo) != 0) { + LOGERR("%s: Failed to convert the input paramters. 
\n", __FUNCTION__); + returnResponse(false); + } - if(retval != 0 ) { - LOGERR("Failed to reset ldim\n"); - returnResponse(false); - } + if( !isCapablityCheckPassed( "CMS" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for CMS\n", __FUNCTION__); + returnResponse(false); + } - else { - if (isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("DimmingMode",formatIndex,pqIndex,sourceIndex, dMode, PQ_PARAM_DIMMINGMODE); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, pqIndex, dMode); - getDimmingModeStringFromEnum(dMode,dimmingMode); - ret = SetTVDimmingMode(dimmingMode.c_str()); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + tvError_t ret = SetCMSState(false); + if(ret != tvERROR_NONE) { + LOGWARN("CMS disable failed\n"); + returnResponse(false); } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); + if(ret != tvERROR_NONE) { + LOGERR("%s : Failed to setCMSState\n",__FUNCTION__); + returnResponse(false); + } + else { + int cms = 0; + retVal= updateAVoutputTVParam("reset","CMS",inputInfo,PQ_PARAM_CMS_SATURATION_RED,cms); + if(retVal != 0 ) { + LOGERR("%s : Failed to Save CMS %s/%s to ssm_data\n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str() ); + returnResponse(false); + } + returnResponse(true); + } } - else { - LOGINFO("Exit : resetBacklightDimmingMode Successful to value : %s \n",dimmingMode.c_str()); + else + { + if (isSetRequiredForParam(parameters, "CMS")) { + LOGINFO("Proceed with SetCMSState \n"); + tvError_t ret = SetCMSState(false); + if(ret != tvERROR_NONE) { + LOGWARN("CMS disable failed\n"); + returnResponse(false); + } + } + int cms = 0; + int retVal= updateAVoutputTVParamV2("reset","CMS",parameters,PQ_PARAM_CMS,cms); + if(retVal != 0 ) { + LOGERR("%s : Failed to Save CMS to ssm_data\n",__FUNCTION__); + returnResponse(false); + } returnResponse(true); } } - uint32_t AVOutputTV::getBacklightDimmingModeCaps(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getCMSCapsV2(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry: getCMSCapsV2"); + + int max_hue = 0, max_saturation = 0, max_luma = 0; + tvDataComponentColor_t* colorArray = nullptr; + tvComponentType_t* componentArray = nullptr; + size_t num_color = 0, num_component = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t ret = GetCMSCaps(&max_hue, &max_saturation, &max_luma, + &colorArray, &componentArray, + &num_color, &num_component, &context_caps); + + if (ret != tvERROR_NONE) { + LOGERR("GetCMSCaps failed with error: %d", ret); + returnResponse(false); + } + response["platformSupport"] = true; + + // Range Info + JsonObject rangeHue, rangeSaturation, rangeLuma; + rangeHue["from"] = 0; + rangeHue["to"] = max_hue; + rangeSaturation["from"] = 0; + rangeSaturation["to"] = max_saturation; + rangeLuma["from"] = 0; + rangeLuma["to"] = max_luma; + + response["rangeHue"] = rangeHue; + response["rangeSaturation"] = rangeSaturation; + response["rangeLuma"] = rangeLuma; + + // Color Info + JsonArray colorJson; + for (size_t i = 0; i < num_color; ++i) { + colorJson.Add(getCMSColorStringFromEnum(colorArray[i])); + } + response["color"] = colorJson; + + // Component Info + JsonArray componentJson; + for 
(size_t i = 0; i < num_component; ++i) { + componentJson.Add(getCMSComponentStringFromEnum(componentArray[i])); + } + response["component"] = componentJson; + response["context"] = parseContextCaps(context_caps); +#if HAL_NOT_READY + // Clean up dynamic memory + delete[] colorArray; + delete[] componentArray; +#endif + LOGINFO("Exit: getCMSCapsV2"); + returnResponse(true); + } + + uint32_t AVOutputTV::getCMSCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); - std::vector supportedDimmingMode; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; - JsonArray supportedDimmingModeArray; + JsonArray rangeArray; JsonArray pqmodeArray; JsonArray formatArray; JsonArray sourceArray; + JsonArray colorArray; + JsonArray componentArray; + JsonObject componentSaturationRangeInfo; + JsonObject componentHueRangeInfo; + JsonObject componentLumaRangeInfo; unsigned int index = 0; - tvError_t ret = getParamsCaps(supportedDimmingMode,pqmode,source,format,"DimmingMode"); + tvError_t ret = getParamsCaps("CMS",info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - for (index = 0; index < supportedDimmingMode.size(); index++) { - supportedDimmingModeArray.Add(supportedDimmingMode[index]); - } - response["options"]=supportedDimmingModeArray; + response["platformSupport"] = (info.isPlatformSupportVector[0].compare("true") == 0) ? true : false; - if (((pqmode.front()).compare("none") != 0)) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); - } - response["pictureModeInfo"]=pqmodeArray; - } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); - } - response["videoSourceInfo"]=sourceArray; - } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); - } - response["videoFormatInfo"]=formatArray; - } - LOGINFO("Exit\n"); - returnResponse(true); - } - } + componentSaturationRangeInfo["from"] = stoi(info.rangeVector[0]); + componentSaturationRangeInfo["to"] = stoi(info.rangeVector[1]); + response["componentSaturationRangeInfo"]=componentSaturationRangeInfo; + + componentHueRangeInfo["from"] = stoi(info.rangeVector[2]); + componentHueRangeInfo["to"] = stoi(info.rangeVector[3]); + response["componentHueRangeInfo"]=componentHueRangeInfo; - uint32_t AVOutputTV::getSupportedDolbyVisionModes(const JsonObject& parameters, JsonObject& response) - { + componentLumaRangeInfo["from"] = stoi(info.rangeVector[4]); + componentLumaRangeInfo["to"] = stoi(info.rangeVector[5]); + response["componentLumaRangeInfo"]=componentLumaRangeInfo; - LOGINFO("Entry\n"); - tvDolbyMode_t dvModes[tvMode_Max]; - tvDolbyMode_t *dvModesPtr = dvModes; // Pointer to statically allocated tvDolbyMode_t array - unsigned short totalAvailable = 0; - // Set an initial value to indicate the mode type - dvModes[0] = tvDolbyMode_Dark; + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); + } + response["pictureModeInfo"]=pqmodeArray; + } + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); + } + response["videoSourceInfo"]=sourceArray; + } + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + 
formatArray.Add(info.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; + } - tvError_t ret = GetTVSupportedDolbyVisionModes(&dvModesPtr, &totalAvailable); - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - JsonArray SupportedDVModes; + if ((info.colorVector.front()).compare("none") != 0) { + for (index = 0; index < info.colorVector.size(); index++) { + colorArray.Add(info.colorVector[index]); + } + response["colorInfo"]=colorArray; + } - for(int count = 0;count range; - std::vector pqmode; - std::vector source; - std::vector format; - std::string isPlatformSupport; - std::vector indexInfo; + capVectors_t info; JsonArray rangeArray; JsonArray pqmodeArray; @@ -2470,564 +5616,360 @@ namespace Plugin { unsigned int index = 0; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"DolbyVisionMode", isPlatformSupport, indexInfo); + tvError_t ret = getParamsCaps("HDRMode", info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - response["platformSupport"] = (isPlatformSupport.compare("true") == 0 ) ? true : false; + response["platformSupport"] = (info.isPlatformSupportVector[0].compare("true") == 0 ) ? true : false; - for (index = 0; index < range.size(); index++) { - rangeArray.Add(range[index]); + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); } response["options"]=rangeArray; - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); - } - response["videoSourceInfo"]=sourceArray; - } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); - } - response["videoFormatInfo"]=formatArray; - } - LOGINFO("Exit\n"); - returnResponse(true); - } - } - - uint32_t AVOutputTV::getSupportedPictureModes(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - pic_modes_t *pictureModes; - unsigned short totalAvailable = 0; - tvError_t ret = GetTVSupportedPictureModes(&pictureModes,&totalAvailable); - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - JsonArray SupportedPicModes; - - for(int count = 0;count range; - std::vector pqmode; - std::vector source; - std::vector format; - - if (getCapabilitySource(rangeArray) != 0) { - returnResponse(false); - } - response["options"]=rangeArray; - LOGINFO("Exit\n"); - returnResponse(true); - } - - uint32_t AVOutputTV::getVideoFormatCaps(const JsonObject& parameters, JsonObject& response) - { - - JsonArray rangeArray; - - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; - - tvError_t ret = getParamsCaps(range,pqmode,source,format,"VideoFormat"); - - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - if ((range.front()).compare("none") != 0) { - for (unsigned int index = 0; index < range.size(); index++) { - rangeArray.Add(range[index]); - } - response["options"]=rangeArray; - } - } - LOGINFO("Exit\n"); - returnResponse(true); - } - - uint32_t AVOutputTV::getVideoFrameRateCaps(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - - std::vector rangeInfo; - 
JsonArray rangeArray; - - if ( getRangeCapability("VideoFrameRate", rangeInfo) != 0 ) { - returnResponse(false); - } - - for (unsigned int index = 0; index < rangeInfo.size(); index++) { - rangeArray.Add(std::stof(rangeInfo[index])); - } - - response["videoFrameRates"] = rangeArray; - returnResponse(true); - } - - uint32_t AVOutputTV::getVideoResolutionCaps(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - response["maxResolution"] = "4096*2160p"; - returnResponse(true); - } - - uint32_t AVOutputTV::getPictureModeCaps(const JsonObject& parameters, JsonObject& response) - { - - JsonArray sourceArray; - JsonArray formatArray; - JsonArray rangeArray; - - std::vector range; - std::vector source; - std::vector pqmode; - std::vector format; - - unsigned int index = 0; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"PictureMode"); - - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - - if ((range.front()).compare("none") != 0) { - for (index = 0; index < range.size(); index++) { - rangeArray.Add(range[index]); - } - response["options"]=rangeArray; - } - - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } - LOGINFO("Exit\n"); - returnResponse(true); - } - } - - uint32_t AVOutputTV::getPictureMode(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - std::string picturemode; - std::string source; - std::string format; - std::string dummyPqmode; - int current_source = 0; - int current_format = 0; - int pqIndex = 0; - std::string tr181_param_name; - TR181_ParamData_t param = {0}; - tr181ErrorCode_t err = tr181Success; - - if (parsingGetInputArgument(parameters, "PictureMode",source, dummyPqmode, format) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } - - if (getParamIndex(source,dummyPqmode,format,current_source,pqIndex,current_format) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } - - tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "." + convertSourceIndexToString(current_source) + "." + "Format."+convertVideoFormatToString(current_format)+"."+"PictureModeString"; - err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); - - if ( tr181Success != err ) { - returnResponse(false); - } - else { - std::string s; - s+=param.value; - response["pictureMode"] = s; - LOGINFO("Exit : getPictureMode() : %s\n",s.c_str()); - returnResponse(true); - } - } - - uint32_t AVOutputTV::setPictureMode(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - std::string value; - std::string source; - std::string format; - std::string dummyPqmode; - char prevmode[PIC_MODE_NAME_MAX]={0}; - GetTVPictureMode(prevmode); - - tvError_t ret = tvERROR_NONE; - value = parameters.HasLabel("pictureMode") ? 
parameters["pictureMode"].String() : ""; - returnIfParamNotFound(parameters,"pictureMode"); - - // As only source need to validate, so pqmode and formate passing as currrent - if (parsingSetInputArgument(parameters, "PictureMode",source, dummyPqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } - - if (validateInputParameter("PictureMode",value) != 0) { - LOGERR("%s: Range validation failed for PictureMode\n", __FUNCTION__); - returnResponse(false); - } - if( !isCapablityCheckPassed( dummyPqmode, source,format, "PictureMode" )) { - LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); - returnResponse(false); - } - - if( isSetRequired("Current",source,format) ) { - LOGINFO("Proceed with SetTVPictureMode\n"); - ret = SetTVPictureMode(value.c_str()); - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - std::vector pq_mode_vec; - std::vector source_vec; - std::vector format_vec; - - getSaveConfig("Current", source.c_str(), format.c_str(), source_vec, pq_mode_vec, format_vec); - - for (int sourceType : source_vec) { - tvVideoSrcType_t source = (tvVideoSrcType_t)sourceType; - for (int formatType : format_vec) { - tvVideoFormatType_t format = (tvVideoFormatType_t)formatType; - std::string tr181_param_name = ""; - tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - // framing Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.AVOutput.Source.source_index[x].Format.format_index[x].PictureModeString.value - tr181_param_name += "."+convertSourceIndexToString(source)+"."+"Format."+ - convertVideoFormatToString(format)+"."+"PictureModeString"; - tr181ErrorCode_t err = setLocalParam(rfc_caller_id, tr181_param_name.c_str(), value.c_str()); - if ( err != tr181Success ) { - LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, getTR181ErrorString(err)); - returnResponse(false); - } - else { - LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, value.c_str()); - int pqmodeindex = (int)getPictureModeIndex(value); - SaveSourcePictureMode(source, format, pqmodeindex); - } - } - } - - //Filmmaker mode telemetry - if(!strncmp(value.c_str(),"filmmaker",strlen(value.c_str())) && strncmp(prevmode,"filmmaker",strlen(prevmode))) { - LOGINFO("%s mode has been enabled",value.c_str()); - } - else if(!strncmp(prevmode,"filmmaker",strlen(prevmode)) && strncmp(value.c_str(),"filmmaker",strlen(value.c_str()))) { - LOGINFO("%s mode has been disabled",prevmode); - } - - LOGINFO("Broadcasting the low latency change event \n"); - - if(m_isDalsEnabled) { - //GameModebroadcast - if(!strncmp(value.c_str(),"game",strlen(value.c_str())) && strncmp(prevmode,"game",strlen(prevmode))) { - broadcastLowLatencyModeChangeEvent(1); - } - else if(!strncmp(prevmode,"game",strlen(prevmode)) && strncmp(value.c_str(),"game",strlen(value.c_str()))) { - broadcastLowLatencyModeChangeEvent(0); - } - } - - LOGINFO("Exit : Value : %s \n",value.c_str()); + LOGINFO("Exit\n"); returnResponse(true); } } - uint32_t AVOutputTV::resetPictureMode(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::get2PointWB(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); - tr181ErrorCode_t err = tr181Success; - TR181_ParamData_t param = {0}; + LOGINFO("Entry"); - std::vector pq_mode_vec; - std::vector source_vec; - std::vector format_vec; - std::string source; - std::string dummyPqmode; - std::string format; + 
capDetails_t inputInfo; + paramIndex_t indexInfo; + int level = 0; + tvPQParameterIndex_t tvPQEnum; - // As only source need to validate, so pqmode and formate passing as currrent - if (parsingSetInputArgument(parameters, "PictureMode",source, dummyPqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + inputInfo.control = parameters.HasLabel("control") ? parameters["control"].String() : ""; + + if( inputInfo.color.empty() || inputInfo.control.empty() ) { + LOGERR("%s : Color/Control param not found!!!\n",__FUNCTION__); returnResponse(false); } - if( !isCapablityCheckPassed( dummyPqmode, source,format, "PictureMode" )) { - LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); + if (isPlatformSupport("WhiteBalance") != 0) { returnResponse(false); } - getSaveConfig("Current", source, format, source_vec, pq_mode_vec, format_vec); - - for (int source : source_vec) { - tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; - for (int format : format_vec) { - tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; - std::string tr181_param_name = ""; - tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ - convertVideoFormatToString(formatType)+"."+"PictureModeString"; - - err = clearLocalParam(rfc_caller_id, tr181_param_name.c_str()); - if ( err != tr181Success ) { - LOGWARN("clearLocalParam for %s Failed : %s\n", tr181_param_name.c_str(), getTR181ErrorString(err)); - returnResponse(false); - } - else { - err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); - if ( tr181Success == err ) { - //get curren source and if matches save for that alone - tvVideoSrcType_t current_source = VIDEO_SOURCE_IP; - GetCurrentVideoSource(¤t_source); - - tvVideoFormatType_t current_format = VIDEO_FORMAT_NONE; - GetCurrentVideoFormat(¤t_format); - if( current_format == VIDEO_FORMAT_NONE) { - current_format = VIDEO_FORMAT_SDR; - } - - if (current_source == sourceType && current_format == formatType) { - - tvError_t ret = SetTVPictureMode(param.value); - if(ret != tvERROR_NONE) { - LOGWARN("Picture Mode set failed: %s\n",getErrorString(ret).c_str()); - returnResponse(false); - } - else { - LOGINFO("Exit : Picture Mode reset successfully, value: %s\n", param.value); - } - } - int pqmodeindex = (int)getPictureModeIndex(param.value); - SaveSourcePictureMode(sourceType, formatType, pqmodeindex); - } - else { - LOGWARN("getLocalParam for %s failed\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - returnResponse(false); - } - } - } + if (parsingGetInputArgument(parameters, "WhiteBalance", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); } - returnResponse(true) - } - uint32_t AVOutputTV::signalFilmMakerMode(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - std::string value; - char prevmode[PIC_MODE_NAME_MAX]={0}; - GetTVPictureMode(prevmode); + if (getParamIndex("WhiteBalance", inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - value = parameters.HasLabel("signalFilmMakerMode") ? 
parameters["signalFilmMakerMode"].String() : ""; - returnIfParamNotFound(parameters, "signalFilmMakerMode"); + if ( convertWBParamToPQEnum(inputInfo.control,inputInfo.color,tvPQEnum) != 0 ) { + LOGINFO("%s: Control/Color Param Not Found \n",__FUNCTION__); + returnResponse(false); + } - if(strncmp(value.c_str(),"ON",strlen(value.c_str())) == 0) { - NotifyFilmMakerModeChange(tvContentType_FMM); - LOGINFO(" enabling Film makermode \n"); + int err = getLocalparam("WhiteBalance",indexInfo,level, tvPQEnum); + if( err == 0 ) { + response["level"] = level; + LOGINFO("Exit : params Value: %d \n", level); + returnResponse(true); } else { - LOGINFO(" disabling Film makermode \n"); - NotifyFilmMakerModeChange(tvContentType_NONE); + returnResponse(false); } - returnResponse(true); } - uint32_t AVOutputTV::setLowLatencyState(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::set2PointWB(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); - std::string value; - std::string pqmode; - std::string source; - std::string format; - int params[3]={0}; - int lowLatencyIndex = 0,prevLowLatencyIndex = 0; + capDetails_t inputInfo; + int level = 0; + tvPQParameterIndex_t tvPQEnum; + int retVal = 0; + std::string color,control,value; tvError_t ret = tvERROR_NONE; - ret = GetLowLatencyState(&prevLowLatencyIndex); - if(ret != tvERROR_NONE) { - LOGERR("Get previous low latency state failed\n"); + inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + inputInfo.control = parameters.HasLabel("control") ? parameters["control"].String() : ""; + + if (isPlatformSupport("WhiteBalance") != 0) { + returnResponse(false); + } + + if( inputInfo.color.empty() || inputInfo.control.empty() ) { + LOGERR("%s : Color/Control param not found!!!\n",__FUNCTION__); returnResponse(false); } - value = parameters.HasLabel("LowLatencyState") ? parameters["LowLatencyState"].String() : ""; - returnIfParamNotFound(parameters,"LowLatencyState"); - lowLatencyIndex = stoi(value); + value = parameters.HasLabel("level") ? 
parameters["level"].String() : ""; + returnIfParamNotFound(parameters,"level"); + level = std::stoi(value); - if (validateIntegerInputParameter("LowLatencyState",lowLatencyIndex) != 0) { - LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); + if (validateWBParameter("WhiteBalance",inputInfo.control,level) != 0) { + LOGERR("%s: CMS Failed in range validation", __FUNCTION__); returnResponse(false); } - if (parsingSetInputArgument(parameters, "LowLatencyState",source, pqmode, format) != 0) { + if (parsingSetInputArgument(parameters,"WhiteBalance",inputInfo) != 0) { LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); returnResponse(false); } - if( !isCapablityCheckPassed(pqmode, source, format, "LowLatencyState" )) { - LOGERR("%s: CapablityCheck failed for LowLatencyState\n", __FUNCTION__); + if( !isCapablityCheckPassed( "WhiteBalance",inputInfo )) { + LOGERR("%s: CapablityCheck failed for WhiteBalance\n", __FUNCTION__); returnResponse(false); } - params[0]=lowLatencyIndex; - int retval= updateAVoutputTVParam("set","LowLatencyState",pqmode,source,format,PQ_PARAM_LOWLATENCY_STATE,params); - if(retval != 0 ) { - LOGERR("Failed to SaveLowLatency to ssm_data\n"); + if ( convertWBParamToPQEnum(inputInfo.control,inputInfo.color,tvPQEnum) != 0 ) { + LOGERR("%s: %s/%s Param Not Found \n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str()); returnResponse(false); - } else { - - if( isSetRequired(pqmode,source,format) ) { - LOGINFO("Proceed with setLowLatencyState\n"); - ret = SetLowLatencyState( lowLatencyIndex ); - } + } + + if( (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format))) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + + tvVideoSrcType_t currentSource = VIDEO_SOURCE_IP; + tvError_t ret = GetCurrentVideoSource(¤tSource); if(ret != tvERROR_NONE) { - LOGERR("Failed to setLowLatency\n"); - params[0]=prevLowLatencyIndex; - LOGERR("Failed to set low latency. Fallback to previous state %d\n", prevLowLatencyIndex); + LOGWARN("%s: GetCurrentVideoSource( ) Failed \n",__FUNCTION__); + return -1; + } + + tvWBColor_t colorLevel; + if ( getWBColorEnumFromString(inputInfo.color,colorLevel ) == -1 ) { + LOGERR("%s : GetColorEnumFromString Failed!!! ",__FUNCTION__); + return -1; + } + + tvWBControl_t controlLevel; + if ( getWBControlEnumFromString(inputInfo.control,controlLevel ) == -1 ) { + LOGERR("%s : GetComponentEnumFromString Failed!!! 
",__FUNCTION__); + return -1; + } + + ret = SetCustom2PointWhiteBalance(colorLevel,controlLevel,level); + } - retval=updateAVoutputTVParam("set","LowLatencyState",pqmode,source,format,PQ_PARAM_LOWLATENCY_STATE,params); - if(retval != 0 ){ - LOGERR("Fallback to previous low latency state %d failed.\n", prevLowLatencyIndex); - } + if(ret != tvERROR_NONE) { + LOGERR("%s: Failed to set WhiteBalance\n",__FUNCTION__); + returnResponse(false); + } + else { + retVal= updateAVoutputTVParam("set","WhiteBalance",inputInfo,tvPQEnum,level); + if(retVal != 0 ) { + LOGERR("%s : Failed to Save WB %s/%s : %d to ssm_data\n",__FUNCTION__,inputInfo.control.c_str(),inputInfo.color.c_str(),level); returnResponse(false); } - - LOGINFO("Exit : setLowLatency successful to value: %d\n", lowLatencyIndex); + LOGINFO("Exit : set2PointWB %s/%s successful to value: %d\n", inputInfo.control.c_str(),inputInfo.color.c_str(),level); returnResponse(true); } } - uint32_t AVOutputTV::getLowLatencyState(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::reset2PointWB(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry"); + LOGINFO("Entry\n"); - std::string pqmode; - std::string source; - std::string format; - std::string key; - int sourceIndex=0,pqIndex=0,formatIndex=0; - int lowlatencystate = 0; + capDetails_t inputInfo; + tvPQParameterIndex_t tvPQEnum; + int retVal = 0; + int level = 0; + std::string color,control; + inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + inputInfo.control = parameters.HasLabel("control") ? parameters["control"].String() : ""; - if (parsingGetInputArgument(parameters, "LowLatencyState",source, pqmode, format) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + if (isPlatformSupport("WhiteBalance") != 0) { returnResponse(false); } - if (getParamIndex(source,pqmode,format,sourceIndex,pqIndex,formatIndex) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + + if (parsingSetInputArgument(parameters,"WhiteBalance",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); returnResponse(false); } - int err = getLocalparam("LowLatencyState",formatIndex,pqIndex,sourceIndex,lowlatencystate, PQ_PARAM_LOWLATENCY_STATE); - if( err == 0 ) { - response["lowLatencyState"] = std::to_string(lowlatencystate); - LOGINFO("Exit : LowLatencyState Value: %d \n", lowlatencystate); - returnResponse(true); + if( !isCapablityCheckPassed( "WhiteBalance",inputInfo )) { + LOGERR("%s: CapablityCheck failed for WhiteBalance\n", __FUNCTION__); + returnResponse(false); } - else { + + for( int colorIndex= tvWB_COLOR_RED; colorIndex < tvWB_COLOR_MAX; colorIndex++) { + for(int controlIndex = tvWB_CONTROL_GAIN;controlIndex < tvWB_CONTROL_MAX;controlIndex++) { + inputInfo.control = getWBControlStringFromEnum((tvWBControl_t)controlIndex); + inputInfo.color = getWBColorStringFromEnum((tvWBColor_t)colorIndex); + if ( convertWBParamToPQEnum(inputInfo.control,inputInfo.color,tvPQEnum) != 0 ) { + LOGERR("%s: %s/%s Param Not Found \n",__FUNCTION__,inputInfo.control.c_str(),inputInfo.color.c_str()); + returnResponse(false); + } + + retVal |= updateAVoutputTVParam("reset","WhiteBalance",inputInfo,tvPQEnum,level); + } + } + + if( retVal != 0 ) { + LOGWARN("Failed to reset WhiteBalance\n"); returnResponse(false); } + else { + LOGINFO("Exit : reset2PointWB successful \n"); + returnResponse(true); + } } - uint32_t AVOutputTV::resetLowLatencyState(const JsonObject& parameters, JsonObject& response) + 
uint32_t AVOutputTV::get2PointWBCapsV2(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); + LOGINFO("Entry: get2PointWBCapsV2"); - std::string value; - std::string pqmode; - std::string source; - std::string format; - int sourceIndex=0,pqIndex=0,formatIndex=0,lowlatencystate=0; - int params[3]={0}; - tvError_t ret = tvERROR_NONE; + int min_gain = 0, min_offset = 0, max_gain = 0, max_offset = 0; + tvWBColor_t* colorArray = nullptr; + tvWBControl_t* controlArray = nullptr; + size_t num_color = 0, num_control = 0; + tvContextCaps_t* context_caps = nullptr; - if (parsingSetInputArgument(parameters, "LowLatencyState",source, pqmode, format) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + tvError_t ret = GetCustom2PointWhiteBalanceCaps(&min_gain, &min_offset, &max_gain, &max_offset, + &colorArray, &controlArray, + &num_color, &num_control, &context_caps); + + if (ret != tvERROR_NONE) { + LOGERR("GetCustom2PointWhiteBalanceCaps failed with error: %d", ret); returnResponse(false); } - if( !isCapablityCheckPassed( pqmode, source, format, "LowLatencyState" )) { - LOGERR("%s: CapablityCheck failed for LowLatencyState\n", __FUNCTION__); - returnResponse(false); + response["platformSupport"] = true; + + // Range Info + JsonObject rangeGain, rangeOffset; + rangeGain["from"] = min_gain; + rangeGain["to"] = max_gain; + rangeOffset["from"] = min_offset; + rangeOffset["to"] = max_offset; + + response["rangeGain"] = rangeGain; + response["rangeOffset"] = rangeOffset; + + // Control Info + JsonArray controlJson; + for (size_t i = 0; i < num_control; ++i) { + controlJson.Add(getWBControlStringFromEnum(controlArray[i])); } + response["control"] = controlJson; - int retval= updateAVoutputTVParam("reset","LowLatencyState",pqmode,source,format,PQ_PARAM_LOWLATENCY_STATE,params); - if(retval != 0 ) { - LOGERR("Failed to clear Lowlatency from ssmdata and localstore\n"); + // Color Info + JsonArray colorJson; + for (size_t i = 0; i < num_color; ++i) { + colorJson.Add(getWBColorStringFromEnum(colorArray[i])); + } + response["color"] = colorJson; + response["context"] = parseContextCaps(context_caps); + +#if HAL_NOT_READY + delete[] colorArray; + delete[] controlArray; +#endif + + LOGINFO("Exit: get2PointWBCapsV2"); + returnResponse(true); + } + + + uint32_t AVOutputTV::get2PointWBCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + capVectors_t info; + + JsonArray rangeArray; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray sourceArray; + JsonArray colorArray; + JsonArray controlArray; + + JsonObject gainInfo; + JsonObject offsetInfo; + + unsigned int index = 0; + + tvError_t ret = getParamsCaps("WhiteBalance",info); + + if(ret != tvERROR_NONE) { returnResponse(false); } else { - if (isSetRequired(pqmode,source,format)) { - getParamIndex("Current","Current", "Current",sourceIndex,pqIndex,formatIndex); - int err = getLocalparam("LowLatencyState",formatIndex,pqIndex,sourceIndex, lowlatencystate, PQ_PARAM_LOWLATENCY_STATE); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,formatIndex, sourceIndex, pqIndex, lowlatencystate); - ret = SetLowLatencyState(lowlatencystate); + response["platformSupport"] = (info.isPlatformSupportVector[0].compare("true") == 0) ? 
true : false; + + gainInfo["from"] = stoi(info.rangeVector[0]); + gainInfo["to"] = stoi(info.rangeVector[1]); + response["gainInfo"]=gainInfo; + + offsetInfo["from"] = stoi(info.rangeVector[0]); + offsetInfo["to"] = stoi(info.rangeVector[1]); + response["offsetInfo"]=offsetInfo; + + + + if ((info.pqmodeVector.front()).compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + response["pictureModeInfo"]=pqmodeArray; + } + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } + response["videoSourceInfo"]=sourceArray; + } + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; } - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetLowLatency Successful to value : %d \n",lowlatencystate); + if ((info.colorVector.front()).compare("none") != 0) { + for (index = 0; index < info.colorVector.size(); index++) { + colorArray.Add(info.colorVector[index]); + } + response["colorInfo"]=colorArray; + } + + if ((info.controlVector.front()).compare("none") != 0) { + for (index = 0; index < info.controlVector.size(); index++) { + controlArray.Add(info.controlVector[index]); + } + response["controlInfo"]=controlArray; + } + + LOGINFO("Exit\n"); returnResponse(true); } } - uint32_t AVOutputTV::getLowLatencyStateCaps(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getAutoBacklightModeCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; JsonArray rangeArray; JsonArray pqmodeArray; @@ -3036,32 +5978,36 @@ namespace Plugin { unsigned int index = 0; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"LowLatencyState"); + tvError_t ret = getParamsCaps("AutoBacklightMode",info); if(ret != tvERROR_NONE) { returnResponse(false); } else { - for (index = 0; index < range.size(); index++) { - rangeArray.Add(stoi(range[index])); - } - response["LowLatencyInfo"]=rangeArray; - if ((pqmode.front()).compare("none") != 0) { - for (index = 0; index < pqmode.size(); index++) { - pqmodeArray.Add(pqmode[index]); + response["platformSupport"] = (info.isPlatformSupportVector[0].compare("true") == 0 ) ? 
true : false; + + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); + } + + response["options"]=rangeArray; + + if (info.pqmodeVector.front().compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); } response["pictureModeInfo"]=pqmodeArray; } - if ((source.front()).compare("none") != 0) { - for (index = 0; index < source.size(); index++) { - sourceArray.Add(source[index]); + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); } response["videoSourceInfo"]=sourceArray; } - if ((format.front()).compare("none") != 0) { - for (index = 0; index < format.size(); index++) { - formatArray.Add(format[index]); + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); } response["videoFormatInfo"]=formatArray; } @@ -3070,6 +6016,197 @@ namespace Plugin { } } + uint32_t AVOutputTV::setAutoBacklightMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_backlightModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + tvBacklightMode_t mode = tvBacklightMode_AMBIENT; + capDetails_t inputInfo; + + value = parameters.HasLabel("mode") ? parameters["mode"].String() : ""; + returnIfParamNotFound(parameters,"mode"); + + if (validateInputParameter("AutoBacklightMode",value) != 0) { + LOGERR("%s: Range validation failed for AutoBacklightMode\n", __FUNCTION__); + returnResponse(false); + } + + if (isPlatformSupport("AutoBacklightMode") != 0) { + returnResponse(false); + } + + if (parsingSetInputArgument(parameters,"AutoBacklightMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "AutoBacklightMode",inputInfo )) { + LOGERR("%s: CapablityCheck failed for AutoBacklightMode\n", __FUNCTION__); + returnResponse(false); + } + + if(!value.compare("Manual")) { + mode = tvBacklightMode_MANUAL; + } + else if (!value.compare("Ambient")) { + mode = tvBacklightMode_AMBIENT; + } + else { + returnResponse(false); + } + + tvError_t ret = SetCurrentBacklightMode (mode); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + //Save AutoBacklightMode to localstore + + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, value.c_str()); + if ( err != tr181Success ) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, value.c_str()); + } + LOGINFO("Exit : SetAutoBacklightMode() value : %s\n",value.c_str()); + returnResponse(true); + } + } + else + { + bool success = false; + success = setEnumPQParam( + parameters, + "mode", + "BacklightMode", + backlightModeReverseMap, + PQ_PARAM_BACKLIGHT_MODE, + [](int val) { + return SetCurrentBacklightMode(static_cast(val)); + }); + + returnResponse(success); + } + } + + uint32_t AVOutputTV::getAutoBacklightMode(const JsonObject& parameters, JsonObject& response) + { + if(m_backlightModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + TR181_ParamData_t param; + + if (isPlatformSupport("AutoBacklightMode") != 0) { + returnResponse(false); + } + + 
tr181ErrorCode_t err = getLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, ¶m); + if (err!= tr181Success) { + returnResponse(false); + } + else { + std::string s; + s+=param.value; + response["mode"] = s; + LOGINFO("Exit getAutoBacklightMode(): %s\n",s.c_str()); + returnResponse(true); + } + } + else + { + std::string mode; + if (getEnumPQParamString(parameters, "BacklightMode", + PQ_PARAM_BACKLIGHT_MODE, backlightModeMap, mode)) { + response["mode"] = mode; + returnResponse(true); + } else { + returnResponse(false); + } + } + + } + + uint32_t AVOutputTV::resetAutoBacklightMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_backlightModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + tvError_t ret = tvERROR_NONE; + + if (isPlatformSupport("AutoBacklightMode") != 0) { + returnResponse(false); + } + + tr181ErrorCode_t err = clearLocalParam(rfc_caller_id,AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM); + if ( err != tr181Success ) { + LOGWARN("clearLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); + ret = tvERROR_GENERAL; + } + else { + LOGINFO("clearLocalParam for %s Successful\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM); + + TR181_ParamData_t param; + memset(¶m, 0, sizeof(param)); + + tr181ErrorCode_t err = getLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM,¶m); + if ( err != tr181Success ) { + LOGWARN("getLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); + ret = tvERROR_GENERAL; + } + else { + tvBacklightMode_t blMode = tvBacklightMode_NONE; + + if(!std::string(param.value).compare("none")) { + blMode = tvBacklightMode_NONE; + } + else if (!std::string(param.value).compare("Manual")){ + blMode = tvBacklightMode_MANUAL; + } + else if (!std::string(param.value).compare("Ambient")){ + blMode = tvBacklightMode_AMBIENT; + } + else if (!std::string(param.value).compare("Eco")){ + blMode = tvBacklightMode_ECO; + } + else { + blMode = tvBacklightMode_NONE; + } + ret = SetCurrentBacklightMode(blMode); + if(ret != tvERROR_NONE) { + LOGWARN("Autobacklight Mode set failed: %s\n",getErrorString(ret).c_str()); + } + else { + LOGINFO("Exit : Autobacklight Mode set successfully, value: %s\n", param.value); + } + } + } + if(ret != tvERROR_NONE) + { + returnResponse(false); + } + else + { + returnResponse(true); + } + } + else + { + bool success = resetEnumPQParamToDefault( + parameters, + "BacklightMode", + PQ_PARAM_BACKLIGHT_MODE, + backlightModeMap, + [](int value, const std::unordered_map&) -> tvError_t { + return SetCurrentBacklightMode(static_cast(value)); + }); + returnResponse(success); + } + } + uint32_t AVOutputTV::getVideoSource(const JsonObject& parameters,JsonObject& response) { LOGINFO("Entry\n"); @@ -3081,7 +6218,7 @@ namespace Plugin { returnResponse(false); } else { - response["currentVideoSource"] = convertSourceIndexToString(currentSource); + response["currentVideoSource"] = convertSourceIndexToStringV2(currentSource); LOGINFO("Exit: getVideoSource :%d success \n", currentSource); returnResponse(true); } diff --git a/AVOutput/AVOutputTV.h b/AVOutput/AVOutputTV.h index 0c390651..84c89526 100644 --- a/AVOutput/AVOutputTV.h +++ b/AVOutput/AVOutputTV.h @@ -27,7 +27,6 @@ #include "tvTypes.h" #include "tvSettings.h" -#include "tvSettingsExtODM.h" #include #include "Module.h" #include "tvError.h" @@ -68,6 +67,10 @@ #define STRING_FORMAT "Format." #define STRING_DEFAULT "Default" #define STRING_SOURCE "Source." 
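Aside: the STRING_* prefixes added below ("Component.", "Color.", "Control.", "ColorTemperature.") extend the existing "Source."/"Format." convention for composing dotted settings keys, in the same way setPictureMode earlier in this patch frames its PictureModeString key from source and format segments. The following minimal sketch only illustrates that concatenation pattern; the base key and index values are made up, since the exact key layout for the new CMS/white-balance entries is not shown in this hunk.

#include <iostream>
#include <string>

int main()
{
    // Prefix constants mirroring the STRING_* defines (values copied from the header).
    const std::string kSource  = "Source.";
    const std::string kFormat  = "Format.";
    const std::string kColor   = "Color.";
    const std::string kControl = "Control.";

    // Hypothetical base key and indices; the real code derives these from helpers
    // such as convertSourceIndexToString()/convertVideoFormatToString().
    std::string key = "X_EXAMPLE.AVOutput." + kSource + "1." + kFormat + "SDR."
                    + kColor + "Red." + kControl + "Gain";

    // Prints: X_EXAMPLE.AVOutput.Source.1.Format.SDR.Color.Red.Control.Gain
    std::cout << key << std::endl;
    return 0;
}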
+#define STRING_COMPONENT "Component." +#define STRING_COLOR "Color." +#define STRING_CONTROL "Control." +#define STRING_COLORTEMPERATURE "ColorTemperature." #define CREATE_DIRTY(__X__) (__X__+=STRING_DIRTY) #define CAPABLITY_FILE_NAME "pq_capabilities.ini" @@ -116,6 +119,61 @@ class CIniFile namespace WPEFramework { namespace Plugin { +typedef struct +{ + std::string range; + std::string pqmode; + std::string format; + std::string source; + std::string isPlatformSupport; + std::string index; + std::string color; + std::string component; + std::string colorTemperature; + std::string control; +}capDetails_t; + +typedef struct +{ + std::vector rangeVector; + std::vector pqmodeVector; + std::vector formatVector; + std::vector sourceVector; + std::vector isPlatformSupportVector; + std::vector indexVector; + std::vector colorVector; + std::vector componentVector; + std::vector colorTempVector; + std::vector controlVector; +}capVectors_t; + + +typedef struct +{ + std::vector rangeValues; + std::vector pqmodeValues; + std::vector formatValues; + std::vector sourceValues; + std::vector isPlatformSupportValues; + std::vector indexValues; + std::vector colorValues; + std::vector componentValues; + std::vector colorTempValues; + std::vector controlValues; +}valueVectors_t; + +typedef struct +{ + uint8_t sourceIndex; + uint8_t pqmodeIndex; + uint8_t formatIndex; + uint8_t colorIndex; + uint8_t componentIndex; + uint8_t colorTempIndex; + uint8_t controlIndex; +}paramIndex_t; + + //class AVOutputTV : public PluginHost::IPlugin, public PluginHost::JSONRPC { class AVOutputTV : public AVOutputBase { private: @@ -142,6 +200,17 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(getLowLatencyState) DECLARE_JSON_RPC_METHOD(getZoomMode) DECLARE_JSON_RPC_METHOD(getVideoContentType) + DECLARE_JSON_RPC_METHOD(getCMS) + DECLARE_JSON_RPC_METHOD(getHDRMode) + DECLARE_JSON_RPC_METHOD(get2PointWB) + DECLARE_JSON_RPC_METHOD(getAutoBacklightMode) + DECLARE_JSON_RPC_METHOD(getAISuperResolution) + DECLARE_JSON_RPC_METHOD(getPrecisionDetail) + DECLARE_JSON_RPC_METHOD(getLocalContrastEnhancement) + DECLARE_JSON_RPC_METHOD(getMPEGNoiseReduction) + DECLARE_JSON_RPC_METHOD(getDigitalNoiseReduction) + DECLARE_JSON_RPC_METHOD(getMEMC) + /*Get Capability API's*/ @@ -161,6 +230,33 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(getVideoResolutionCaps) DECLARE_JSON_RPC_METHOD(getLowLatencyStateCaps) DECLARE_JSON_RPC_METHOD(getZoomModeCaps) + DECLARE_JSON_RPC_METHOD(getCMSCaps) + DECLARE_JSON_RPC_METHOD(get2PointWBCaps) + DECLARE_JSON_RPC_METHOD(getHDRModeCaps) + DECLARE_JSON_RPC_METHOD(getAutoBacklightModeCaps) + DECLARE_JSON_RPC_METHOD(getBacklightCapsV2) + DECLARE_JSON_RPC_METHOD(getBrightnessCapsV2) + DECLARE_JSON_RPC_METHOD(getContrastCapsV2) + DECLARE_JSON_RPC_METHOD(getSharpnessCapsV2) + DECLARE_JSON_RPC_METHOD(getSaturationCapsV2) + DECLARE_JSON_RPC_METHOD(getHueCapsV2) + DECLARE_JSON_RPC_METHOD(getPrecisionDetailCaps) + DECLARE_JSON_RPC_METHOD(getLowLatencyStateCapsV2) + DECLARE_JSON_RPC_METHOD(getColorTemperatureCapsV2) + DECLARE_JSON_RPC_METHOD(getSDRGammaCaps) + DECLARE_JSON_RPC_METHOD(getBacklightDimmingModeCapsV2) + DECLARE_JSON_RPC_METHOD(getZoomModeCapsV2) + DECLARE_JSON_RPC_METHOD(getCMSCapsV2) + DECLARE_JSON_RPC_METHOD(get2PointWBCapsV2) + DECLARE_JSON_RPC_METHOD(getDolbyVisionCalibrationCaps) + DECLARE_JSON_RPC_METHOD(getPictureModeCapsV2) + DECLARE_JSON_RPC_METHOD(getAutoBacklightModeCapsV2) + DECLARE_JSON_RPC_METHOD(getLocalContrastEnhancementCaps) + 
DECLARE_JSON_RPC_METHOD(getMPEGNoiseReductionCaps) + DECLARE_JSON_RPC_METHOD(getDigitalNoiseReductionCaps) + DECLARE_JSON_RPC_METHOD(getAISuperResolutionCaps) + DECLARE_JSON_RPC_METHOD(getMEMCCaps) + DECLARE_JSON_RPC_METHOD(getMultiPointWBCaps) /*Set API's*/ DECLARE_JSON_RPC_METHOD(setBacklight) @@ -176,7 +272,17 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(setLowLatencyState) DECLARE_JSON_RPC_METHOD(setZoomMode) DECLARE_JSON_RPC_METHOD(setWBCtrl ) - DECLARE_JSON_RPC_METHOD(signalFilmMakerMode) + DECLARE_JSON_RPC_METHOD(setHDRMode ) + DECLARE_JSON_RPC_METHOD(setCMS ) + DECLARE_JSON_RPC_METHOD(set2PointWB ) + DECLARE_JSON_RPC_METHOD(signalFilmMakerMode) + DECLARE_JSON_RPC_METHOD(setAutoBacklightMode) + DECLARE_JSON_RPC_METHOD(setAISuperResolution) + DECLARE_JSON_RPC_METHOD(setPrecisionDetail) + DECLARE_JSON_RPC_METHOD(setLocalContrastEnhancement) + DECLARE_JSON_RPC_METHOD(setMPEGNoiseReduction) + DECLARE_JSON_RPC_METHOD(setDigitalNoiseReduction) + DECLARE_JSON_RPC_METHOD(setMEMC) /*Reset API's*/ DECLARE_JSON_RPC_METHOD(resetBacklight) @@ -191,31 +297,46 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(resetPictureMode ) DECLARE_JSON_RPC_METHOD(resetLowLatencyState) DECLARE_JSON_RPC_METHOD(resetZoomMode) + DECLARE_JSON_RPC_METHOD(resetHDRMode) + DECLARE_JSON_RPC_METHOD(resetCMS) + DECLARE_JSON_RPC_METHOD(reset2PointWB) + DECLARE_JSON_RPC_METHOD(resetAutoBacklightMode) + DECLARE_JSON_RPC_METHOD(resetAISuperResolution) + DECLARE_JSON_RPC_METHOD(resetPrecisionDetail) + DECLARE_JSON_RPC_METHOD(resetLocalContrastEnhancement) + DECLARE_JSON_RPC_METHOD(resetMPEGNoiseReduction) + DECLARE_JSON_RPC_METHOD(resetDigitalNoiseReduction) + DECLARE_JSON_RPC_METHOD(resetMEMC) + + private: - tvContentFormatType_t getContentFormatIndex(tvVideoHDRFormat_t formatToConvert); int getPictureModeIndex(std::string pqmode); int getSourceIndex(std::string source); int getFormatIndex(std::string format); int getPqParamIndex(); - int getParamIndex(string source,string pqmode,string format,int& sourceIndex,int& pqmodeIndex,int& formatIndex); + int getParamIndex(std::string param, capDetails_t& paramInfo, paramIndex_t& indexInfo); int getDolbyModeIndex(const char * dolbyMode); + int getHDRModeIndex(const std::string HDRMode, const std::string format,tvDolbyMode_t &value); tvDimmingMode_t getDimmingModeIndex(string mode); bool isIncluded(const std::set set1,const std::set set2); bool isSetRequired(std::string pqmode,std::string source,std::string format); int isPlatformSupport(std::string pqparam); - void spliltCapablities( std::vector &range,std::vector &pqmode,std::vector &format,std::vector &source, std::vector &index,std::string rangeInfo, std::string pqmodeInfo, std::string formatInfo, std::string sourceInfo, std::string indexInfo); - bool isCapablityCheckPassed( std::string pqmodeInputInfo,std::string sourceInputInfo,std::string formatInputInfo,std::string param ); - int parsingSetInputArgument(const JsonObject& parameters, std::string pqparam,std::string & source, std::string & pqmode, std::string & format); - int parsingGetInputArgument(const JsonObject& parameters, std::string pqparam,std::string & source, std::string & pqmode, std::string & format); + + bool isCapablityCheckPassed( std::string param, capDetails_t inputInfo ); + int parsingSetInputArgument(const JsonObject& parameters, std::string pqparam,capDetails_t& paramInfo); + int parsingGetInputArgument(const JsonObject& parameters, std::string pqparam, capDetails_t& info); + void spliltCapablities( capVectors_t& 
vectorInfo, capDetails_t stringInfo); void spliltStringsAndConvertToSet( std::string pqmodeInfo,std::string formatInfo,std::string sourceInfo,std::set &pqmode, std::set &format, std::set &source); int validateIntegerInputParameter(std::string param, int inputValue); - int fetchCapablities(string pqparam, string & source, string & pqmode, string & format); + int fetchCapablities(string pqparam, capDetails_t& info); int validateInputParameter(std::string param, std::string inputValue); + int validateWBParameter(std::string param,std::string control,int inputValue); + int validateCMSParameter(std::string component,int inputValue); /* AVoutput ini file default entries */ void locatePQSettingsFile(void); @@ -225,63 +346,219 @@ class AVOutputTV : public AVOutputBase { std::string convertToString(std::vector vec_strings); void convertParamToLowerCase(std::string &source, std::string &pqmode, std::string &format); - int convertToValidInputParameter(std::string pqparam, std::string & source, std::string & pqmode, std::string & format); + int convertToValidInputParameter(std::string pqparam, capDetails_t& info); string convertSourceIndexToString(int source); string convertVideoFormatToString(int format); string convertPictureIndexToString(int pqmode); - tvContentFormatType_t convertFormatStringToTVContentFormat(const char *format); //std::string convertSourceIndexToString(int sourceIndex); //std::string convertVideoFormatToString( int formatIndex ); void convertUserScaleBacklightToDriverScale(int format,int * params); /* Update TR181 with new values when app calls set/reset calls */ - tvError_t updateAVoutputTVParamToHAL(std::string forParam, int source, int pqmode, int format, int value,bool setNotDelete); + tvError_t updateAVoutputTVParamToHAL(std::string forParam, paramIndex_t indexInfo, int value,bool setNotDelete); /* updatePQParamsToCache will call updatePQParamToLocalCache for writing to TR181. 
* it will call TVSettings HAL for setting/saving the value * Will be called whenever the application invokes set/reset call */ - int updateAVoutputTVParam( std::string action, std::string tr181ParamName, std::string pqmode, std::string source, std::string format, tvPQParameterIndex_t pqParamIndex, int params[] ); + int updateAVoutputTVParam( std::string action, std::string tr181ParamName, capDetails_t info, tvPQParameterIndex_t pqParamIndex, int level ); /* Every bootup this function is called to sync TR181 to TVSettings HAL for saving the value */ tvError_t syncAvoutputTVParamsToHAL(std::string pqmode, std::string source, std::string format); /* Every Bootup this function is called to sync TR181 to TVSettings HAL for saving the picture mode assiocation to source */ int syncAvoutputTVPQModeParamsToHAL(std::string pqmode, std::string source, std::string format); + void syncCMSParams( ); + void syncWBParams( ); - uint32_t generateStorageIdentifier(std::string &key, std::string forParam,int contentFormat, int pqmode, int source); + uint32_t generateStorageIdentifier(std::string &key, std::string forParam,paramIndex_t info); + uint32_t generateStorageIdentifierCMS(std::string &key, std::string forParam, paramIndex_t info); + uint32_t generateStorageIdentifierWB(std::string &key, std::string forParam, paramIndex_t info); uint32_t generateStorageIdentifierDirty(std::string &key, std::string forParam,uint32_t contentFormat, int pqmode); std::string getErrorString (tvError_t eReturn); /* Get function to query TR181 entries or pq capability.ini file*/ - int getSaveConfig(std::string pqmode, std::string source, std::string format,std::vector &sources,std::vector &picturemodes, std::vector &formats); - int getLocalparam(std::string forParam,int formatIndex,int pqIndex,int sourceIndex,int &value, - tvPQParameterIndex_t pqParamIndex ,bool cms=false,int tunnel_type=0); + int getSaveConfig(std::string param, capDetails_t capInfo, valueVectors_t &values); + int getLocalparam( std::string forParam,paramIndex_t indexInfo,int & value,tvPQParameterIndex_t pqParamIndex,bool sync=false); + tvDataComponentColor_t getComponentColorEnum(std::string colorName); - int getDolbyParams(tvContentFormatType_t format, std::string &s, std::string source = ""); - tvError_t getParamsCaps(std::vector &range, std::vector &pqmode, std::vector &source, std::vector &format,std::string param ); - tvError_t getParamsCaps(std::vector &range, std::vector &pqmode, std::vector &source, - std::vector &format,std::string param , std::string & isPlatformSupport, - std::vector & index); + tvError_t getParamsCaps(std::string param, capVectors_t &vecInfo); int GetPanelID(char *panelid); - int ConvertHDRFormatToContentFormat(tvhdr_type_t hdrFormat); - int ReadCapablitiesFromConf(std::string &rangeInfo,std::string &pqmodeInfo,std::string &formatInfo,std::string &sourceInfo,std::string param, std::string & isPlatformSupport, std::string & indexInfo); + int ReadCapablitiesFromConf(std::string param, capDetails_t& info); + void getDimmingModeStringFromEnum(int value, std::string &toStore); void getColorTempStringFromEnum(int value, std::string &toStore); + void getDisplayModeStringFromEnum(int value, std::string &toStore); + void getBacklightModeStringFromEnum(int value, std::string &toStore); + int getCurrentPictureMode(char *picMode); int getDolbyParamToSync(int sourceIndex, int formatIndex, int& value); tvDolbyMode_t GetDolbyVisionEnumFromModeString(const char* modeString); std::string getDolbyModeStringFromEnum( tvDolbyMode_t mode); JsonArray 
getSupportedVideoSource(void); int getAvailableCapabilityModesWrapper(std::string param, std::string & outparam); - int getAvailableCapabilityModes(std::string & source, std::string & pqmode, std::string & format); + int getAvailableCapabilityModes( capDetails_t& info ); int getCapabilitySource(JsonArray &rangeArray); int getRangeCapability(std::string param, std::vector & rangeInfo); void getDynamicAutoLatencyConfig(); tvError_t getUserSelectedAspectRatio (tvDisplayMode_t* mode); + std::string getColorTemperatureStringFromEnum(tvColorTemp_t value); + std::string getCMSColorStringFromEnum(tvDataComponentColor_t value); + std::string getCMSComponentStringFromEnum(tvComponentType_t value); + std::string getWBControlStringFromEnum(tvWBControl_t value); + int getCMSColorEnumFromString(std::string color,tvDataComponentColor_t &value); + int getCMSComponentEnumFromString(std::string component, tvComponentType_t& value); + std::string getWBColorStringFromEnum(tvWBColor_t value); + int getWBColorEnumFromString(std::string color,tvWBColor_t& value); + int getWBControlEnumFromString(std::string color,tvWBControl_t& value); + int getColorTempEnumFromString(std::string color, tvColorTemp_t& value); + + bool checkCMSColorAndComponentCapability(const std::string capValue, const std::string inputValue); + int convertCMSParamToPQEnum(const std::string component, const std::string color,tvPQParameterIndex_t& value); + int convertWBParamToPQEnum(const std::string control, const std::string color,tvPQParameterIndex_t& value); + int convertWBParamToRGBEnum(const std::string color,const std::string control,tvRGBType_t &value); void broadcastLowLatencyModeChangeEvent(bool lowLatencyMode); tvError_t setAspectRatioZoomSettings(tvDisplayMode_t mode); tvError_t setDefaultAspectRatio(std::string pqmode="none",std::string format="none",std::string source="none"); + template + static int getEnumFromString(const std::map& reverseMap, const std::string& key, T defaultVal) { + auto it = reverseMap.find(key); + return (it != reverseMap.end()) ? 
it->second : defaultVal; + } + + static const std::map pqModeMap; + static const std::map videoFormatMap; + static const std::map videoSrcMap; + static const std::unordered_map backlightModeMap; + + static std::unordered_map pqModeReverseMap; + static std::unordered_map videoFormatReverseMap; + static std::unordered_map videoSrcReverseMap; + static bool reverseMapsInitialized; + static void initializeReverseMaps(); + static const std::unordered_map backlightModeReverseMap; + + tvError_t ReadJsonFile(JsonObject& root); + tvError_t ExtractContextCaps(const JsonObject& data, tvContextCaps_t** context_caps); + tvError_t ExtractRangeInfo(const JsonObject& data, int* max_value); + std::vector ParseContextCaps(const JsonObject& context); + tvContextCaps_t* AllocateContextCaps(const std::vector& contexts); + tvError_t GetCaps(const std::string& key, int* max_value, tvContextCaps_t** context_caps); + + tvError_t GetDVCalibrationCaps(tvDVCalibrationSettings_t **min_values, tvDVCalibrationSettings_t **max_values, tvContextCaps_t **context_caps); + tvError_t GetBacklightModeCaps(tvBacklightMode_t** backlight_mode, size_t* num_backlight_mode, tvContextCaps_t** context_caps); + tvError_t GetLocalContrastEnhancementCaps(int* maxLocalContrastEnhancement, tvContextCaps_t** context_caps); + tvError_t GetMPEGNoiseReductionCaps(int* maxMPEGNoiseReduction, tvContextCaps_t** context_caps); + tvError_t GetDigitalNoiseReductionCaps(int* maxDigitalNoiseReduction, tvContextCaps_t** context_caps); + tvError_t GetMultiPointWBCaps(int* num_hal_matrix_points, + int* rgb_min, + int* rgb_max, + int* num_ui_matrix_points, + double** ui_matrix_positions, + tvContextCaps_t** context_caps); + tvError_t GetCMSCaps(int* max_hue, + int* max_saturation, + int* max_luma, + tvDataComponentColor_t** color, + tvComponentType_t** component, + size_t* num_color, + size_t* num_component, + tvContextCaps_t** context_caps); + tvError_t GetCustom2PointWhiteBalanceCaps( int *min_gain, + int *min_offset, int *max_gain, + int *max_offset, tvWBColor_t **color, + tvWBControl_t **control, size_t* num_color, + size_t* num_control, tvContextCaps_t ** context_caps); +#define HAL_NOT_READY 0 +#if HAL_NOT_READY +#define CAPABLITY_FILE_NAMEV2 "/opt/panel/pq_capabilities.json" + tvError_t GetBacklightCaps(int *max_backlight, tvContextCaps_t **context_caps); + tvError_t GetBrightnessCaps(int *max_brightness, tvContextCaps_t **context_caps); + tvError_t GetContrastCaps(int* max_contrast, tvContextCaps_t** context_caps); + tvError_t GetSharpnessCaps(int *max_sharpness, tvContextCaps_t **context_caps); + tvError_t GetSaturationCaps(int* max_saturation, tvContextCaps_t** context_caps); + tvError_t GetHueCaps(int* max_hue, tvContextCaps_t** context_caps); + tvError_t GetPrecisionDetailCaps(int* max_precision, tvContextCaps_t** context_caps); + tvError_t GetLowLatencyStateCaps(int* max_latency, tvContextCaps_t ** context_caps); + tvError_t GetColorTemperatureCaps(tvColorTemp_t** color_temp, size_t* num_color_temp, tvContextCaps_t** context_caps); + tvError_t GetSdrGammaCaps(tvSdrGamma_t** sdr_gamma, size_t* num_sdr_gamma, tvContextCaps_t** context_caps); + tvError_t GetTVDimmingModeCaps(tvDimmingMode_t** dimming_mode, size_t* num_dimming_mode, tvContextCaps_t** context_caps); + tvError_t GetAspectRatioCaps(tvDisplayMode_t** aspect_ratio, size_t* num_aspect_ratio, tvContextCaps_t** context_caps); + tvError_t GetTVPictureModeCaps(tvPQModeIndex_t** mode, size_t* num_pic_modes, tvContextCaps_t** context_caps); + tvError_t GetAISuperResolutionCaps(int* 
maxAISuperResolution, tvContextCaps_t** context_caps); + tvError_t GetMEMCCaps(int* maxMEMC, tvContextCaps_t** context_caps); +#else +#define CAPABLITY_FILE_NAMEV2 "/etc/pq_capabilities.json" +#endif + uint32_t getPQCapabilityWithContext( + const std::function& getCapsFunc, + const JsonObject& parameters, + JsonObject& response); + JsonObject parseContextCaps(tvContextCaps_t* context_caps); + // Helper functions to extract modes/sources/formats from parameters + std::vector extractPQModes(const JsonObject& parameters); + std::vector extractVideoSources(const JsonObject& parameters); + std::vector extractVideoFormats(const JsonObject& parameters); + static bool isGlobalParam(const JsonArray& arr); + JsonArray getJsonArrayIfArray(const JsonObject& obj, const std::string& key); + int updateAVoutputTVParamV2(std::string action, std::string tr181ParamName, + const JsonObject& parameters, tvPQParameterIndex_t pqParamIndex, int level); + std::vector getValidContextsFromParameters(const JsonObject& parameters,const std::string& tr181ParamName ); + typedef tvError_t (*tvSetFunction)(int); + bool resetPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + tvSetFunction halSetter); + typedef tvError_t (*tvSetFunctionV2)(tvVideoSrcType_t, tvPQModeIndex_t,tvVideoFormatType_t,int); + bool resetPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + tvSetFunctionV2 halSetter); + bool resetEnumPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + const std::unordered_map& valueMap, + std::function&)> halSetter); + tvConfigContext_t getValidContextFromGetParameters(const JsonObject& parameters, const std::string& paramName); + bool getPQParamFromContext(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t paramType, + int& outValue); + bool getEnumPQParamString( + const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqType, + const std::unordered_map& enumToStrMap, + std::string& outStr); + bool setIntPQParam(const JsonObject& parameters, const std::string& paramName, + tvPQParameterIndex_t pqType, tvSetFunction halSetter, int maxCap); + bool setEnumPQParam(const JsonObject& parameters, + const std::string& inputKey, + const std::string& paramName, + const std::unordered_map& valueMap, + tvPQParameterIndex_t paramType, + std::function halSetter); + uint32_t setContextPQParam(const JsonObject& parameters, JsonObject& response, + const std::string& inputParamName, + const std::string& tr181ParamName, + int maxAllowedValue, + tvPQParameterIndex_t pqParamType, + std::function halSetter); + bool setPictureModeV2(const JsonObject& parameters); + bool getPictureModeV2(const JsonObject& parameters, std::string& outMode); + std::string getCurrentPictureModeAsString(); + std::string getCurrentVideoFormatAsString(); + std::string getCurrentVideoSourceAsString(); + bool isSetRequiredForParam(const JsonObject& parameters, const std::string& paramName); + tvContextCaps_t* getCapsForParam(const std::string& paramName); + bool isValidSource(const std::vector& sourceArray, tvVideoSrcType_t sourceIndex); + bool isValidFormat(const std::vector& formatArray, tvVideoFormatType_t formatIndex); + tvError_t updateAVoutputTVParamToHALV2(std::string forParam, paramIndex_t indexInfo, int value, bool setNotDelete); + bool resetPictureModeV2(const JsonObject& parameters); + int 
syncAvoutputTVPQModeParamsToHALV2(std::string pqmode, std::string source, std::string format); + std::string getCMSNameFromEnum(tvDataComponentColor_t colorEnum); + void syncCMSParamsV2(); + public: int m_currentHdmiInResoluton; @@ -290,6 +567,125 @@ class AVOutputTV : public AVOutputBase { char rfc_caller_id[RFC_BUFF_MAX]; bool appUsesGlobalBackLightFactor; int pic_mode_index[PIC_MODES_SUPPORTED_MAX]; + + + int m_maxBacklight = 0; + tvContextCaps_t* m_backlightCaps = nullptr; + tvError_t m_backlightStatus = tvERROR_NONE; + + int m_maxBrightness = 0; + tvContextCaps_t* m_brightnessCaps = nullptr; + tvError_t m_brightnessStatus = tvERROR_NONE; + + int m_maxContrast = 0; + tvContextCaps_t* m_contrastCaps = nullptr; + tvError_t m_contrastStatus = tvERROR_NONE; + + int m_maxSharpness = 0; + tvContextCaps_t* m_sharpnessCaps = nullptr; + tvError_t m_sharpnessStatus = tvERROR_NONE; + + int m_maxSaturation = 0; + tvContextCaps_t* m_saturationCaps = nullptr; + tvError_t m_saturationStatus = tvERROR_NONE; + + int m_maxHue = 0; + tvContextCaps_t* m_hueCaps = nullptr; + tvError_t m_hueStatus = tvERROR_NONE; + + int m_maxlowLatencyState = 0; + tvContextCaps_t* m_lowLatencyStateCaps = nullptr; + tvError_t m_lowLatencyStateStatus = tvERROR_NONE; + + int m_maxPrecisionDetail = 0; + tvContextCaps_t* m_precisionDetailCaps = nullptr; + tvError_t m_precisionDetailStatus = tvERROR_NONE; + + int m_maxLocalContrastEnhancement = 0; + tvContextCaps_t* m_localContrastEnhancementCaps = nullptr; + tvError_t m_localContrastEnhancementStatus = tvERROR_NONE; + + int m_maxMPEGNoiseReduction = 0; + tvContextCaps_t* m_MPEGNoiseReductionCaps = nullptr; + tvError_t m_MPEGNoiseReductionStatus = tvERROR_NONE; + + int m_maxDigitalNoiseReduction = 0; + tvContextCaps_t* m_digitalNoiseReductionCaps = nullptr; + tvError_t m_digitalNoiseReductionStatus = tvERROR_NONE; + + int m_maxAISuperResolution = 0; + tvContextCaps_t* m_AISuperResolutionCaps = nullptr; + tvError_t m_AISuperResolutionStatus = tvERROR_NONE; + + int m_maxMEMC = 0; + tvContextCaps_t* m_MEMCCaps = nullptr; + tvError_t m_MEMCStatus = tvERROR_NONE; + + tvColorTemp_t* m_colortemp = nullptr; + size_t m_numColortemp = 0; + tvContextCaps_t* m_colortempCaps = nullptr; + tvError_t m_colorTempStatus = tvERROR_NONE; + + tvDisplayMode_t* m_aspectRatio = nullptr; + size_t m_numAspectRatio = 0; + tvContextCaps_t* m_aspectRatioCaps = nullptr; + tvError_t m_aspectRatioStatus = tvERROR_NONE; + + tvDimmingMode_t* m_dimmingModes = nullptr; + size_t m_numdimmingModes = 0; + tvContextCaps_t* m_dimmingModeCaps = nullptr; + tvError_t m_dimmingModeStatus = tvERROR_NONE; + + tvPQModeIndex_t* m_pictureModes = nullptr; + size_t m_numPictureModes = 0; + tvContextCaps_t* m_pictureModeCaps = nullptr; + tvError_t m_pictureModeStatus = tvERROR_NONE; + + tvBacklightMode_t* m_backlightModes = nullptr; + size_t m_numBacklightModes = 0; + tvContextCaps_t* m_backlightModeCaps = nullptr; + tvError_t m_backlightModeStatus = tvERROR_NONE; + + tvSdrGamma_t* m_sdrGammaModes = nullptr; + size_t m_numsdrGammaModes = 0; + tvContextCaps_t* m_sdrGammaModeCaps = nullptr; + tvError_t m_sdrGammaModeStatus = tvERROR_NONE; + + int m_numHalMatrixPoints = 0; + int m_rgbMin = 0; + int m_rgbMax = 0; + int m_numUiMatrixPoints = 0; + double* m_uiMatrixPositions = nullptr; + tvContextCaps_t* m_multiPointWBCaps = nullptr; + tvError_t m_multiPointWBStatus = tvERROR_NONE; + + tvDVCalibrationSettings_t* m_minValues; + tvDVCalibrationSettings_t* m_maxValues; + tvContextCaps_t* m_DVCalibrationCaps = nullptr; + tvError_t 
m_DVCalibrationStatus = tvERROR_NONE; + + int m_maxCmsHue = 0; + int m_maxCmsSaturation = 0; + int m_maxCmsLuma = 0; + size_t m_numColor = 0; + size_t m_numComponent = 0; + tvDataComponentColor_t* m_cmsColorArr; + tvComponentType_t* m_cmsComponentArr; + std::vector m_cmsColorList; + std::vector m_cmsComponentList; + std::unordered_map m_cmsIndexMap; + tvContextCaps_t* m_cmsCaps = nullptr; + tvError_t m_cmsStatus = tvERROR_NONE; + + bool setCMSParam(const JsonObject& parameters); + + std::string convertPictureIndexToStringV2(int pqmode); + std::string convertVideoFormatToStringV2(int format); + std::string convertSourceIndexToStringV2(int source); + + uint32_t generateStorageIdentifierV2(std::string &key, std::string forParam, paramIndex_t info); + void generateStorageIdentifierCMSV2(std::string &key, std::string forParam, paramIndex_t info); + void generateStorageIdentifierWBV2(std::string &key, std::string forParam, paramIndex_t info); AVOutputTV(); ~AVOutputTV(); @@ -301,7 +697,6 @@ class AVOutputTV : public AVOutputBase { void NotifyFilmMakerModeChange(tvContentType_t mode); void NotifyVideoResolutionChange(tvResolutionParam_t resolution); void NotifyVideoFrameRateChange(tvVideoFrameRate_t frameRate); - //override API static void dsHdmiVideoModeEventHandler(const char *owner, IARM_EventId_t eventId, void *data, size_t len); static void dsHdmiStatusEventHandler(const char *owner, IARM_EventId_t eventId, void *data, size_t len); @@ -313,6 +708,7 @@ class AVOutputTV : public AVOutputBase { void DeinitializeIARM(); }; + }//namespace Plugin }//namespace WPEFramework #endif diff --git a/AVOutput/AVOutputTVHelper.cpp b/AVOutput/AVOutputTVHelper.cpp index d5536fbf..22bbdd31 100644 --- a/AVOutput/AVOutputTVHelper.cpp +++ b/AVOutput/AVOutputTVHelper.cpp @@ -22,6 +22,8 @@ #include "UtilsIarm.h" #include "rfcapi.h" +#define CAPABLITY_FILE_NAME "pq_capabilities.ini" + static std::map supportedSourcemap; static std::map supportedPictureModemap; static std::map supportedFormatmap; @@ -30,36 +32,6 @@ static bool m_isDalsEnabled = false; namespace WPEFramework { namespace Plugin { - tvContentFormatType_t AVOutputTV::getContentFormatIndex(tvVideoHDRFormat_t formatToConvert) - { - /* default to SDR always*/ - tvContentFormatType_t ret = tvContentFormatType_NONE; - switch(formatToConvert) { - case tvVideoHDRFormat_HLG: - ret = tvContentFormatType_HLG; - break; - - case tvVideoHDRFormat_HDR10: - ret = tvContentFormatType_HDR10; - break; - - case tvVideoHDRFormat_HDR10PLUS: - ret = tvContentFormatType_HDR10PLUS; - break; - - case tvVideoHDRFormat_DV: - ret = tvContentFormatType_DOVI; - break; - - case tvVideoHDRFormat_SDR: - case tvVideoHDRFormat_NONE: - default: - ret = tvContentFormatType_SDR; - break; - } - return ret; - } - int AVOutputTV::getPictureModeIndex(std::string pqparam) { int index = -1; @@ -104,19 +76,14 @@ namespace Plugin { int AVOutputTV::getPqParamIndex() { - std::vector localpq; - std::vector localformat; - std::vector localsource; - std::vector localrange; - std::string platformsupport; - std::vector index; + + capVectors_t info; - tvError_t ret = getParamsCaps(localrange, localpq, localformat, localsource, - "VideoSource", platformsupport, index); + tvError_t ret = getParamsCaps("VideoSource", info); if (ret == tvERROR_NONE) { - if (localrange.size() == index.size()) { - for (unsigned int i = 0; i< localrange.size(); i++) { - supportedSourcemap[localrange[i]] = stoi(index[i]); + if (info.rangeVector.size() == info.indexVector.size()) { + for (unsigned int i = 0; i< 
info.rangeVector.size(); i++) {
+ supportedSourcemap[info.rangeVector[i]] = stoi(info.indexVector[i]);
}
}
}
@@ -124,28 +91,18 @@ namespace Plugin {
LOGERR("%s: Failed to fetch the source index \n", __FUNCTION__);
return -1;
}
- if (!localpq.empty()) {
- localpq.clear();
- }
- if (!localformat.empty()) {
- localformat.clear();
- }
- if (!localsource.empty()) {
- localsource.clear();
- }
- if (!localrange.empty()) {
- localrange.clear();
- }
- if(!index.empty()) {
- index.clear();
- }
-
- ret = getParamsCaps(localrange, localpq, localformat, localsource,
- "PictureMode", platformsupport, index);
+
+ info.pqmodeVector.clear();
+ info.sourceVector.clear();
+ info.formatVector.clear();
+ info.indexVector.clear();
+ info.rangeVector.clear();
+
+ ret = getParamsCaps("PictureMode", info);
if (ret == tvERROR_NONE) {
- if (localrange.size() == index.size()) {
- for (unsigned int i = 0; i< localrange.size(); i++) {
- supportedPictureModemap[localrange[i]] = stoi(index[i]);
+ if (info.rangeVector.size() == info.indexVector.size()) {
+ for (unsigned int i = 0; i< info.rangeVector.size(); i++) {
+ supportedPictureModemap[info.rangeVector[i]] = stoi(info.indexVector[i]);
}
}
}
@@ -153,28 +110,18 @@ namespace Plugin {
LOGERR("%s: Failed to fetch the picture index \n", __FUNCTION__);
return -1;
}
- if (!localpq.empty()) {
- localpq.clear();
- }
- if (!localformat.empty()) {
- localformat.clear();
- }
- if (!localsource.empty()) {
- localsource.clear();
- }
- if (!localrange.empty()) {
- localrange.clear();
- }
- if(!index.empty()) {
- index.clear();
- }
-
- ret = getParamsCaps(localrange, localpq, localformat, localsource,
- "VideoFormat", platformsupport, index);
+
+ info.pqmodeVector.clear();
+ info.sourceVector.clear();
+ info.formatVector.clear();
+ info.indexVector.clear();
+ info.rangeVector.clear();
+
+ ret = getParamsCaps( "VideoFormat", info);
if (ret == tvERROR_NONE) {
- if (localrange.size() == index.size()) {
- for (unsigned int i = 0; i< localrange.size(); i++) {
- supportedFormatmap[localrange[i]] = stoi(index[i]);
+ if ( info.rangeVector.size() == info.indexVector.size()) {
+ for (unsigned int i = 0; i< info.rangeVector.size(); i++) {
+ supportedFormatmap[info.rangeVector[i]] = stoi(info.indexVector[i]);
}
}
}
@@ -186,50 +133,101 @@ namespace Plugin {
return 0;
}
- int AVOutputTV::getParamIndex(string source,string pqmode,string format,int& sourceIndex,int& pqmodeIndex,int& formatIndex)
+ int AVOutputTV::getParamIndex(std::string param, capDetails_t& paramInfo, paramIndex_t& indexInfo)
{
- LOGINFO("Entry : %s pqmode : %s source :%s format :%s\n",__FUNCTION__,pqmode.c_str(),source.c_str(),format.c_str());
+ LOGINFO("Entry : %s param : %s pqmode : %s source :%s format :%s\n",__FUNCTION__,param.c_str(),paramInfo.pqmode.c_str(),paramInfo.source.c_str(),paramInfo.format.c_str());
- if( source.compare("none") == 0 || source.compare("Current") == 0 ) {
+ if( paramInfo.source.compare("none") == 0 || paramInfo.source.compare("Current") == 0 ) {
tvVideoSrcType_t currentSource = VIDEO_SOURCE_IP;
GetCurrentVideoSource(&currentSource);
- sourceIndex = (int)currentSource;
+ indexInfo.sourceIndex = (int)currentSource;
}
else {
- sourceIndex = getSourceIndex(source);
+ indexInfo.sourceIndex = getSourceIndex(paramInfo.source);
}
- if( pqmode.compare("none") == 0 || pqmode.compare("Current") == 0) {
+ if( paramInfo.pqmode.compare("none") == 0 || paramInfo.pqmode.compare("Current") == 0) {
char picMode[PIC_MODE_NAME_MAX]={0};
if(!getCurrentPictureMode(picMode)) {
LOGERR("Failed to get the Current picture mode\n");
}
else {
std::string local = picMode;
- pqmodeIndex = getPictureModeIndex(local);
+ indexInfo.pqmodeIndex = getPictureModeIndex(local);
}
}
else {
- pqmodeIndex = getPictureModeIndex(pqmode);
+ indexInfo.pqmodeIndex = getPictureModeIndex(paramInfo.pqmode);
}
- if( format.compare("none") == 0 || format.compare("Current") == 0) {
+ if( paramInfo.format.compare("none") == 0 || paramInfo.format.compare("Current") == 0) {
tvVideoFormatType_t currentFormat = VIDEO_FORMAT_NONE;
GetCurrentVideoFormat(&currentFormat);
if( VIDEO_FORMAT_NONE == currentFormat ) {
- formatIndex = VIDEO_FORMAT_SDR;
+ indexInfo.formatIndex = VIDEO_FORMAT_SDR;
}
else {
- formatIndex = (int)currentFormat;
+ indexInfo.formatIndex = (int)currentFormat;
}
}
else {
- formatIndex = getFormatIndex(format);
+ indexInfo.formatIndex = getFormatIndex(paramInfo.format);
}
- if (sourceIndex == -1 || pqmodeIndex == -1 || formatIndex == -1) {
- return -1;
- }
- LOGINFO("%s: Exit sourceIndex = %d pqmodeIndex = %d formatIndex = %d\n",__FUNCTION__,sourceIndex,pqmodeIndex,formatIndex);
+ if(param == "CMS")
+ {
+ tvDataComponentColor_t level = tvDataColor_NONE;
+ if ( getCMSColorEnumFromString(paramInfo.color,level ) == -1 ) {
+ LOGERR("%s : GetColorEnumFromString Failed!!! ",__FUNCTION__);
+ return -1;
+ }
+
+ indexInfo.colorIndex = level;
+
+ tvComponentType_t componentLevel;
+ if ( getCMSComponentEnumFromString(paramInfo.component,componentLevel ) == -1 ) {
+ LOGERR("%s : GetComponentEnumFromString Failed!!! ",__FUNCTION__);
+ return -1;
+ }
+
+ indexInfo.componentIndex = componentLevel;
+
+ LOGINFO("%s colorIndex : %d , componentIndex : %d\n",__FUNCTION__,indexInfo.colorIndex, indexInfo.componentIndex);
+ }
+
+ if(param == "WhiteBalance")
+ {
+ tvWBColor_t level;
+ if ( getWBColorEnumFromString(paramInfo.color,level ) == -1 ) {
+ LOGERR("%s : GetColorEnumFromString Failed!!! ",__FUNCTION__);
+ return -1;
+ }
+
+ indexInfo.colorIndex = level;
+
+ tvWBControl_t controlLevel;
+ if ( getWBControlEnumFromString(paramInfo.control,controlLevel ) == -1 ) {
+ LOGERR("%s : GetComponentEnumFromString Failed!!! ",__FUNCTION__);
+ return -1;
+ }
+
+ indexInfo.controlIndex = controlLevel;
+
+ /*tvColorTemp_t colorTemp;
+ if ( getColorTempEnumFromString(paramInfo.colorTemperature,colorTemp ) == -1 ) {
+ LOGERR("%s : GetComponentEnumFromString Failed!!! 
",__FUNCTION__); + return -1; + } + + indexInfo.colorTempIndex = colorTemp; */ + + LOGINFO("%s colorIndex : %d , controlIndex : %d \n",__FUNCTION__,indexInfo.colorIndex, indexInfo.controlIndex); + + } + + if (indexInfo.sourceIndex == -1 || indexInfo.pqmodeIndex == -1 || indexInfo.formatIndex == -1) { + return -1; + } + LOGINFO("%s: Exit sourceIndex = %d pqmodeIndex = %d formatIndex = %d\n",__FUNCTION__,indexInfo.sourceIndex,indexInfo.pqmodeIndex,indexInfo.formatIndex); return 0; } @@ -237,30 +235,63 @@ namespace Plugin { int AVOutputTV::getDolbyModeIndex(const char * dolbyMode) { int mode = 0; - tvDolbyMode_t dolbyModes[tvMode_Max]; - tvDolbyMode_t *dolbyModesPtr = dolbyModes; // Pointer to statically allocated tvDolbyMode_t array + tvDolbyMode_t dolbyModes[tvMode_Max] = { tvDolbyMode_Invalid }; + tvDolbyMode_t *dolbyModesPtr[tvMode_Max] = { 0 }; unsigned short totalAvailable = 0; + for (int i = 0; i < tvMode_Max; i++) + { + dolbyModesPtr[i] = &dolbyModes[i]; + } + // Set an initial value to indicate the mode type dolbyModes[0] = tvDolbyMode_Dark; - tvError_t ret = GetTVSupportedDolbyVisionModes(&dolbyModesPtr, &totalAvailable); + tvError_t ret = GetTVSupportedDolbyVisionModes(dolbyModesPtr, &totalAvailable); if (ret == tvERROR_NONE) { for (int count = 0; count < totalAvailable; count++) { if(strncasecmp(dolbyMode, getDolbyModeStringFromEnum(dolbyModes[count]).c_str(), strlen(dolbyMode))==0) { mode = dolbyModes[count]; break; } - } } else { mode = -1; printf("(%s):get supported mode is failed\n", __func__); } - return mode; } + int AVOutputTV::getHDRModeIndex(const std::string HDRMode, const std::string format,tvDolbyMode_t &value) { + // Create a map to associate format-mode pairs with enum values + int ret = 0; + static const std::unordered_map hdrModeIndexMap = { + {"DVDark", tvDolbyMode_Dark}, + {"DVBright", tvDolbyMode_Bright}, + {"DVGame", tvDolbyMode_Game}, + {"HDR10Dark", tvHDR10Mode_Dark}, + {"HDR10Bright", tvHDR10Mode_Bright}, + {"HDR10Game", tvHDR10Mode_Game}, + {"HLGDark", tvHLGMode_Dark}, + {"HLGBright", tvHLGMode_Bright}, + {"HLGGame", tvHLGMode_Game} + }; + + // Create the key by concatenating the format and HDRMode + std::string key = format+HDRMode; + + // Look up the key in the map + auto it = hdrModeIndexMap.find(key); + if (it != hdrModeIndexMap.end()) { + value = it->second; + ret = 0; + } else { + LOGERR("%s : Invalid format/mode\n",__FUNCTION__); + ret = -1; + } + return ret; + } + tvDimmingMode_t AVOutputTV::getDimmingModeIndex(std::string mode) { tvDimmingMode_t index = tvDimmingMode_MAX; @@ -337,21 +368,16 @@ namespace Plugin { int AVOutputTV::isPlatformSupport(std::string pqparam) { - std::vector range; - std::vector sourceVec; - std::vector pqmodeVec; - std::vector formatVec; - std::string isPlatformSupport; - std::vector index; + capVectors_t vectorInfo; - tvError_t ret = getParamsCaps(range, pqmodeVec, sourceVec, formatVec, pqparam, isPlatformSupport, index); + tvError_t ret = getParamsCaps(pqparam,vectorInfo); if (ret != tvERROR_NONE) { LOGINFO("%s: failed to get the capability \n", __FUNCTION__); return -1; } else { - if(isPlatformSupport.compare("true") != 0) { + if(vectorInfo.isPlatformSupportVector[0].compare("true") != 0) { LOGERR("%s: platform support not available\n", __FUNCTION__); return -1; } @@ -359,51 +385,37 @@ namespace Plugin { return 0; } - void AVOutputTV::spliltCapablities( std::vector &range,std::vector &pqmode,std::vector &format, - std::vector &source, std::vector &index, std::string rangeInfo, - std::string pqmodeInfo, std::string 
formatInfo, std::string sourceInfo, std::string indexInfo) + void AVOutputTV::spliltCapablities( capVectors_t& vectorInfo, capDetails_t stringInfo) { - std::string token; - std::stringstream rangeStream(rangeInfo); - std::stringstream pqmodeStream(pqmodeInfo); - std::stringstream formatStream(formatInfo); - std::stringstream sourceStream(sourceInfo); - std::stringstream indexStream(indexInfo); - - while( getline(rangeStream,token,',')) { - range.push_back(token ); - token.clear(); - } - - while( getline(pqmodeStream,token,',') ) { - pqmode.push_back(token ); - token.clear(); - } - - while( getline(formatStream,token,',')) { - format.push_back( token ); - token.clear(); - } - while( getline(sourceStream,token,',') ) { - source.push_back( token ); - token.clear(); - } - - while( getline(indexStream,token,',') ) { - index.push_back( token ); - token.clear(); + std::vector&>> streamVector; + + // Initializing the streamVector with stringstreams and corresponding vectors + streamVector.push_back({std::stringstream(stringInfo.range), vectorInfo.rangeVector}); + streamVector.push_back({std::stringstream(stringInfo.pqmode), vectorInfo.pqmodeVector}); + streamVector.push_back({std::stringstream(stringInfo.format), vectorInfo.formatVector}); + streamVector.push_back({std::stringstream(stringInfo.source), vectorInfo.sourceVector}); + streamVector.push_back({std::stringstream(stringInfo.isPlatformSupport), vectorInfo.isPlatformSupportVector}); + streamVector.push_back({std::stringstream(stringInfo.index), vectorInfo.indexVector}); + streamVector.push_back({std::stringstream(stringInfo.color), vectorInfo.colorVector}); + streamVector.push_back({std::stringstream(stringInfo.component), vectorInfo.componentVector}); + streamVector.push_back({std::stringstream(stringInfo.colorTemperature), vectorInfo.colorTempVector}); + streamVector.push_back({std::stringstream(stringInfo.control), vectorInfo.controlVector}); + + for (auto& pair : streamVector) { + std::stringstream& ss = pair.first; + std::vector& vec = pair.second; + + std::string token; + while (getline(ss, token, ',')) { + vec.push_back(token); + } } } - - bool AVOutputTV::isCapablityCheckPassed( std::string pqmodeInputInfo,std::string sourceInputInfo,std::string formatInputInfo,std::string param ) + + bool AVOutputTV::isCapablityCheckPassed( std::string param, capDetails_t inputInfo ) { - std::string rangeCapInfo; - std::string sourceCapInfo; - std::string formatCapInfo; - std::string pqmodeCapInfo; - std::string isPlatformSupport; - std::string indexInfo; + capDetails_t paramInfo; std::set pqmodeCapSet; std::set formatCapSet; @@ -412,17 +424,38 @@ namespace Plugin { std::set formatInputSet; std::set sourceInputSet; - if( ReadCapablitiesFromConf( rangeCapInfo, pqmodeCapInfo, formatCapInfo, sourceCapInfo,param, isPlatformSupport, indexInfo) ) { + + if( ReadCapablitiesFromConf( param, paramInfo ) != 0 ) { LOGINFO( "%s: readCapablitiesFromConf Failed !!!\n",__FUNCTION__); return false; } + + if( param == "CMS") + { + // Check color + if (! checkCMSColorAndComponentCapability(paramInfo.color, inputInfo.color)) { + LOGINFO( "%s:CMS color Capablity Failed CapColor : %s inputColor : %s!!!\n",__FUNCTION__,paramInfo.color.c_str(), inputInfo.color.c_str()); + return false; + } + + // Check component + if (! 
checkCMSColorAndComponentCapability(paramInfo.component, inputInfo.component)) { + LOGINFO( "%s:CMS component Capablity capComponent : %s inputComponent : %s Failed!!!.\n",__FUNCTION__,paramInfo.component.c_str(), inputInfo.component.c_str()); + return false; + } + } + else if( param == "WhiteBalance") + { + if ( ( paramInfo.color.find(inputInfo.color) == std::string::npos ) || ( paramInfo.control.find(inputInfo.control) == std::string::npos) ) + return false; + } //Compare capablityInfo with Input params //1.convertCapablity Info to set for comparison - spliltStringsAndConvertToSet( pqmodeCapInfo, formatCapInfo, sourceCapInfo, pqmodeCapSet, formatCapSet, sourceCapset); + spliltStringsAndConvertToSet( paramInfo.pqmode, paramInfo.format, paramInfo.source, pqmodeCapSet, formatCapSet, sourceCapset); //2.convert Application Input Info to set for comparison - spliltStringsAndConvertToSet( pqmodeInputInfo, formatInputInfo, sourceInputInfo, pqmodeInputSet, formatInputSet, sourceInputSet ); + spliltStringsAndConvertToSet( inputInfo.pqmode, inputInfo.format, inputInfo.source, pqmodeInputSet, formatInputSet, sourceInputSet ); //3.Compare Each pqmode/format/source InputInfo against CapablityInfo if ( isIncluded(pqmodeCapSet,pqmodeInputSet) && isIncluded(formatCapSet,formatInputSet) && isIncluded(sourceCapset,sourceInputSet) ) { @@ -435,8 +468,7 @@ namespace Plugin { } } - int AVOutputTV::parsingSetInputArgument(const JsonObject& parameters, std::string pqparam, std::string & source, - std::string & pqmode, std::string & format) { + int AVOutputTV::parsingSetInputArgument(const JsonObject& parameters, std::string pqparam,capDetails_t& paramInfo) { JsonArray sourceArray; JsonArray pqmodeArray; @@ -445,39 +477,60 @@ namespace Plugin { pqmodeArray = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].Array() : JsonArray(); for (int i = 0; i < pqmodeArray.Length(); ++i) { - pqmode += pqmodeArray[i].String(); + paramInfo.pqmode += pqmodeArray[i].String(); if (i != (pqmodeArray.Length() - 1) ) { - pqmode += ","; + paramInfo.pqmode += ","; } } sourceArray = parameters.HasLabel("videoSource") ? parameters["videoSource"].Array() : JsonArray(); for (int i = 0; i < sourceArray.Length(); ++i) { - source += sourceArray[i].String(); + paramInfo.source += sourceArray[i].String(); if (i != (sourceArray.Length() - 1) ) { - source += ","; - } + paramInfo.source += ","; + } } formatArray = parameters.HasLabel("videoFormat") ? 
parameters["videoFormat"].Array() : JsonArray(); for (int i = 0; i < formatArray.Length(); ++i) { - format += formatArray[i].String(); + paramInfo.format += formatArray[i].String(); if (i != (formatArray.Length() - 1) ) { - format += ","; + paramInfo.format += ","; } } - if (source.empty()) { - source = "Global"; - } - if (pqmode.empty()) { - pqmode = "Global"; - } - if (format.empty()) { - format = "Global"; - } + if (paramInfo.source.empty()) { + paramInfo.source = "Global"; + } + if (paramInfo.pqmode.empty()) { + paramInfo.pqmode = "Global"; + } + if (paramInfo.format.empty()) { + paramInfo.format = "Global"; + } + + if( pqparam.compare("WhiteBalance") == 0 ) + { + if ( paramInfo.color.empty() ) + paramInfo.color = "Global"; + + if ( paramInfo.control.empty() ) + paramInfo.control = "Global"; - if (convertToValidInputParameter(pqparam, source, pqmode, format) != 0) { + if ( paramInfo.colorTemperature.empty() ) + paramInfo.colorTemperature = "Global"; + } + + if( pqparam.compare("CMS") == 0 ) + { + if ( paramInfo.color.empty() ) + paramInfo.color = "Global"; + + if ( paramInfo.component.empty() ) + paramInfo.component = "Global"; + } + + if (convertToValidInputParameter(pqparam, paramInfo) != 0) { LOGERR("%s: Failed to convert the input paramters. \n", __FUNCTION__); return -1; } @@ -485,30 +538,30 @@ namespace Plugin { return 0; } - int AVOutputTV::parsingGetInputArgument(const JsonObject& parameters, std::string pqparam, - std::string & source, std::string & pqmode, std::string & format) { - pqmode = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].String() : ""; + int AVOutputTV::parsingGetInputArgument(const JsonObject& parameters, std::string pqparam, capDetails_t& info) + { + info.pqmode = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].String() : ""; - source = parameters.HasLabel("videoSource") ? parameters["videoSource"].String() : ""; + info.source = parameters.HasLabel("videoSource") ? parameters["videoSource"].String() : ""; - format = parameters.HasLabel("videoFormat") ? parameters["videoFormat"].String() : ""; + info.format = parameters.HasLabel("videoFormat") ? parameters["videoFormat"].String() : ""; - if ( (source.compare("Global") == 0) || (pqmode.compare("Global") == 0) || (format.compare("Global") == 0) ) { + if ( (info.source.compare("Global") == 0) || (info.pqmode.compare("Global") == 0) || (info.format.compare("Global") == 0) ) { LOGERR("%s: get cannot fetch the Global inputs \n", __FUNCTION__); return -1; } - if (source.empty()) { - source = "Current"; - } - if (pqmode.empty()) { - pqmode = "Current"; - } - if (format.empty()) { - format = "Current"; - } + if (info.source.empty()) { + info.source = "Current"; + } + if (info.pqmode.empty()) { + info.pqmode = "Current"; + } + if (info.format.empty()) { + info.format = "Current"; + } - if (convertToValidInputParameter(pqparam,source, pqmode, format) != 0) { + if (convertToValidInputParameter(pqparam,info) != 0) { LOGERR("%s: Failed to convert the input paramters. 
\n", __FUNCTION__); return -1; } @@ -541,13 +594,8 @@ namespace Plugin { int AVOutputTV::validateIntegerInputParameter(std::string param, int inputValue) { - - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; - - tvError_t ret = getParamsCaps(range, pqmode, source, format, param); + capVectors_t info; + tvError_t ret = getParamsCaps(param, info); if (ret != tvERROR_NONE) { LOGERR("Failed to fetch the range capability[%s] \n", param.c_str()); @@ -557,9 +605,9 @@ namespace Plugin { if ( (param == "Brightness") || (param == "Contrast") || (param == "Sharpness") || (param == "Saturation") || (param == "Hue") || (param == "WhiteBalance") || - (param == "ComponentSaturation") || (param == "Backlight") || - (param == "ComponentHue") || (param == "ComponentLuma") || (param == "LowLatencyState") ) { - if (inputValue < stoi(range[0]) || inputValue > stoi(range[1])) { + (param == "CMS") || (param == "Backlight") || + (param == "WhiteBalance") || (param == "LowLatencyState") ) { + if (inputValue < stoi(info.rangeVector[0]) || inputValue > std::stoi(info.rangeVector[1])) { LOGERR("wrong Input value[%d]", inputValue); return -1; } @@ -567,32 +615,45 @@ namespace Plugin { return 0; } - int AVOutputTV::fetchCapablities(string pqparam, string & source, string & pqmode, string & format) { + int AVOutputTV::fetchCapablities(string pqparam, capDetails_t& info) { - std::vector range; - std::vector sourceVec; - std::vector pqmodeVec; - std::vector formatVec; + capVectors_t vectorInfo; tvError_t ret = tvERROR_NONE; - ret = getParamsCaps(range, pqmodeVec, sourceVec, formatVec, pqparam); + ret = getParamsCaps(pqparam, vectorInfo); if (ret != tvERROR_NONE) { LOGINFO("%s: failed to get the capability \n", __FUNCTION__); return -1; } - if (sourceVec.size() != 0) { - source = convertToString(sourceVec); + if (vectorInfo.sourceVector.size() != 0) { + info.source = convertToString(vectorInfo.sourceVector); + } + + if (vectorInfo.pqmodeVector.size() != 0) { + info.pqmode = convertToString(vectorInfo.pqmodeVector); } - if (pqmodeVec.size() != 0) { - pqmode = convertToString(pqmodeVec); + if (vectorInfo.formatVector.size() != 0) { + info.format = convertToString(vectorInfo.formatVector); } - if (formatVec.size() != 0) { - format = convertToString(formatVec); + if (vectorInfo.colorVector.size() != 0) { + info.color = convertToString(vectorInfo.colorVector); + } + + if (vectorInfo.componentVector.size() != 0) { + info.component = convertToString(vectorInfo.componentVector); + } + + if (vectorInfo.controlVector.size() != 0) { + info.control = convertToString(vectorInfo.controlVector); + } + + if (vectorInfo.colorTempVector.size() != 0) { + info.colorTemperature = convertToString(vectorInfo.colorTempVector); } return 0; @@ -601,12 +662,9 @@ namespace Plugin { int AVOutputTV::validateInputParameter(std::string param, std::string inputValue) { - std::vector range; - std::vector pqmode; - std::vector source; - std::vector format; + capVectors_t info; - tvError_t ret = getParamsCaps(range, pqmode, source, format, param); + tvError_t ret = getParamsCaps( param, info); if (ret != tvERROR_NONE) { LOGERR("Failed to fetch the range capability[%s] \n", param.c_str()); @@ -614,12 +672,12 @@ namespace Plugin { } if ( (param == "ColorTemperature") || - (param == "DimmingMode") || (param == "AutoBacklightControl") || + (param == "DimmingMode") || (param == "AutoBacklightMode") || (param == "DolbyVisionMode") || (param == "HDR10Mode") || - (param == "HLGMode") || (param == "AspectRatio") || (param == 
"PictureMode") ) { - auto iter = find(range.begin(), range.end(), inputValue); + (param == "HLGMode") || (param == "AspectRatio") || (param == "PictureMode") ) { + auto iter = find(info.rangeVector.begin(), info.rangeVector.end(), inputValue); - if (iter == range.end()) { + if (iter == info.rangeVector.end()) { LOGERR("Not a valid input value[%s].\n", inputValue.c_str()); return -1; } @@ -679,7 +737,7 @@ namespace Plugin { GetCurrentVideoSource(¤t_source); tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "."+convertSourceIndexToString(current_source)+"."+"Format."+convertVideoFormatToString(current_format)+"."+"PictureModeString"; + tr181_param_name += "."+convertSourceIndexToStringV2(current_source)+"."+"Format."+convertVideoFormatToStringV2(current_format)+"."+"PictureModeString"; tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); if ( tr181Success == err ) { ret = SetTVPictureMode(param.value); @@ -709,49 +767,36 @@ namespace Plugin { return result; } - int AVOutputTV::convertToValidInputParameter(std::string pqparam, std::string & source, std::string & pqmode, std::string & format) + int AVOutputTV::convertToValidInputParameter(std::string pqparam, capDetails_t& info) { - LOGINFO("Entry %s source %s pqmode %s format %s \n", __FUNCTION__, source.c_str(), pqmode.c_str(), format.c_str()); + LOGINFO("Entry %s source %s pqmode %s format %s \n", __FUNCTION__, info.source.c_str(), info.pqmode.c_str(), info.format.c_str()); + capDetails_t localInfo; + if (fetchCapablities(pqparam, localInfo) != 0) { + LOGINFO("%s, Failed to get capability fo %s\n", __FUNCTION__,pqparam.c_str()); + return -1; + } + // converting pq to valid paramter format - if (pqmode == "Global") { - std::string localSource; - std::string localPqmode; - std::string localFormat; - if (fetchCapablities(pqparam, localSource, localPqmode, localFormat) == 0) { - pqmode = localPqmode; - //if pqmode none from capabilty then lets keep pqmode as global to fail the capabilty - } - else { - LOGINFO("%s, Failed to get picturemode capability\n", __FUNCTION__); - return -1; - } + if (info.pqmode == "Global") { + info.pqmode = localInfo.pqmode; } - else if (pqmode == "Current") { + else if (info.pqmode == "Current") { char picMode[PIC_MODE_NAME_MAX]={0}; if(!getCurrentPictureMode(picMode)) { LOGINFO("Failed to get the Current picture mode\n"); return -1; } else { - pqmode = picMode; + info.pqmode = picMode; } } - if (source == "Global") { - std::string localSource; - std::string localPqmode; - std::string localFormat; - if (fetchCapablities(pqparam, localSource, localPqmode, localFormat) == 0) { - source = localSource; - } - else { - LOGINFO("%s, Failed to get source capability\n", __FUNCTION__); - return -1; - } + if (info.source == "Global") { + info.source = localInfo.source; } - else if (source == "Current") { + else if (info.source == "Current") { tvVideoSrcType_t currentSource = VIDEO_SOURCE_IP; tvError_t ret = GetCurrentVideoSource(¤tSource); @@ -759,32 +804,60 @@ namespace Plugin { LOGWARN("%s: GetCurrentVideoSource( ) Failed \n",__FUNCTION__); return -1; } - source = convertSourceIndexToString(currentSource); + info.source = convertSourceIndexToString(currentSource); } //convert format into valid parameter - if (format == "Global") { - std::string localSource; - std::string localPqmode; - std::string localFormat; - if (fetchCapablities(pqparam, localSource, localPqmode, localFormat) == 0) { - format = localFormat; - } - else { - LOGINFO("%s, Failed 
to get format capability\n", __FUNCTION__); - return -1; - } + if (info.format == "Global") { + info.format = localInfo.format; } - else if (format == "Current") { + else if (info.format == "Current") { tvVideoFormatType_t formatIndex = VIDEO_FORMAT_NONE; GetCurrentVideoFormat(&formatIndex); if ( formatIndex == VIDEO_FORMAT_NONE) { formatIndex = VIDEO_FORMAT_SDR; - } - format = convertVideoFormatToString(formatIndex); + } + info.format = convertVideoFormatToString(formatIndex); + } + + //convert WB and CMS params + if( pqparam.compare("WhiteBalance") == 0 ) + { + if( info.control.compare("Global") == 0 ) + { + info.control = localInfo.control; + } + + if( info.color.compare("Global") == 0 ) + { + info.color = localInfo.color; + } + + if( info.colorTemperature.compare("Global") == 0 ) + { + info.colorTemperature= localInfo.colorTemperature; + } + + LOGINFO("%s : control : %s color : %s colorTemp : %s \n",__FUNCTION__,info.control.c_str(),info.color.c_str(),info.colorTemperature.c_str()); + + } + + if( pqparam.compare("CMS") == 0 ) + { + if( info.component.compare("Global") == 0 ) + { + info.component = localInfo.component; + } + + if( info.color.compare("Global") == 0 ) + { + info.color = localInfo.color; + } + + LOGINFO("%s : component : %s color : %s \n",__FUNCTION__,info.component.c_str(),info.color.c_str()); } - LOGINFO("Exit %s source %s pqmode %s format %s \n", __FUNCTION__, source.c_str(), pqmode.c_str(), format.c_str()); + LOGINFO("Exit %s source %s pqmode %s format %s \n", __FUNCTION__, info.source.c_str(), info.pqmode.c_str(), info.format.c_str()); return 0; } @@ -827,34 +900,73 @@ namespace Plugin { return ret; } - tvContentFormatType_t AVOutputTV::convertFormatStringToTVContentFormat(const char *format) + tvError_t AVOutputTV::updateAVoutputTVParamToHALV2(std::string forParam, paramIndex_t indexInfo, int value, bool setNotDelete) { - tvContentFormatType_t ret = tvContentFormatType_SDR; + tvError_t ret = tvERROR_NONE; + std::string key; - if( strncmp(format,"sdr",strlen(format)) == 0 || strncmp(format,"SDR",strlen(format)) == 0 ) { - ret = tvContentFormatType_SDR; - } - else if( strncmp(format,"hdr10",strlen(format)) == 0 || strncmp(format,"HDR10",strlen(format))==0 ) { - ret = tvContentFormatType_HDR10; - } - else if( strncmp(format,"hlg",strlen(format)) == 0 || strncmp(format,"HLG",strlen(format)) == 0 ) { - ret = tvContentFormatType_HLG; - } - else if( strncmp(format,"dolby",strlen(format)) == 0 || strncmp(format,"DOLBY",strlen(format)) == 0 ) { - ret=tvContentFormatType_DOVI; - } + // Generate storage key based on parameter type + if (forParam == "CMS") + generateStorageIdentifierCMS(key, forParam, indexInfo); + else if (forParam == "WhiteBalance") + generateStorageIdentifierWB(key, forParam, indexInfo); + else + generateStorageIdentifierV2(key, forParam, indexInfo); + + if (key.empty()) { + LOGERR("%s generateStorageIdentifier failed\n", __FUNCTION__); + return tvERROR_GENERAL; + } + + tr181ErrorCode_t err = tr181Success; + + if (setNotDelete) { + std::string toStore = std::to_string(value); + + // Map parameters to their string transformation logic (if applicable) + std::map> fnMap = { + {"ColorTemp", [this](int v, std::string& s) { getColorTempStringFromEnum(v, s); }}, + {"DimmingMode", [this](int v, std::string& s) { getDimmingModeStringFromEnum(v, s); }}, + {"AspectRatio", [this](int v, std::string& s) { getDisplayModeStringFromEnum(v, s); }}, + {"BacklightMode", [this](int v, std::string& s) { getBacklightModeStringFromEnum(v, s); }} + }; + + // If there's a custom 
string conversion for this parameter, apply it + auto it = fnMap.find(forParam); + if (it != fnMap.end()) { + it->second(value, toStore); + } + // Set the value using TR-181 + err = setLocalParam(rfc_caller_id, key.c_str(), toStore.c_str()); + } + else + { + // Delete the value using TR-181 + err = clearLocalParam(rfc_caller_id, key.c_str()); + } + + if (err != tr181Success) { + LOGERR("%s: %s for %s Failed : %s\n",__FUNCTION__, setNotDelete ? "Set" : "Delete", key.c_str(), getTR181ErrorString(err)); + ret = tvERROR_GENERAL; + } return ret; } - tvError_t AVOutputTV::updateAVoutputTVParamToHAL(std::string forParam, int source, int pqmode, int format, int value,bool setNotDelete) + tvError_t AVOutputTV::updateAVoutputTVParamToHAL(std::string forParam, paramIndex_t indexInfo, int value,bool setNotDelete) { tvError_t ret = tvERROR_NONE; std::string key; - generateStorageIdentifier(key,forParam,format,pqmode,source); + if( forParam.compare("CMS") == 0 ) + generateStorageIdentifierCMS(key,forParam,indexInfo); + else if( forParam.compare("WhiteBalance") == 0 ) + generateStorageIdentifierWB(key,forParam,indexInfo); + else + generateStorageIdentifier(key,forParam,indexInfo); + if(key.empty()) { - LOGERR("generateStorageIdentifierDirty failed\n"); + LOGERR("%s generateStorageIdentifierDirty failed\n", __FUNCTION__); ret = tvERROR_GENERAL; } else { @@ -867,13 +979,14 @@ namespace Plugin { else if(forParam.compare("DimmingMode") == 0 ) { getDimmingModeStringFromEnum(value, toStore); } - else if (forParam.compare("DolbyVisionMode") == 0 ) { + else if (forParam.compare("DolbyVisionMode") == 0 || forParam.compare("HDRMode") == 0 ) { toStore = getDolbyModeStringFromEnum((tvDolbyMode_t)value); } err = setLocalParam(rfc_caller_id, key.c_str(),toStore.c_str()); } - else { + else + { err = clearLocalParam(rfc_caller_id, key.c_str()); } @@ -885,9 +998,11 @@ namespace Plugin { return ret; } - int AVOutputTV::updateAVoutputTVParam( std::string action, std::string tr181ParamName, std::string pqmode, std::string source, std::string format, tvPQParameterIndex_t pqParamIndex, int params[] ) + int AVOutputTV::updateAVoutputTVParam( std::string action, std::string tr181ParamName, capDetails_t info, tvPQParameterIndex_t pqParamIndex, int level ) { LOGINFO("Entry : %s\n",__FUNCTION__); + valueVectors_t values; + paramIndex_t paramIndex; std::vector sources; std::vector pictureModes; std::vector formats; @@ -896,14 +1011,15 @@ namespace Plugin { bool reset = !(action.compare("reset")); bool set = !(action.compare("set")); - LOGINFO("%s: Entry param : %s Action : %s pqmode : %s source :%s format :%s\n",__FUNCTION__,tr181ParamName.c_str(),action.c_str(),pqmode.c_str(),source.c_str(),format.c_str() ); - ret = getSaveConfig(pqmode, source, format, sources, pictureModes, formats); + LOGINFO("%s: Entry param : %s Action : %s pqmode : %s source :%s format :%s color:%s component:%s control:%s\n",__FUNCTION__,tr181ParamName.c_str(),action.c_str(),info.pqmode.c_str(),info.source.c_str(),info.format.c_str(),info.color.c_str(),info.component.c_str(),info.control.c_str() ); + ret = getSaveConfig(tr181ParamName,info, values); if( 0 == ret ) { - for(int sourceType: sources) { - tvVideoSrcType_t source = (tvVideoSrcType_t)sourceType; - for(int mode : pictureModes) { - for(int formatType : formats) { - tvVideoFormatType_t format = (tvVideoFormatType_t)formatType; + for( int sourceType: values.sourceValues ) { + paramIndex.sourceIndex = sourceType; + for( int modeType : values.pqmodeValues ) { + paramIndex.pqmodeIndex = modeType; + for( 
int formatType : values.formatValues ) { + paramIndex.formatIndex = formatType; switch(pqParamIndex) { case PQ_PARAM_BRIGHTNESS: case PQ_PARAM_CONTRAST: @@ -916,17 +1032,17 @@ namespace Plugin { case PQ_PARAM_LOWLATENCY_STATE: case PQ_PARAM_DOLBY_MODE: if(reset) { - ret |= updateAVoutputTVParamToHAL(tr181ParamName,source, mode, format,0,false); - } + ret |= updateAVoutputTVParamToHAL(tr181ParamName,paramIndex,0,false); + } if(sync || reset) { int value=0; - if(getLocalparam(tr181ParamName,format,mode,source,value,pqParamIndex,sync)) { - continue; - } - params[0]=value; + if(getLocalparam(tr181ParamName,paramIndex,value,pqParamIndex,sync)) { + continue; + } + level=value; } if(set) { - ret |= updateAVoutputTVParamToHAL(tr181ParamName,source, mode, format, params[0],true); + ret |= updateAVoutputTVParamToHAL(tr181ParamName,paramIndex,level,true); } break; default: @@ -934,53 +1050,138 @@ namespace Plugin { } switch(pqParamIndex) { case PQ_PARAM_BRIGHTNESS: - ret |= SaveBrightness(source, mode,format,params[0]); + ret |= SaveBrightness((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); break; case PQ_PARAM_CONTRAST: - ret |= SaveContrast(source, mode,format,params[0]); + ret |= SaveContrast((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); break; case PQ_PARAM_SHARPNESS: - ret |= SaveSharpness(source, mode,format,params[0]); + ret |= SaveSharpness((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); break; case PQ_PARAM_HUE: - ret |= SaveHue(source, mode,format,params[0]); + ret |= SaveHue((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); break; case PQ_PARAM_SATURATION: - ret |= SaveSaturation(source, mode,format,params[0]); + ret |= SaveSaturation((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); break; case PQ_PARAM_COLOR_TEMPERATURE: - ret |= SaveColorTemperature(source, mode,format,(tvColorTemp_t)params[0]); + ret |= SaveColorTemperature((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvColorTemp_t)level); break; case PQ_PARAM_BACKLIGHT: - ret |= SaveBacklight(source, mode,format,params[0]); + ret |= SaveBacklight((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); break; case PQ_PARAM_DIMMINGMODE: - ret |= SaveTVDimmingMode(source,mode,format,(tvDimmingMode_t)params[0]); + ret |= SaveTVDimmingMode((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDimmingMode_t)level); break; case PQ_PARAM_LOWLATENCY_STATE: - ret |= SaveLowLatencyState(source, mode,format,params[0]); + ret |= SaveLowLatencyState((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); break; case PQ_PARAM_DOLBY_MODE: - ret |= SaveTVDolbyVisionMode(source, mode,format,(tvDolbyMode_t)params[0]); + ret |= SaveTVDolbyVisionMode((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDolbyMode_t)level); break; - case PQ_PARAM_ASPECT_RATIO: - ret |= SaveAspectRatio(source,mode,format,(tvDisplayMode_t)params[0]); + case PQ_PARAM_ASPECT_RATIO: + ret |= 
SaveAspectRatio((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDisplayMode_t)level); break; - case PQ_PARAM_LOCALDIMMING_LEVEL: + + case PQ_PARAM_CMS_SATURATION_RED: + case PQ_PARAM_CMS_SATURATION_BLUE: + case PQ_PARAM_CMS_SATURATION_GREEN: + case PQ_PARAM_CMS_SATURATION_YELLOW: + case PQ_PARAM_CMS_SATURATION_CYAN: + case PQ_PARAM_CMS_SATURATION_MAGENTA: + case PQ_PARAM_CMS_HUE_RED: + case PQ_PARAM_CMS_HUE_BLUE: + case PQ_PARAM_CMS_HUE_GREEN: + case PQ_PARAM_CMS_HUE_YELLOW: + case PQ_PARAM_CMS_HUE_CYAN: + case PQ_PARAM_CMS_HUE_MAGENTA: + case PQ_PARAM_CMS_LUMA_RED: + case PQ_PARAM_CMS_LUMA_BLUE: + case PQ_PARAM_CMS_LUMA_GREEN: + case PQ_PARAM_CMS_LUMA_YELLOW: + case PQ_PARAM_CMS_LUMA_CYAN: + case PQ_PARAM_CMS_LUMA_MAGENTA: + { + for( int componentType : values.componentValues ) { + paramIndex.componentIndex = componentType; + for( int colorType : values.colorValues ) { + paramIndex.colorIndex = colorType; + if(reset) { + ret |= updateAVoutputTVParamToHAL(tr181ParamName,paramIndex,0,false); + } + if(sync || reset) { + int value=0; + tvPQParameterIndex_t pqIndex; + if ( convertCMSParamToPQEnum(getCMSComponentStringFromEnum((tvComponentType_t)paramIndex.componentIndex),getCMSColorStringFromEnum((tvDataComponentColor_t)paramIndex.colorIndex),pqIndex) != 0 ) + { + LOGERR("%s:convertCMSParamToPQEnum failed color : %d component : %d \n",__FUNCTION__,paramIndex.colorIndex,paramIndex.componentIndex); + return -1; + } + if(getLocalparam(tr181ParamName,paramIndex,value,pqIndex,sync)) { + continue; + } + level=value; + } + ret |= SaveCMS((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvComponentType_t)paramIndex.componentIndex,(tvDataComponentColor_t)paramIndex.colorIndex,level); + + if(set) { + ret |= updateAVoutputTVParamToHAL(tr181ParamName,paramIndex,level,true); + } + } + } + break; + } + case PQ_PARAM_WB_GAIN_RED: + case PQ_PARAM_WB_GAIN_GREEN: + case PQ_PARAM_WB_GAIN_BLUE: + case PQ_PARAM_WB_OFFSET_RED: + case PQ_PARAM_WB_OFFSET_GREEN: + case PQ_PARAM_WB_OFFSET_BLUE: + { + for( int colorType : values.colorValues ) { + paramIndex.colorIndex = colorType; + for( int controlType : values.controlValues ) { + paramIndex.controlIndex = controlType; + if(reset) { + ret |= updateAVoutputTVParamToHAL(tr181ParamName,paramIndex,0,false); + } + if(sync || reset) { + int value=0; + if(getLocalparam(tr181ParamName,paramIndex,value,pqParamIndex,sync)) { + continue; + } + level=value; + } + /* tvRGBType_t rgbIndex; + if ( convertWBParamToRGBEnum(getWBColorStringFromEnum((tvWBColor_t)(paramIndex.colorIndex)),getWBControlStringFromEnum((tvWBControl_t)(paramIndex.controlIndex)),rgbIndex) != 0 ) + { + LOGERR("%s:convertWBParamToRGBEnum failed Color : %d Control : %d \n",__FUNCTION__,paramIndex.colorIndex,paramIndex.controlIndex); + return -1; + }*/ + ret |= SaveCustom2PointWhiteBalance((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvWBColor_t)paramIndex.colorIndex,(tvWBControl_t)paramIndex.controlIndex,level); + + if(set) { + ret |= updateAVoutputTVParamToHAL(tr181ParamName,paramIndex,level,true); + } + } + } + break; + } + case PQ_PARAM_LOCALDIMMING_LEVEL: { - if(sync) { - int value=0; - getLocalparam(tr181ParamName,format,mode,source,value,pqParamIndex,sync); - params[0]=value; - } - ret |= SaveTVDimmingMode(source, mode,format,(tvDimmingMode_t)params[0]); - break; - } - case PQ_PARAM_CMS: - case PQ_PARAM_LDIM: - default: - 
break; + if(sync) { + int value=0; + getLocalparam(tr181ParamName,paramIndex,value,pqParamIndex,sync); + level=value; + } + ret |= SaveTVDimmingMode((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDimmingMode_t)level); + break; + } + case PQ_PARAM_CMS: + case PQ_PARAM_LDIM: + default: + break; } } } @@ -989,129 +1190,369 @@ namespace Plugin { } return ret; } + void AVOutputTV::syncCMSParamsV2() { + JsonObject parameters; + + // Set default values to "none" to indicate all contexts (global sync) + parameters["pictureMode"] = "none"; + parameters["videoSource"] = "none"; + parameters["videoFormat"] = "none"; + + // Use "Global" to trigger syncing for all CMS components and colors + parameters["color"] = "Global"; + parameters["component"] = "Global"; + + // Dummy PQ index; unused for CMS sync but required by function signature + tvPQParameterIndex_t dummyPQIndex = PQ_PARAM_CMS_SATURATION_RED; + + int result = updateAVoutputTVParamV2("sync", "CMS", parameters, dummyPQIndex, 0); + if (result == 0) { + LOGINFO("%s: CMS sync completed successfully", __FUNCTION__); + } else { + LOGERR("%s: CMS sync encountered errors", __FUNCTION__); + } + } - tvError_t AVOutputTV::syncAvoutputTVParamsToHAL(std::string pqmode,std::string source,std::string format) + tvError_t AVOutputTV::syncAvoutputTVParamsToHAL(std::string pqmode, std::string source, std::string format) { - int params[3]={0}; + int level = {0}; + capDetails_t info; + info.pqmode = pqmode; + info.source = source; + info.format = format; + + JsonObject paramJson; + paramJson["pictureMode"] = info.pqmode; + paramJson["videoSource"] = info.source; + paramJson["videoFormat"] = info.format; + LOGINFO("Entry %s : pqmode : %s source : %s format : %s\n", __FUNCTION__, pqmode.c_str(), source.c_str(), format.c_str()); + + // Brightness + m_brightnessStatus = GetBrightnessCaps(&m_maxBrightness, &m_brightnessCaps); + LOGINFO("GetBrightnessCaps returned status: %d, max: %d", m_brightnessStatus, m_maxBrightness); + if (m_brightnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Brightness", info, PQ_PARAM_BRIGHTNESS, level); + } else { + updateAVoutputTVParamV2("sync", "Brightness", paramJson, PQ_PARAM_BRIGHTNESS,level); + } - LOGINFO("Entry %s : pqmode : %s source : %s format : %s\n",__FUNCTION__,pqmode.c_str(),source.c_str(),format.c_str()); + // Contrast + m_contrastStatus = GetContrastCaps(&m_maxContrast, &m_contrastCaps); + LOGINFO("GetContrastCaps returned status: %d, max: %d", m_contrastStatus, m_maxContrast); + if (m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Contrast", info, PQ_PARAM_CONTRAST, level); + } else { + updateAVoutputTVParamV2("sync", "Contrast", paramJson, PQ_PARAM_CONTRAST,level); + } - if( !updateAVoutputTVParam("sync","Brightness",pqmode,source,format,PQ_PARAM_BRIGHTNESS,params)) { - LOGINFO("Brightness Successfully sync to Drive Cache\n"); - } - else { - LOGERR("Brightness Sync to cache Failed !!!\n"); - } + // Sharpness + m_sharpnessStatus = GetSharpnessCaps(&m_maxSharpness, &m_sharpnessCaps); + LOGINFO("GetSharpnessCaps returned status: %d, max: %d", m_sharpnessStatus, m_maxSharpness); + if (m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Sharpness", info, PQ_PARAM_SHARPNESS, level); + } else { + updateAVoutputTVParamV2("sync", "Sharpness", paramJson, PQ_PARAM_SHARPNESS, level); + } - if( 
!updateAVoutputTVParam("sync","Contrast",pqmode,source,format,PQ_PARAM_CONTRAST,params)) { - LOGINFO("Contrast Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("Contrast Sync to cache Failed !!!\n"); - } + // Saturation + m_saturationStatus = GetSaturationCaps(&m_maxSaturation, &m_saturationCaps); + LOGINFO("GetSaturationCaps returned status: %d, max: %d", m_saturationStatus, m_maxSaturation); + if (m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Saturation", info, PQ_PARAM_SATURATION, level); + } else { + updateAVoutputTVParamV2("sync", "Saturation", paramJson, PQ_PARAM_SATURATION,level); + } - if( !updateAVoutputTVParam("sync","Sharpness",pqmode,source,format,PQ_PARAM_SHARPNESS,params)) { - LOGINFO("Sharpness Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("Sharpness Sync to cache Failed !!!\n"); - } + // Hue + m_hueStatus = GetHueCaps(&m_maxHue, &m_hueCaps); + LOGINFO("GetHueCaps returned status: %d, max: %d", m_hueStatus, m_maxHue); + if (m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Hue", info, PQ_PARAM_HUE, level); + } else { + updateAVoutputTVParamV2("sync", "Hue", paramJson, PQ_PARAM_HUE, level); + } - if( !updateAVoutputTVParam("sync","Saturation",pqmode,source,format,PQ_PARAM_SATURATION,params)) { - LOGINFO("Saturation Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("Saturation Sync to cache Failed !!!\n"); - } + // ColorTemperature + m_colorTempStatus = GetColorTemperatureCaps(&m_colortemp, &m_numColortemp, &m_colortempCaps); + LOGINFO("GetColorTemperatureCaps returned status: %d, numColortemp: %d", m_colorTempStatus, m_numColortemp); + if (m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "ColorTemp", info, PQ_PARAM_COLOR_TEMPERATURE, level); + } else { + updateAVoutputTVParamV2("sync", "ColorTemp", paramJson, PQ_PARAM_COLOR_TEMPERATURE,level); + } - if( !updateAVoutputTVParam("sync","Hue",pqmode,source,format,PQ_PARAM_HUE,params)) { - LOGINFO("Hue Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("Hue Sync to cache Failed !!!\n"); - } + // HDRMode + updateAVoutputTVParam("sync", "HDRMode", info, PQ_PARAM_DOLBY_MODE, level); - if( !updateAVoutputTVParam("sync","ColorTemp",pqmode,source,format,PQ_PARAM_COLOR_TEMPERATURE,params)) { - LOGINFO("ColorTemp Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("ColorTemp Sync to cache Failed !!!\n"); + // DimmingMode + m_dimmingModeStatus = GetTVDimmingModeCaps(&m_dimmingModes, &m_numdimmingModes, &m_dimmingModeCaps); + LOGINFO("GetTVDimmingModeCaps returned status: %d, numdimmingModes: %d", m_dimmingModeStatus, m_numdimmingModes); + if (m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "DimmingMode", info, PQ_PARAM_DIMMINGMODE, level); + } else { +#if !HAL_NOT_READY + updateAVoutputTVParamV2("sync", "DimmingMode", paramJson, PQ_PARAM_DIMMINGMODE,level); +#endif } - if( !updateAVoutputTVParam("sync","DolbyVisionMode",pqmode,source,"DV",PQ_PARAM_DOLBY_MODE,params)) { - LOGINFO("dvmode Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("dvmode Sync to cache Failed !!!\n"); - } + // Backlight + LOGINFO("Calling GetBacklightCaps..."); + m_backlightStatus = GetBacklightCaps(&m_maxBacklight, &m_backlightCaps); + LOGINFO("GetBacklightCaps returned status: %d, maxBacklight: %d", m_backlightStatus, m_maxBacklight); +#if DEBUG + if (m_backlightCaps) + { + LOGINFO("Backlight caps pointer is valid. 
Num contexts: %zu", m_backlightCaps->num_contexts); + for (size_t i = 0; i < m_backlightCaps->num_contexts; ++i) { + const auto& context = m_backlightCaps->contexts[i]; + std::string pqModeStr = AVOutputTV::pqModeMap.count(context.pq_mode) ? + AVOutputTV::pqModeMap.at(context.pq_mode) : "Unknown"; + std::string formatStr = AVOutputTV::videoFormatMap.count(context.videoFormatType) ? + AVOutputTV::videoFormatMap.at(context.videoFormatType) : "Unknown"; + std::string srcStr = AVOutputTV::videoSrcMap.count(context.videoSrcType) ? + AVOutputTV::videoSrcMap.at(context.videoSrcType) : "Unknown"; + LOGINFO("Context[%zu]: PQMode = %s (%d), Format = %s (%d), Source = %s (%d)", + i, pqModeStr.c_str(), context.pq_mode, + formatStr.c_str(), context.videoFormatType, + srcStr.c_str(), context.videoSrcType); + } + } else { + LOGWARN("Backlight caps pointer is null."); + } +#endif + if (m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Backlight", info, PQ_PARAM_BACKLIGHT, level); + } else { + updateAVoutputTVParamV2("sync", "Backlight", paramJson, PQ_PARAM_BACKLIGHT, level); + } - if( !updateAVoutputTVParam("sync","DimmingMode",pqmode,source,format,PQ_PARAM_DIMMINGMODE,params)) { - LOGINFO("dimmingmode Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("dimmingmode Sync to cache Failed !!!\n"); - } + //Ambient Bakclight Mode + m_backlightModeStatus = GetBacklightModeCaps(&m_backlightModes, &m_numBacklightModes, &m_backlightModeCaps); + if (m_backlightModeStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "BacklightMode", paramJson, PQ_PARAM_BACKLIGHT_MODE, level); + } - if( !updateAVoutputTVParam("sync","Backlight",pqmode,source,format,PQ_PARAM_BACKLIGHT,params) ) { - LOGINFO("Backlight Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("Backlight Sync to cache Failed !!!\n"); - } + //AspectRatio + m_aspectRatioStatus = GetAspectRatioCaps(&m_aspectRatio, &m_numAspectRatio, &m_aspectRatioCaps); + //LowLatencyState + m_lowLatencyStateStatus = GetLowLatencyStateCaps(&m_maxlowLatencyState, &m_lowLatencyStateCaps); + // PrecisionDetail + m_precisionDetailStatus = GetPrecisionDetailCaps(&m_maxPrecisionDetail, &m_precisionDetailCaps); + //PictureMode + m_pictureModeStatus = GetTVPictureModeCaps(&m_pictureModes, &m_numPictureModes, &m_pictureModeCaps); + + // LocalContrastEnhancement + m_localContrastEnhancementStatus = GetLocalContrastEnhancementCaps(&m_maxLocalContrastEnhancement, &m_localContrastEnhancementCaps); + if (m_localContrastEnhancementStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "LocalContrastEnhancement", paramJson, PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, level); + } - LOGINFO("Exit %s : pqmode : %s source : %s format : %s\n",__FUNCTION__,pqmode.c_str(),source.c_str(),format.c_str()); - return tvERROR_NONE; - } + // MPEGNoiseReduction + m_MPEGNoiseReductionStatus = GetMPEGNoiseReductionCaps(&m_maxMPEGNoiseReduction, &m_MPEGNoiseReductionCaps); + if (m_MPEGNoiseReductionStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "MPEGNoiseReduction", paramJson, PQ_PARAM_MPEG_NOISE_REDUCTION, level); + } - int AVOutputTV::syncAvoutputTVPQModeParamsToHAL(std::string pqmode, std::string source, std::string format) + // DigitalNoiseReduction + m_digitalNoiseReductionStatus = GetDigitalNoiseReductionCaps(&m_maxDigitalNoiseReduction, &m_digitalNoiseReductionCaps); + if (m_digitalNoiseReductionStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "DigitalNoiseReduction", paramJson, PQ_PARAM_DIGITAL_NOISE_REDUCTION, 
level); + } + + // AISuperResolution + m_AISuperResolutionStatus = GetAISuperResolutionCaps(&m_maxAISuperResolution, &m_AISuperResolutionCaps); + if (m_AISuperResolutionStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "AISuperResolution", paramJson, PQ_PARAM_AI_SUPER_RESOLUTION,level); + } + + // MEMC + m_MEMCStatus = GetMEMCCaps(&m_maxMEMC, &m_MEMCCaps); + if (m_MEMCStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "MEMC", paramJson, PQ_PARAM_MEMC, level); + } + + m_cmsStatus = GetCMSCaps(&m_maxCmsHue, &m_maxCmsSaturation, &m_maxCmsLuma, + &m_cmsColorArr, &m_cmsComponentArr, + &m_numColor, &m_numComponent, &m_cmsCaps); + if (m_cmsStatus == tvERROR_NONE) { + for (size_t i = 0; i < m_numColor; i++) { + std::string colorStr = getCMSColorStringFromEnum(m_cmsColorArr[i]); + m_cmsColorList.push_back(colorStr); + } + for (size_t i = 0; i < m_numComponent; i++) { + std::string componentStr = getCMSComponentStringFromEnum(m_cmsComponentArr[i]); + m_cmsComponentList.push_back(componentStr); + } + syncCMSParamsV2(); + } + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + syncCMSParams(); + } + + syncWBParams(); + + // Dolby Vision Mode + info.format = "DV"; // Sync only for Dolby + updateAVoutputTVParam("sync", "DolbyVisionMode", info, PQ_PARAM_DOLBY_MODE, level); + + LOGINFO("Exit %s : pqmode : %s source : %s format : %s\n", __FUNCTION__, pqmode.c_str(), source.c_str(), format.c_str()); + return tvERROR_NONE; + } + int AVOutputTV::syncAvoutputTVPQModeParamsToHALV2(std::string pqmode, std::string source, std::string format) { - std::vector sources; - std::vector pictureModes; - std::vector formats; tr181ErrorCode_t err = tr181Success; TR181_ParamData_t param = {0}; - int ret = 0; + bool contextSynced = false; + + // Treat "none" as "Global" + if (source == "none") + source = "Global"; + if (format == "none") + format = "Global"; + + // Handle "Current" source/format substitution + if (source == "Current" || format == "Current") { + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + GetCurrentVideoSource(¤tSrc); + GetCurrentVideoFormat(¤tFmt); + if (currentFmt == VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + if (source == "Current") + source = convertSourceIndexToStringV2(currentSrc); + if (format == "Current") + format = convertVideoFormatToStringV2(currentFmt); + } + if (m_pictureModeStatus == tvERROR_NONE) + { + for (size_t i = 0; i < m_pictureModeCaps->num_contexts; ++i) { + const tvConfigContext_t& ctx = m_pictureModeCaps->contexts[i]; + + std::string sourceStr = convertSourceIndexToStringV2(ctx.videoSrcType); + std::string formatStr = convertVideoFormatToStringV2(ctx.videoFormatType); + + // Filter by provided source/format + if (source != "Global" && source != sourceStr) + continue; + if (format != "Global" && format != formatStr) + continue; + + std::string tr181Param = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + + "." + sourceStr + ".Format." 
+ formatStr + ".PictureModeString"; + + err = getLocalParam(rfc_caller_id, tr181Param.c_str(), ¶m); + if (err != tr181Success) { + LOGWARN("Failed to getLocalParam for %s\n", tr181Param.c_str()); + continue; + } + + std::string modeStr = param.value; + int modeIndex = -1; + for (size_t i = 0; i < m_numPictureModes; ++i) { + if (pqModeMap.at(m_pictureModes[i]) == modeStr) { + modeIndex = static_cast(i); + break; + } + } +#if DEBUG + LOGINFO("Got mode string from TR181: %s -> index=%d", modeStr.c_str(), modeIndex); +#endif + tvError_t tv_err = SaveSourcePictureMode(ctx.videoSrcType, ctx.videoFormatType, modeIndex); + if (tv_err != tvERROR_NONE) { + LOGWARN("Failed SaveSourcePictureMode for %s / %s\n", sourceStr.c_str(), formatStr.c_str()); + continue; + } + + contextSynced = true; + } + + if (!contextSynced) { + LOGWARN("No matching context synced for pqmode=%s source=%s format=%s\n", + pqmode.c_str(), source.c_str(), format.c_str()); + return -1; + } + return 0; + } + return -1; + } - ret = getSaveConfig(pqmode, source, format, sources, pictureModes, formats); - - if (ret == 0 ) { - for (int source : sources) { - tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; - for (int format : formats) { - tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; - std::string tr181_param_name = ""; - tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ - convertVideoFormatToString(formatType)+"."+"PictureModeString"; - - err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); - if ( tr181Success == err ) { - std::string local = param.value; - int pqmodeindex = (int)getPictureModeIndex(local); - - tvError_t tv_err = SaveSourcePictureMode(sourceType, formatType, pqmodeindex); - if (tv_err != tvERROR_NONE) { - LOGWARN("failed to SaveSourcePictureMode \n"); + int AVOutputTV::syncAvoutputTVPQModeParamsToHAL(std::string pqmode, std::string source, std::string format) + { + if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + valueVectors_t valueVectors; + tr181ErrorCode_t err = tr181Success; + TR181_ParamData_t param = {0}; + int ret = 0; + + inputInfo.pqmode = pqmode; + inputInfo.source = source; + inputInfo.format = format; + + ret = getSaveConfig("PictureMode", inputInfo, valueVectors); + + if (ret == 0 ) { + for (int source : valueVectors.sourceValues ) { + tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; + for (int format : valueVectors.formatValues ) { + tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; + std::string tr181_param_name = ""; + tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ + convertVideoFormatToString(formatType)+"."+"PictureModeString"; + + err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); + if ( tr181Success == err ) { + std::string local = param.value; + int pqmodeindex = (int)getPictureModeIndex(local); + + tvError_t tv_err = SaveSourcePictureMode(sourceType, formatType, pqmodeindex); + if (tv_err != tvERROR_NONE) { + LOGWARN("failed to SaveSourcePictureMode \n"); + return -1; + } + } + else { + LOGWARN("Failed to get the getLocalParam \n"); return -1; } } - else { - LOGWARN("Failed to get the getLocalParam \n"); - return -1; - } } } + return ret; + } + else + { + return syncAvoutputTVPQModeParamsToHALV2(pqmode,source,format); } - return ret; } - uint32_t 
AVOutputTV::generateStorageIdentifier(std::string &key, std::string forParam,int contentFormat, int pqmode, int source) + uint32_t AVOutputTV::generateStorageIdentifier(std::string &key, std::string forParam, paramIndex_t info) + { + key+=std::string(AVOUTPUT_GENERIC_STRING_RFC_PARAM); + key+=STRING_SOURCE+convertSourceIndexToString(info.sourceIndex)+std::string(".")+STRING_PICMODE+convertPictureIndexToString(info.pqmodeIndex)+std::string(".")+std::string(STRING_FORMAT)+convertVideoFormatToString(info.formatIndex)+std::string(".")+forParam; + return tvERROR_NONE; + } + + uint32_t AVOutputTV::generateStorageIdentifierCMS(std::string &key, std::string forParam, paramIndex_t info) + { + key+=std::string(AVOUTPUT_GENERIC_STRING_RFC_PARAM); + key+=STRING_SOURCE+convertSourceIndexToString(info.sourceIndex)+std::string(".")+STRING_PICMODE+convertPictureIndexToString(info.pqmodeIndex)+std::string(".")+std::string(STRING_FORMAT)+convertVideoFormatToString(info.formatIndex)+std::string(".")+STRING_COLOR+getCMSColorStringFromEnum((tvDataComponentColor_t)info.colorIndex)+std::string(".")+STRING_COMPONENT+getCMSComponentStringFromEnum((tvComponentType_t)info.componentIndex)+std::string(".")+forParam; + return tvERROR_NONE; + } + + uint32_t AVOutputTV::generateStorageIdentifierWB(std::string &key, std::string forParam, paramIndex_t info) { key+=std::string(AVOUTPUT_GENERIC_STRING_RFC_PARAM); - key+=STRING_SOURCE+convertSourceIndexToString(source)+std::string(".")+STRING_PICMODE+convertPictureIndexToString(pqmode)+std::string(".")+std::string(STRING_FORMAT)+convertVideoFormatToString(contentFormat)+std::string(".")+forParam; + key+=STRING_SOURCE+convertSourceIndexToString(info.sourceIndex)+std::string(".")+STRING_PICMODE+convertPictureIndexToString(info.pqmodeIndex)+std::string(".")+std::string(STRING_FORMAT)+convertVideoFormatToString(info.formatIndex)+std::string(".")+STRING_COLOR+getWBColorStringFromEnum((tvWBColor_t)info.colorIndex)+std::string(".")+STRING_CONTROL+getWBControlStringFromEnum((tvWBControl_t)info.controlIndex)+std::string(".")+forParam; return tvERROR_NONE; } + + uint32_t AVOutputTV::generateStorageIdentifierDirty(std::string &key, std::string forParam,uint32_t contentFormat, int pqmode) { key+=std::string(AVOUTPUT_GENERIC_STRING_RFC_PARAM); @@ -1138,48 +1579,128 @@ namespace Plugin { return "UNKNOWN ERROR"; } - int AVOutputTV::getSaveConfig(std::string pqmode, std::string source, std::string format,std::vector &sources,std::vector &picturemodes, std::vector &formats) + int AVOutputTV::getSaveConfig(std::string param, capDetails_t capInfo, valueVectors_t &values) { - LOGINFO("Entry : %s pqmode : %s source :%s format :%s\n",__FUNCTION__,pqmode.c_str(),source.c_str(),format.c_str()); + LOGINFO("Entry : %s pqmode : %s source :%s format :%s component : %s color : %s control:%s\n",__FUNCTION__,capInfo.pqmode.c_str(),capInfo.source.c_str(),capInfo.format.c_str(),capInfo.component.c_str(),capInfo.color.c_str(),capInfo.control.c_str()); int ret = 0; - if (getAvailableCapabilityModes(source, pqmode, format) != 0) { + if (getAvailableCapabilityModes(capInfo) != 0) { LOGERR("%s: failed to get picture/source/format mode capability \n", __FUNCTION__); return -1; } //pqmode - char *modeString = strdup(pqmode.c_str()); + char *modeString = strdup(capInfo.pqmode.c_str()); char *token = NULL; while ((token = strtok_r(modeString,",",&modeString))) { std::string local = token; - picturemodes.push_back(getPictureModeIndex(local)); + values.pqmodeValues.push_back(getPictureModeIndex(local)); } //source - 
char *sourceString = strdup(source.c_str()); + char *sourceString = strdup(capInfo.source.c_str()); char *sourceToken = NULL; while ((sourceToken = strtok_r(sourceString,",",&sourceString))) { std::string local = sourceToken; - sources.push_back(getSourceIndex(local)); + if( local == "All") continue; + values.sourceValues.push_back(getSourceIndex(local)); } //3)check format - char *formatString = strdup(format.c_str()); + char *formatString = strdup(capInfo.format.c_str()); char *formatToken = NULL; while ((formatToken = strtok_r(formatString,",",&formatString))) { std::string local = formatToken; - formats.push_back(getFormatIndex(local)); + values.formatValues.push_back(getFormatIndex(local)); + } + + if( param.compare("CMS") == 0 ) + { + //Check Color + char *colorString = strdup(capInfo.color.c_str()); + char *colorToken = NULL; + while ((colorToken = strtok_r(colorString,",",&colorString))) { + std::string local = colorToken; + tvDataComponentColor_t level = tvDataColor_NONE; + if ( getCMSColorEnumFromString(local,level ) == -1 ) { + LOGERR("%s : GetColorEnumFromString Failed!!! ",__FUNCTION__); + return -1; + } + values.colorValues.push_back(level); + } + + //Check Component + char *componentString = strdup(capInfo.component.c_str()); + char *componentToken = NULL; + while ((componentToken = strtok_r(componentString,",",&componentString))) { + std::string local = componentToken; + tvComponentType_t level; + if ( getCMSComponentEnumFromString(local,level ) == -1 ) { + LOGERR("%s : GetComponentEnumFromString Failed!!! ",__FUNCTION__); + return -1; + } + values.componentValues.push_back(level); + } + } + + if( param.compare("WhiteBalance") == 0 ) + { + //Check Color + char *colorString = strdup(capInfo.color.c_str()); + char *colorToken = NULL; + while ((colorToken = strtok_r(colorString,",",&colorString))) { + std::string local = colorToken; + tvWBColor_t level=tvWB_COLOR_RED; + if ( getWBColorEnumFromString(local,level ) == -1 ) { + LOGERR("%s : GetWBColorEnumFromString Failed!!! ",__FUNCTION__); + return -1; + } + values.colorValues.push_back(level); + } + + //Check Control + char *controlString = strdup(capInfo.control.c_str()); + char *controlToken = NULL; + while ((controlToken = strtok_r(controlString,",",&controlString))) { + std::string local = controlToken; + tvWBControl_t level=tvWB_CONTROL_GAIN;; + if ( getWBControlEnumFromString(local,level ) == -1 ) { + LOGERR("%s : GetWBControlEnumFromString Failed!!! ",__FUNCTION__); + return -1; + } + values.controlValues.push_back(level); + } + + /* + //Check Color Temp + char *colorTempString = strdup(capInfo.colorTemperature.c_str()); + char *colorTempToken = NULL; + while ((colorTempToken = strtok_r(colorTempString,",",&colorTempString))) { + std::string local = colorTempToken; + tvColorTemp_t level; + if ( getColorTempEnumFromString(local,level ) == -1 ) { + LOGERR("%s : GetColorTempEnumFromString Failed!!! 
",__FUNCTION__); + return -1; + } + values.colorTempValues.push_back(level); + }*/ } - LOGINFO("Exit : %s pqmode : %s source :%s format :%s ret:%d\n",__FUNCTION__,pqmode.c_str(),source.c_str(),format.c_str(), ret); + LOGINFO("Exit : %s pqmode : %s source :%s format :%s ret:%d\n",__FUNCTION__,capInfo.pqmode.c_str(),capInfo.source.c_str(),capInfo.format.c_str(), ret); return ret; } - int AVOutputTV::getLocalparam( std::string forParam,int formatIndex,int pqIndex,int sourceIndex,int & value, - tvPQParameterIndex_t pqParamIndex,bool sync,int color ) + int AVOutputTV::getLocalparam( std::string forParam,paramIndex_t indexInfo,int & value,tvPQParameterIndex_t pqParamIndex,bool sync) { string key; TR181_ParamData_t param={0}; - generateStorageIdentifier(key,forParam,formatIndex,pqIndex,sourceIndex); + + if( forParam.compare("CMS") == 0 ) { + generateStorageIdentifierCMS(key,forParam,indexInfo); + } else if( forParam.compare("WhiteBalance") == 0 ) { + generateStorageIdentifierWB(key,forParam,indexInfo); + } else { + generateStorageIdentifierV2(key,forParam,indexInfo); + } + if(key.empty()) { LOGERR("generateStorageIdentifier failed\n"); return -1; @@ -1191,33 +1712,48 @@ namespace Plugin { if( forParam.compare("ColorTemp") == 0 ) { if (strncmp(param.value, "Standard", strlen(param.value))==0) { value=tvColorTemp_STANDARD; - } + } else if (strncmp(param.value, "Warm", strlen(param.value))==0) { value=tvColorTemp_WARM; - } + } else if (strncmp(param.value, "Cold", strlen(param.value))==0) { value=tvColorTemp_COLD; - } - else if (strncmp(param.value, "User Defined", strlen(param.value))==0) { + } + else if (strncmp(param.value, "UserDefined", strlen(param.value))==0) { value=tvColorTemp_USER; - } + } else { value=tvColorTemp_STANDARD; - } + } return 0; - } + } else if( forParam.compare("DimmingMode") == 0 ) { - if (strncmp(param.value, "fixed", strlen(param.value))==0) { + if (strncmp(param.value, "Fixed", strlen(param.value))==0) { value=tvDimmingMode_Fixed; - } - else if (strncmp(param.value, "local", strlen(param.value))==0) { + } + else if (strncmp(param.value, "Local", strlen(param.value))==0) { value=tvDimmingMode_Local; - } - else if (strncmp(param.value, "global", strlen(param.value))==0) { + } + else if (strncmp(param.value, "Global", strlen(param.value))==0) { value=tvDimmingMode_Global; - } + } return 0; } + else if ( forParam.compare("BacklightMode") == 0 ) { + if (strncmp(param.value, "Manual", strlen(param.value)) == 0) { + value = tvBacklightMode_MANUAL; + } + else if (strncmp(param.value, "Ambient", strlen(param.value)) == 0) { + value = tvBacklightMode_AMBIENT; + } + else if (strncmp(param.value, "Eco", strlen(param.value)) == 0) { + value = tvBacklightMode_ECO; + } + else { + value = tvBacklightMode_MANUAL; // Default fallback + } + return 0; + } else if ( forParam.compare("DolbyVisionMode") == 0) { if (strncmp(param.value, "Dark", strlen(param.value)) == 0) { value = tvDolbyMode_Dark; @@ -1229,17 +1765,41 @@ namespace Plugin { value = tvDolbyMode_Bright; } return 0; + } + else if ( forParam.compare("HDRMode") == 0) { + if (strncmp(param.value, "Dark", strlen(param.value)) == 0 && key.find("DV") != std::string::npos ) { + value = tvDolbyMode_Dark; + } + else if(strncmp(param.value, "Bright", strlen(param.value)) == 0 && key.find("DV") != std::string::npos ) { + value = tvDolbyMode_Game; + } + else if(strncmp(param.value, "Dark", strlen(param.value)) == 0 && key.find("HDR10") != std::string::npos ) { + value = tvHDR10Mode_Dark; + } + else if(strncmp(param.value, "Bright", 
strlen(param.value)) == 0 && key.find("HDR10") != std::string::npos ) { + value = tvHDR10Mode_Bright; + } + else if(strncmp(param.value, "Dark", strlen(param.value)) == 0 && key.find("HLG") != std::string::npos ) { + value = tvHLGMode_Dark; + } + else if(strncmp(param.value, "Bright", strlen(param.value)) == 0 && key.find("HLG") != std::string::npos ) { + value = tvHLGMode_Bright; + } + else { + value = tvDolbyMode_Game; + } + return 0; } else { value=std::stoi(param.value); - return 0; + return 0; } } else {// default value from DB if( sync ) { return 1; } - GetDefaultPQParams(pqIndex,(tvVideoSrcType_t)sourceIndex,(tvVideoFormatType_t)formatIndex,pqParamIndex,&value); + GetDefaultPQParams(indexInfo.pqmodeIndex,(tvVideoSrcType_t)indexInfo.sourceIndex,(tvVideoFormatType_t)indexInfo.formatIndex,pqParamIndex,&value); LOGINFO("Default value from DB : %s : %d \n",key.c_str(),value); return 0; } @@ -1273,102 +1833,20 @@ namespace Plugin { return CompColorEnum; } - int AVOutputTV::getDolbyParams(tvContentFormatType_t format, std::string &s, std::string source) - { - int ret = -1; - TR181_ParamData_t param; - std::string rfc_param = AVOUTPUT_HDR10MODE_RFC_PARAM; - int dolby_mode_value = 0; - tvVideoSrcType_t sourceIndex = VIDEO_SOURCE_IP; - /*Since dolby vision is source specific, we should for check for specific source*/ - if (!source.empty()) { - sourceIndex = (tvVideoSrcType_t)getSourceIndex(source); - } - else { - GetCurrentVideoSource(&sourceIndex); - } - - char picMode[PIC_MODE_NAME_MAX]={0}; - int pqmodeIndex = 0; - if(!getCurrentPictureMode(picMode)) { - LOGERR("Failed to get the Current picture mode\n"); - } - else { - std::string local = picMode; - pqmodeIndex = getPictureModeIndex(local); - } - memset(¶m, 0, sizeof(param)); - if (format == tvContentFormatType_HLG ) { - rfc_param = AVOUTPUT_HLGMODE_RFC_PARAM; - } - else if (format == tvContentFormatType_DOVI) { - rfc_param = AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM + std::to_string(sourceIndex) + "."+"DolbyVisionMode"; - } - - tr181ErrorCode_t err = getLocalParam(rfc_caller_id, rfc_param.c_str(), ¶m); - if ( tr181Success != err) { - tvError_t retVal = GetDefaultPQParams(pqmodeIndex,(tvVideoSrcType_t)sourceIndex, - (tvVideoFormatType_t)ConvertHDRFormatToContentFormat((tvhdr_type_t)format), - PQ_PARAM_DOLBY_MODE,&dolby_mode_value); - if( retVal != tvERROR_NONE ) { - LOGERR("%s : failed\n",__FUNCTION__); - return ret; - } - s = getDolbyModeStringFromEnum((tvDolbyMode_t)dolby_mode_value); - ret = 0; - } - else { - s += param.value; - ret = 0; - } - return ret; - } - - tvError_t AVOutputTV::getParamsCaps(std::vector &range - , std::vector &pqmode, std::vector &source, std::vector &format,std::string param ) - { - tvError_t ret = tvERROR_NONE; - - std::string rangeInfo; - std::string sourceInfo; - std::string formatInfo; - std::string pqmodeInfo; - - std::string platformsupport; - std::string indexInfo; - std::vector localIndex; - - if( ReadCapablitiesFromConf( rangeInfo, pqmodeInfo, formatInfo ,sourceInfo,param, platformsupport, indexInfo)) { - LOGERR( "%s: ReadCapablitiesFromConf Failed !!!\n",__FUNCTION__); - return tvERROR_GENERAL; - } - else { - spliltCapablities( range, pqmode, format, source, localIndex,rangeInfo, pqmodeInfo, formatInfo, sourceInfo , indexInfo); - } - - return ret; - } - - tvError_t AVOutputTV::getParamsCaps(std::vector &range - , std::vector &pqmode, std::vector &source, std::vector &format,std::string param, - std::string & isPlatformSupport, std::vector & index) + tvError_t AVOutputTV::getParamsCaps(std::string 
param, capVectors_t &vecInfo) { tvError_t ret = tvERROR_NONE; - - std::string rangeInfo; - std::string sourceInfo; - std::string formatInfo; - std::string pqmodeInfo; - std::string indexInfo; - - if( ReadCapablitiesFromConf( rangeInfo, pqmodeInfo, formatInfo ,sourceInfo,param, isPlatformSupport, indexInfo)) { + capDetails_t stringInfo; + + if( ReadCapablitiesFromConf( param, stringInfo) != 0 ) + { LOGERR( "%s: ReadCapablitiesFromConf Failed !!!\n",__FUNCTION__); return tvERROR_GENERAL; } - else { - spliltCapablities( range, pqmode, format, source, index,rangeInfo, pqmodeInfo, formatInfo, sourceInfo, indexInfo); + else + { + spliltCapablities( vecInfo, stringInfo); } - return ret; } @@ -1405,95 +1883,15 @@ namespace Plugin { return 0; } - int AVOutputTV::ConvertHDRFormatToContentFormat(tvhdr_type_t hdrFormat) - { - int ret=tvContentFormatType_SDR; - switch(hdrFormat) - { - case HDR_TYPE_SDR: - ret=tvContentFormatType_SDR; - break; - case HDR_TYPE_HDR10: - ret=tvContentFormatType_HDR10; - break; - case HDR_TYPE_HDR10PLUS: - ret=tvContentFormatType_HDR10PLUS; - break; - case HDR_TYPE_DOVI: - ret=tvContentFormatType_DOVI; - break; - case HDR_TYPE_HLG: - ret=tvContentFormatType_HLG; - break; - default: - break; - } - return ret; - } - - - int AVOutputTV::ReadCapablitiesFromConf(std::string &rangeInfo,std::string &pqmodeInfo,std::string &formatInfo,std::string &sourceInfo, - std::string param, std::string & isPlatformSupport, std::string & indexInfo) - { - int ret = 0; - - try { - CIniFile inFile(CAPABLITY_FILE_NAME); - std::string configString; - if ((param == "DolbyVisionMode") || (param == "Backlight") ) { - configString = param + ".platformsupport"; - isPlatformSupport = inFile.Get(configString); - printf(" platfromsupport : %s\n",isPlatformSupport.c_str() ); - } - - if ( (param == "ColorTemperature") || (param == "DimmingMode") || - ( param == "AutoBacklightControl") || (param == "DolbyVisionMode") || - (param == "HDR10Mode") || (param == "HLGMode") || (param == "AspectRatio") || - (param == "PictureMode") || (param == "VideoSource") || (param == "VideoFormat") || - (param == "VideoFrameRate") ) { - configString = param + ".range"; - rangeInfo = inFile.Get(configString); - printf(" String Range info : %s\n",rangeInfo.c_str() ); - } else { - configString = param + ".range_from"; - rangeInfo = inFile.Get(configString); - configString = param + ".range_to"; - rangeInfo += ","+inFile.Get(configString); - printf(" Integer Range Info : %s\n",rangeInfo.c_str() ); - } - - if ((param == "VideoSource") || (param == "PictureMode") || (param == "VideoFormat") ) { - configString.clear(); - configString = param + ".index"; - indexInfo = inFile.Get(configString); - printf("Index value %s\n", indexInfo.c_str()); - } - - configString.clear(); - configString = param + ".pqmode"; - pqmodeInfo = inFile.Get(configString); - configString = param + ".format"; - formatInfo = inFile.Get(configString); - configString = param + ".source"; - sourceInfo = inFile.Get(configString); - ret = 0; - } - catch(const boost::property_tree::ptree_error &e) { - printf("%s: error %s::config table entry not found in ini file\n",__FUNCTION__,e.what()); - ret = -1; - } - return ret; - } - void AVOutputTV::getDimmingModeStringFromEnum(int value, std::string &toStore) { - const char *color_temp_string[] = { - [tvDimmingMode_Fixed] = "fixed", - [tvDimmingMode_Local] = "local", - [tvDimmingMode_Global] = "global", + const char *dimmingmode_string[] = { + [tvDimmingMode_Fixed] = "Fixed", + [tvDimmingMode_Local] = "Local", + 
[tvDimmingMode_Global] = "Global", }; toStore.clear(); - toStore+=color_temp_string[value]; + toStore+=dimmingmode_string[value]; } void AVOutputTV::getColorTempStringFromEnum(int value, std::string &toStore) @@ -1502,11 +1900,49 @@ namespace Plugin { [tvColorTemp_STANDARD] = "Standard", [tvColorTemp_WARM] = "Warm", [tvColorTemp_COLD] = "Cold", - [tvColorTemp_USER] = "User Defined" + [tvColorTemp_USER] = "UserDefined" }; toStore.clear(); toStore+=color_temp_string[value]; } + void AVOutputTV::getDisplayModeStringFromEnum(int value, std::string &toStore) + { + static const char* display_mode_string[] = { + [tvDisplayMode_4x3] = "TV 4X3 PILLARBOX", + [tvDisplayMode_16x9] = "TV 16X9 STRETCH", + [tvDisplayMode_FULL] = "TV FULL", + [tvDisplayMode_NORMAL] = "TV NORMAL", + [tvDisplayMode_AUTO] = "TV AUTO", + [tvDisplayMode_DIRECT] = "TV DIRECT", + [tvDisplayMode_ZOOM] = "TV ZOOM" + }; + + toStore.clear(); + if (value >= 0 && value < tvDisplayMode_MAX && display_mode_string[value]) { + toStore += display_mode_string[value]; + } else { + toStore += "TV AUTO"; + } + } + + void AVOutputTV::getBacklightModeStringFromEnum(int value, std::string& toStore) + { + toStore.clear(); + switch (static_cast(value)) { + case tvBacklightMode_MANUAL: + toStore = "Manual"; + break; + case tvBacklightMode_AMBIENT: + toStore = "Ambient"; + break; + case tvBacklightMode_ECO: + toStore = "Eco"; + break; + default: + toStore = "Unknown"; + break; + } + } int AVOutputTV::getCurrentPictureMode(char *picMode) { @@ -1528,56 +1964,22 @@ namespace Plugin { } tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "." + convertSourceIndexToString(currentSource) + "." + "Format."+convertVideoFormatToString(current_format)+"."+"PictureModeString"; + tr181_param_name += "." + convertSourceIndexToStringV2(currentSource) + "." 
+ "Format."+convertVideoFormatToStringV2(current_format)+"."+"PictureModeString"; memset(¶m, 0, sizeof(param)); tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); if ( err == tr181Success ) { strncpy(picMode, param.value, strlen(param.value)+1); - picMode[strlen(param.value)] = '\0'; - LOGINFO("getLocalParam success, mode = %s\n", picMode); + //LOGINFO("getLocalParam success, mode = %s\n", picMode); return 1; } else { - LOGERR("getLocalParam failed"); + LOGERR("getLocalParam failed %s\n",tr181_param_name.c_str()); return 0; } } - - int AVOutputTV::getDolbyParamToSync(int sourceIndex, int formatIndex, int& value) - { - int ret=0; - TR181_ParamData_t param; - int pqmodeIndex = 0; - char picMode[PIC_MODE_NAME_MAX]={0}; - if(!getCurrentPictureMode(picMode)) { - LOGERR("Failed to get the Current picture mode\n"); - } - else { - std::string local = picMode; - pqmodeIndex = getPictureModeIndex(local); - } - std ::string rfc_param = AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM + std::to_string(sourceIndex) + "."+"DolbyVisionMode"; - memset(¶m, 0, sizeof(param)); - tr181ErrorCode_t err = getLocalParam(rfc_caller_id, rfc_param.c_str(), ¶m); - - if ( tr181Success != err) { - tvError_t retVal = GetDefaultPQParams(pqmodeIndex,(tvVideoSrcType_t)sourceIndex, (tvVideoFormatType_t)formatIndex, - PQ_PARAM_DOLBY_MODE, &value); - if( retVal != tvERROR_NONE ) { - LOGERR("%s : failed\n",__FUNCTION__); - return -1; - } - ret = 0; - } - else { - value=getDolbyModeIndex(param.value); - ret = 0; - } - - return ret; - } + tvDolbyMode_t AVOutputTV::GetDolbyVisionEnumFromModeString(const char* modeString) { if (strcmp(modeString, "Invalid") == 0) { @@ -1588,45 +1990,31 @@ namespace Plugin { return tvDolbyMode_Bright; } else if (strcmp(modeString, "Game") == 0) { return tvDolbyMode_Game; - } else if (strcmp(modeString, "HDR10 Dark") == 0) { - return tvHDR10Mode_Dark; - } else if (strcmp(modeString, "HDR10 Bright") == 0) { - return tvHDR10Mode_Bright; - } else if (strcmp(modeString, "HDR10 Game") == 0) { - return tvHDR10Mode_Game; - } else if (strcmp(modeString, "HLG Dark") == 0) { - return tvHLGMode_Dark; - } else if (strcmp(modeString, "HLG Bright") == 0) { - return tvHLGMode_Bright; - } else if (strcmp(modeString, "HLG Game") == 0) { - return tvHLGMode_Game; } - return tvDolbyMode_Invalid; // Default case for invalid input } std::string AVOutputTV::getDolbyModeStringFromEnum( tvDolbyMode_t mode) - { std::string value; switch(mode) { case tvDolbyMode_Dark: case tvHDR10Mode_Dark: case tvHLGMode_Dark: - value = "Dark"; - break; + value = "Dark"; + break; case tvDolbyMode_Bright: case tvHDR10Mode_Bright: case tvHLGMode_Bright: - value = "Bright"; - break; - case tvDolbyMode_Game: + value = "Bright"; + break; + case tvDolbyMode_Game: case tvHDR10Mode_Game: case tvHLGMode_Game: value = "Game"; break; default: - break; + break; } return value; @@ -1635,39 +2023,36 @@ namespace Plugin { int AVOutputTV::getAvailableCapabilityModesWrapper(std::string param, std::string & outparam) { tvError_t err = tvERROR_NONE; - std::vector range; - std::vector picmodeVec; - std::vector sourceVec; - std::vector formatVec; + capVectors_t info; - err = getParamsCaps(range,picmodeVec,sourceVec,formatVec, param); + err = getParamsCaps(param,info); if (err != tvERROR_NONE) { LOGERR("%s: failed to get [%s] capability \n", __FUNCTION__, param.c_str()); return -1; } - outparam = convertToString(range); + outparam = convertToString(info.rangeVector); return 0; } - int AVOutputTV::getAvailableCapabilityModes(std::string 
& source, std::string & pqmode, std::string & format) + int AVOutputTV::getAvailableCapabilityModes( capDetails_t &info) { - if ((pqmode.compare("none") == 0 )) { - if (getAvailableCapabilityModesWrapper("PictureMode", pqmode) != 0) { + if ((info.pqmode.compare("none") == 0 )) { + if (getAvailableCapabilityModesWrapper("PictureMode", info.pqmode) != 0) { LOGERR("%s: failed to get picture mode capability \n", __FUNCTION__); return -1; } } - if( (source.compare("none") == 0)) { - if (getAvailableCapabilityModesWrapper("VideoSource",source) != 0) { + if( (info.source.compare("none") == 0)) { + if (getAvailableCapabilityModesWrapper("VideoSource",info.source) != 0) { LOGERR("%s: failed to get source capability \n", __FUNCTION__); return -1; } } - if( (format.compare("none") == 0) ) { - if (getAvailableCapabilityModesWrapper("VideoFormat",format) != 0) { + if( (info.format.compare("none") == 0) ) { + if (getAvailableCapabilityModesWrapper("VideoFormat",info.format) != 0) { LOGERR("%s: failed to get format capability \n", __FUNCTION__); return -1; } @@ -1677,17 +2062,17 @@ namespace Plugin { int AVOutputTV::getCapabilitySource(JsonArray & rangeArray) { - std::vector range,pqmode,source,format; + capVectors_t info; - tvError_t ret = getParamsCaps(range,pqmode,source,format,"VideoSource"); + tvError_t ret = getParamsCaps("VideoSource",info); if(ret != tvERROR_NONE) { return -1; } else { - if ((range.front()).compare("none") != 0) { - for (unsigned int index = 0; index < range.size(); index++) { - rangeArray.Add(range[index]); + if ((info.rangeVector.front()).compare("none") != 0) { + for (unsigned int index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); } } } @@ -1696,16 +2081,16 @@ namespace Plugin { int AVOutputTV::getRangeCapability(std::string param, std::vector & rangeInfo) { - std::vector range,pqmode,source,format; + capVectors_t info; - tvError_t ret = getParamsCaps(range,pqmode,source,format, param); + tvError_t ret = getParamsCaps(param,info); if(ret != tvERROR_NONE) { return -1; } else { - if ((range.front()).compare("none") != 0) { - rangeInfo = range; + if ((info.rangeVector.front()).compare("none") != 0) { + rangeInfo = info.rangeVector; } } return 0; @@ -1730,7 +2115,7 @@ namespace Plugin { { tvError_t ret = tvERROR_GENERAL; #if !defined (HDMIIN_4K_ZOOM) - LOGERR("%s:mode selected is: %d", __FUNCTION__, m_videoZoomMode); + LOGINFO("%s:mode selected is: %d", __FUNCTION__, m_videoZoomMode); if (AVOutputTV::instance->m_isDisabledHdmiIn4KZoom) { if (!(AVOutputTV::instance->m_currentHdmiInResolutonm_currentHdmiInResoluton))) { @@ -1783,6 +2168,11 @@ namespace Plugin { tvDisplayMode_t mode = tvDisplayMode_MAX; TR181_ParamData_t param; tvError_t ret = tvERROR_NONE; + capDetails_t inputInfo; + + inputInfo.pqmode = pqmode; + inputInfo.source = source; + inputInfo.format = format; memset(¶m, 0, sizeof(param)); tr181ErrorCode_t err = getLocalParam(rfc_caller_id, AVOUTPUT_ASPECTRATIO_RFC_PARAM, ¶m); @@ -1829,9 +2219,7 @@ namespace Plugin { } else { //Save DisplayMode to ssm_data - int params[3]={0}; - params[0]=mode; - int retval=updateAVoutputTVParam("set","AspectRatio",pqmode,source,format,PQ_PARAM_ASPECT_RATIO,params); + int retval=updateAVoutputTVParam("set","ZoomMode",inputInfo,PQ_PARAM_ASPECT_RATIO,mode); if(retval != 0) { LOGERR("Failed to Save DisplayMode to ssm_data\n"); @@ -1848,5 +2236,2074 @@ namespace Plugin { return ret; } + int AVOutputTV::getCMSComponentEnumFromString(std::string component, tvComponentType_t& value) + { + int ret = 0; 
+ + if( component.compare("Luma") == 0 ) + value = COMP_LUMA; + else if( component.compare("Saturation") == 0 ) + value = COMP_SATURATION; + else if( component.compare("Hue") == 0 ) + value = COMP_HUE; + else + ret = -1; + + return ret; + } + + int AVOutputTV::getCMSColorEnumFromString(std::string color,tvDataComponentColor_t& value) + { + int ret = 0; + + if( color.compare("Red") == 0 ) + value = tvDataColor_RED; + else if( color.compare("Green") == 0 ) + value = tvDataColor_GREEN; + else if( color.compare("Blue") == 0 ) + value = tvDataColor_BLUE; + else if( color.compare("Yellow") == 0) + value = tvDataColor_YELLOW; + else if( color.compare("Cyan") == 0) + value = tvDataColor_CYAN; + else if( color.compare("Magenta") == 0) + value = tvDataColor_MAGENTA; + else + ret = -1; + + return ret; + } + + int AVOutputTV::getColorTempEnumFromString(std::string color, tvColorTemp_t& value) + { + int ret = 0; + + if( color.compare("Standard") == 0 ) + value = tvColorTemp_STANDARD; + else if( color.compare("Warm") == 0 ) + value = tvColorTemp_WARM; + else if( color.compare("Cold") == 0 ) + value = tvColorTemp_COLD; + else if( color.compare("UserDefined") == 0 ) + value =tvColorTemp_USER; + else + ret = -1; + return ret; + } + + void AVOutputTV::syncCMSParams( ) + { + int level = 0; + std::string cmsParam; + tvPQParameterIndex_t tvPQEnum; + capDetails_t inputInfo; + tvDataComponentColor_t colors[] = {tvDataColor_RED,tvDataColor_GREEN,tvDataColor_BLUE,tvDataColor_YELLOW,tvDataColor_CYAN,tvDataColor_MAGENTA}; + + inputInfo.pqmode = "none"; + inputInfo.source = "none"; + inputInfo.format = "none"; + + for ( int component = COMP_HUE; component < COMP_MAX;component++) { + for(int count = 0;count < (int)(sizeof(colors)/sizeof(colors[0])); ++count) { + tvDataComponentColor_t color = colors[count]; + std::string componentString = getCMSComponentStringFromEnum((tvComponentType_t)component); + std::string colorString = getCMSColorStringFromEnum((tvDataComponentColor_t)color); + cmsParam = componentString+"."+colorString; + + if ( convertCMSParamToPQEnum(componentString,colorString,tvPQEnum) != 0 ) { + LOGINFO("%s: %s/%s Param Not Found \n",__FUNCTION__,componentString.c_str(),componentString.c_str()); + continue; + } + + inputInfo.color = colorString; + inputInfo.component = componentString; + if( !updateAVoutputTVParam("sync","CMS", inputInfo,tvPQEnum,level)) + LOGINFO("CMS Successfully Synced to Drive Cache\n"); + else + LOGERR("CMS Sync to cache Failed !!!\n"); + } + } + } + + void AVOutputTV::syncWBParams( ) + { + int level = 0; + tvPQParameterIndex_t tvPQEnum; + capDetails_t inputInfo; + + inputInfo.pqmode = "none"; + inputInfo.source = "none"; + inputInfo.format = "none"; + + for( int colorIndex= tvWB_COLOR_RED; colorIndex < tvWB_COLOR_MAX; colorIndex++) { + for(int controlIndex = tvWB_CONTROL_GAIN;controlIndex < tvWB_CONTROL_MAX;controlIndex++) { + inputInfo.control = getWBControlStringFromEnum((tvWBControl_t)controlIndex); + inputInfo.color = getWBColorStringFromEnum((tvWBColor_t)colorIndex); + + if ( convertWBParamToPQEnum(inputInfo.control,inputInfo.color,tvPQEnum) != 0 ) { + LOGERR("%s: %s/%s Param Not Found \n",__FUNCTION__,inputInfo.control.c_str(),inputInfo.color.c_str()); + } + updateAVoutputTVParam("sync","WhiteBalance",inputInfo,tvPQEnum,level); + } + } + } + + + int AVOutputTV:: convertCMSParamToPQEnum(const std::string component, const std::string color,tvPQParameterIndex_t& value) { + // Create a map to associate color-component pairs with enum values + int ret = 0; + static const 
std::unordered_map colorComponentMap = { + {"SaturationRed", PQ_PARAM_CMS_SATURATION_RED}, + {"SaturationGreen", PQ_PARAM_CMS_SATURATION_GREEN}, + {"SaturationBlue", PQ_PARAM_CMS_SATURATION_BLUE}, + {"SaturationCyan", PQ_PARAM_CMS_SATURATION_CYAN}, + {"SaturationMagenta", PQ_PARAM_CMS_SATURATION_MAGENTA}, + {"SaturationYellow", PQ_PARAM_CMS_SATURATION_YELLOW}, + {"HueRed", PQ_PARAM_CMS_HUE_RED}, + {"HueGreen", PQ_PARAM_CMS_HUE_GREEN}, + {"HueBlue", PQ_PARAM_CMS_HUE_BLUE}, + {"HueCyan", PQ_PARAM_CMS_HUE_CYAN}, + {"HueMagenta", PQ_PARAM_CMS_HUE_MAGENTA}, + {"HueYellow", PQ_PARAM_CMS_HUE_YELLOW}, + {"LumaRed", PQ_PARAM_CMS_LUMA_RED}, + {"LumaGreen", PQ_PARAM_CMS_LUMA_GREEN}, + {"LumaBlue", PQ_PARAM_CMS_LUMA_BLUE}, + {"LumaCyan", PQ_PARAM_CMS_LUMA_CYAN}, + {"LumaMagenta", PQ_PARAM_CMS_LUMA_MAGENTA}, + {"LumaYellow", PQ_PARAM_CMS_LUMA_YELLOW} + }; + + // Create the key by concatenating the component and color + std::string key = component + color; + + // Look up the key in the map + auto it = colorComponentMap.find(key); + if (it != colorComponentMap.end()) { + value = it->second; + ret = 0; + } else { + LOGERR("%s : Invalid color/component\n",__FUNCTION__); + ret = -1; + } + return ret; + } + + int AVOutputTV:: convertWBParamToRGBEnum(const std::string color,std::string control,tvRGBType_t &value) + { + // Create a map to associate color-ntrol pairs with enum values + int ret = 0; + static const std::unordered_map colorControlMap = { + {"RedGain", R_GAIN}, + {"GreenGain", G_GAIN}, + {"BlueGain", B_GAIN}, + {"RedOffset", R_POST_OFFSET}, + {"GreenOffset", G_POST_OFFSET}, + {"BlueOffset", B_POST_OFFSET} + }; + + // Create the key by concatenating the color and control + std::string key = color + control; + + // Look up the key in the map + auto it = colorControlMap.find(key); + if (it != colorControlMap.end()) { + value = it->second; + ret = 0; + } else { + LOGERR("%s : Invalid color/control\n",__FUNCTION__); + ret = -1; + } + return ret; + } + + int AVOutputTV:: convertWBParamToPQEnum(const std::string control, const std::string color,tvPQParameterIndex_t& value) { + // Create a map to associate color-component pairs with enum values + int ret = 0; + static const std::unordered_map colorControlMap = { + {"RedGain", PQ_PARAM_WB_GAIN_RED}, + {"RedOffset", PQ_PARAM_WB_OFFSET_RED}, + {"GreenGain", PQ_PARAM_WB_GAIN_GREEN}, + {"GreenOffset", PQ_PARAM_WB_OFFSET_GREEN}, + {"BlueGain", PQ_PARAM_WB_GAIN_BLUE}, + {"BlueOffset", PQ_PARAM_WB_OFFSET_BLUE}, + }; + + // Create the key by concatenating the component and color + std::string key = color+control; + + // Look up the key in the map + auto it = colorControlMap.find(key); + if (it != colorControlMap.end()) { + value = it->second; + ret = 0; + } else { + LOGERR("%s : Invalid color/control\n",__FUNCTION__); + ret = -1; + } + return ret; + } + + std::string AVOutputTV::getCMSColorStringFromEnum(tvDataComponentColor_t value) + { + switch(value) + { + case tvDataColor_RED: return "Red"; + case tvDataColor_GREEN: return "Green"; + case tvDataColor_BLUE: return "Blue"; + case tvDataColor_YELLOW: return "Yellow"; + case tvDataColor_CYAN: return "Cyan"; + case tvDataColor_MAGENTA: return "Magenta"; + default : return "Max"; + } + } + + std::string AVOutputTV::getCMSComponentStringFromEnum(tvComponentType_t value) { + switch(value) { + case COMP_HUE: return "Hue"; + case COMP_SATURATION: return "Saturation"; + case COMP_LUMA: return "Luma"; + default : return "Max"; + } + } + + std::string AVOutputTV::getWBColorStringFromEnum(tvWBColor_t value) { + switch(value) 
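/*
 * Review note: convertWBParamToPQEnum above keys its lookup on the concatenation
 * color + control ("Red" + "Gain" -> "RedGain"), so the string helpers that follow must stay
 * in step with those map keys. A quick illustration of the round trip (sketch only):
 *
 *   tvPQParameterIndex_t pq;
 *   std::string color   = getWBColorStringFromEnum(tvWB_COLOR_RED);      // "Red"
 *   std::string control = getWBControlStringFromEnum(tvWB_CONTROL_GAIN); // "Gain"
 *   if (convertWBParamToPQEnum(control, color, pq) == 0) {
 *       // pq == PQ_PARAM_WB_GAIN_RED
 *   }
 */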
{ + case tvWB_COLOR_RED: return "Red"; + case tvWB_COLOR_GREEN: return "Green"; + case tvWB_COLOR_BLUE: return "Blue"; + default : return "Max"; + } + } + + std::string AVOutputTV::getWBControlStringFromEnum(tvWBControl_t value) { + switch(value) + { + case tvWB_CONTROL_GAIN: return "Gain"; + case tvWB_CONTROL_OFFSET: return "Offset"; + default: return "Max"; + } + } + + int AVOutputTV::getWBColorEnumFromString(std::string color,tvWBColor_t& value) { + int ret = 0; + + if( color.compare("Red") == 0 ) + value = tvWB_COLOR_RED; + else if( color.compare("Green") == 0 ) + value = tvWB_COLOR_GREEN; + else if( color.compare("Blue") == 0 ) + value = tvWB_COLOR_BLUE; + else + ret = -1; + + return ret; + } + + int AVOutputTV::getWBControlEnumFromString(std::string color,tvWBControl_t& value) { + int ret = 0; + + if( color.compare("Gain") == 0 ) + value = tvWB_CONTROL_GAIN; + else if( color.compare("Offset") == 0 ) + value = tvWB_CONTROL_OFFSET; + else + ret = -1; + + return ret; + } + + std::string AVOutputTV::getColorTemperatureStringFromEnum(tvColorTemp_t value) { + switch(value) { + case tvColorTemp_STANDARD: return "Standard"; + case tvColorTemp_WARM: return "Warm"; + case tvColorTemp_COLD: return "Cold"; + case tvColorTemp_USER : return "UserDefined"; + default : return "Standard"; + } + } + + int AVOutputTV:: validateCMSParameter(std::string component,int inputValue) + { + capVectors_t info; + tvError_t ret = getParamsCaps("CMS", info); + + LOGINFO("%s : component : %s inputValue : %d\n",__FUNCTION__,component.c_str(),inputValue); + + if (ret != tvERROR_NONE) { + LOGERR("Failed to fetch the range capability \n"); + return -1; + } + + if( component == "Saturation" ) { + if (inputValue < stoi(info.rangeVector[0]) || inputValue > std::stoi(info.rangeVector[1])) { + LOGERR("wrong Input value[%d] for %s\n", inputValue,component.c_str()); + return -1; + } + } else if ( component == "Hue" ) { + if (inputValue < stoi(info.rangeVector[2]) || inputValue > std::stoi(info.rangeVector[3])) { + LOGERR("wrong Input value[%d] for %s\n", inputValue,component.c_str()); + return -1; + } + } else if ( component == "Luma" ) { + if (inputValue < stoi(info.rangeVector[4]) || inputValue > std::stoi(info.rangeVector[5])) { + LOGERR("wrong Input value[%d] for %s\n", inputValue,component.c_str()); + return -1; + } + } + return 0; + } + + int AVOutputTV:: validateWBParameter(std::string param,std::string control,int inputValue) + { + capVectors_t info; + tvError_t ret = getParamsCaps(param, info); + + if (ret != tvERROR_NONE) { + LOGERR("Failed to fetch the range capability[%s] \n", param.c_str()); + return -1; + } + + if( control == "Gain" ) { + if (inputValue < stoi(info.rangeVector[0]) || inputValue > std::stoi(info.rangeVector[1])) { + LOGERR("wrong Input value[%d] for %s\n", inputValue,control.c_str()); + return -1; + } + } else if ( control == "Offset" ) { + if (inputValue < stoi(info.rangeVector[2]) || inputValue > std::stoi(info.rangeVector[3])) { + LOGERR("wrong Input value[%d] for %s\n", inputValue,control.c_str()); + return -1; + } + } + return 0; + } +//JSON Based V2 Helpers + const std::map AVOutputTV::pqModeMap = { + {PQ_MODE_SPORTS, "Sports"}, + {PQ_MODE_THEATER, "Theater"}, + {PQ_MODE_GAME, "Game"}, + {PQ_MODE_IQ, "IQ"}, + {PQ_MODE_DARK, "Dark"}, + {PQ_MODE_BRIGHT, "Bright"}, + {PQ_MODE_AIPQ, "AI PQ"}, + {PQ_MODE_STANDARD, "Standard"}, + {PQ_MODE_VIVID, "Vivid"}, + {PQ_MODE_ENERGY_SAVING, "EnergySaving"}, + {PQ_MODE_CUSTOM, "Custom"} + }; + + const std::map AVOutputTV::videoFormatMap = { + 
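// --- Illustrative sketch (not part of this patch) ---------------------------
// validateCMSParameter and validateWBParameter read their limits as strings out of
// info.rangeVector and convert them with stoi on every check; a non-numeric or
// missing capability entry would throw or index out of bounds. This is a hedged
// sketch of the same range test with the conversion guarded; the vector layout and
// index pairs are assumptions modelled on the code above, not the real caps data.
#include <iostream>
#include <string>
#include <vector>

// 0 when value lies inside [range[lo], range[hi]], -1 otherwise.
int checkRange(const std::vector<std::string>& range, size_t lo, size_t hi, int value)
{
    if (hi >= range.size()) {
        return -1;                               // capability list shorter than expected
    }
    try {
        const int from = std::stoi(range[lo]);
        const int to   = std::stoi(range[hi]);
        return (value >= from && value <= to) ? 0 : -1;
    } catch (const std::exception&) {
        return -1;                               // non-numeric capability entry
    }
}

int main()
{
    const std::vector<std::string> caps = {"-50", "50", "0", "100"};
    std::cout << checkRange(caps, 0, 1, 25) << "\n";    // 0  (inside the first pair)
    std::cout << checkRange(caps, 2, 3, 180) << "\n";   // -1 (outside the second pair)
}
// --- end sketch --------------------------------------------------------------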
{VIDEO_FORMAT_NONE, "None"}, + {VIDEO_FORMAT_SDR, "SDR"}, + {VIDEO_FORMAT_HDR10, "HDR10"}, + {VIDEO_FORMAT_HDR10PLUS, "HDR10Plus"}, + {VIDEO_FORMAT_DV, "DV"}, + {VIDEO_FORMAT_HLG, "HLG"} + }; + + const std::map AVOutputTV::videoSrcMap = { + {VIDEO_SOURCE_COMPOSITE1, "Composite1"}, + {VIDEO_SOURCE_HDMI1, "HDMI1"}, + {VIDEO_SOURCE_HDMI2, "HDMI2"}, + {VIDEO_SOURCE_HDMI3, "HDMI3"}, + {VIDEO_SOURCE_HDMI4, "HDMI4"}, + {VIDEO_SOURCE_IP, "IP"}, + {VIDEO_SOURCE_TUNER, "Tuner"} + }; + const std::unordered_map AVOutputTV::backlightModeMap = { + {tvBacklightMode_MANUAL, "Manual"}, + {tvBacklightMode_AMBIENT, "Ambient"}, + {tvBacklightMode_ECO, "Eco"} + }; + + std::unordered_map AVOutputTV::pqModeReverseMap; + std::unordered_map AVOutputTV::videoFormatReverseMap; + std::unordered_map AVOutputTV::videoSrcReverseMap; + bool AVOutputTV::reverseMapsInitialized = false; + + void AVOutputTV::initializeReverseMaps() { + if (reverseMapsInitialized) return; + + for (const auto& entry : pqModeMap) { + pqModeReverseMap[entry.second] = static_cast(entry.first); + } + for (const auto& entry : videoFormatMap) { + videoFormatReverseMap[entry.second] = static_cast(entry.first); + } + for (const auto& entry : videoSrcMap) { + videoSrcReverseMap[entry.second] = static_cast(entry.first); + } + reverseMapsInitialized = true; + } + + const std::unordered_map AVOutputTV::backlightModeReverseMap = []{ + std::unordered_map m; + for (const auto& pair : AVOutputTV::backlightModeMap) m[pair.second] = pair.first; + return m; + }(); + + std::string AVOutputTV::convertSourceIndexToStringV2(int source) { + auto it = videoSrcMap.find(source); + return (it != videoSrcMap.end()) ? it->second : ""; + } + + std::string AVOutputTV::convertVideoFormatToStringV2(int format) { + auto it = videoFormatMap.find(format); + return (it != videoFormatMap.end()) ? it->second : ""; + } + + std::string AVOutputTV::convertPictureIndexToStringV2(int pqmode) { + auto it = pqModeMap.find(pqmode); + return (it != pqModeMap.end()) ? it->second : ""; + } + uint32_t AVOutputTV::generateStorageIdentifierV2(std::string &key, std::string forParam, paramIndex_t info) + { + key += AVOUTPUT_GENERIC_STRING_RFC_PARAM; + key += STRING_SOURCE + convertSourceIndexToStringV2(info.sourceIndex) + "." + + STRING_PICMODE + convertPictureIndexToStringV2(info.pqmodeIndex) + "." + + STRING_FORMAT + convertVideoFormatToStringV2(info.formatIndex) + "." 
+                       + forParam;
+        return tvERROR_NONE;
+    }
+
+    bool AVOutputTV::isValidSource(const std::vector<std::string>& sourceArray, tvVideoSrcType_t sourceIndex)
+    {
+        // If "Current" is passed, match the current source
+        if (std::find(sourceArray.begin(), sourceArray.end(), "Current") != sourceArray.end()) {
+            tvVideoSrcType_t currentSource = VIDEO_SOURCE_IP;
+            GetCurrentVideoSource(&currentSource);
+            return (sourceIndex == currentSource);
+        }
+
+        // Match against specific source strings
+        const std::string srcStr = convertSourceIndexToStringV2(sourceIndex);
+        return std::find(sourceArray.begin(), sourceArray.end(), srcStr) != sourceArray.end();
+    }
+
+    bool AVOutputTV::isValidFormat(const std::vector<std::string>& formatArray, tvVideoFormatType_t formatIndex)
+    {
+        // If "Current" is passed, match the current format
+        if (std::find(formatArray.begin(), formatArray.end(), "Current") != formatArray.end()) {
+            tvVideoFormatType_t currentFormat = VIDEO_FORMAT_NONE;
+            GetCurrentVideoFormat(&currentFormat);
+            return (formatIndex == currentFormat);
+        }
+
+        // Match against specific format strings
+        const std::string fmtStr = convertVideoFormatToStringV2(formatIndex);
+        return std::find(formatArray.begin(), formatArray.end(), fmtStr) != formatArray.end();
+    }
+
+    tvConfigContext_t AVOutputTV::getValidContextFromGetParameters(const JsonObject& parameters, const std::string& paramName)
+    {
+        tvConfigContext_t validContext = {PQ_MODE_INVALID, VIDEO_FORMAT_NONE, VIDEO_SOURCE_ALL};
+        // Picture Mode
+        std::string pictureModeStr;
+        // Treat "Current", an empty string, or a missing key as a cue to fetch the current system value
+        if (!parameters.HasLabel("pictureMode") ||
+            (pictureModeStr = parameters["pictureMode"].String()).empty() ||
+            pictureModeStr == "Current")
+        {
+            char picMode[PIC_MODE_NAME_MAX] = {0};
+            getCurrentPictureMode(picMode);
+            pictureModeStr = picMode;
+            LOGINFO("Current Picture Mode: %s", picMode);
+            validContext.pq_mode = static_cast<tvPQModeIndex_t>(
+                pqModeReverseMap.count(pictureModeStr) ? pqModeReverseMap.at(pictureModeStr) : PQ_MODE_INVALID
+            );
+        }
+        else
+        {
+            validContext.pq_mode = static_cast<tvPQModeIndex_t>(
+                pqModeReverseMap.count(pictureModeStr) ? pqModeReverseMap.at(pictureModeStr) : PQ_MODE_INVALID
+            );
+        }
+        // Video Format
+        std::string videoFormatStr;
+        if (!parameters.HasLabel("videoFormat") ||
+            (videoFormatStr = parameters["videoFormat"].String()).empty() ||
+            videoFormatStr == "Current")
+        {
+            GetCurrentVideoFormat(&validContext.videoFormatType);
+        }
+        else
+        {
+            validContext.videoFormatType = static_cast<tvVideoFormatType_t>(
+                videoFormatReverseMap.count(videoFormatStr) ? videoFormatReverseMap.at(videoFormatStr) : VIDEO_FORMAT_NONE
+            );
+        }
+        // Video Source
+        std::string videoSourceStr;
+        if (!parameters.HasLabel("videoSource") ||
+            (videoSourceStr = parameters["videoSource"].String()).empty() ||
+            videoSourceStr == "Current")
+        {
+            GetCurrentVideoSource(&validContext.videoSrcType);
+        }
+        else
+        {
+            validContext.videoSrcType = static_cast<tvVideoSrcType_t>(
+                videoSrcReverseMap.count(videoSourceStr) ?
videoSrcReverseMap.at(videoSourceStr) : VIDEO_SOURCE_ALL + ); + } + tvContextCaps_t* caps = getCapsForParam(paramName); + LOGINFO("Looking for context: PQMode=%d, Format=%d, Source=%d", + validContext.pq_mode, validContext.videoFormatType, validContext.videoSrcType); + // Match context if caps exist + if (caps && caps->num_contexts > 0) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + const tvConfigContext_t& available = caps->contexts[i]; +#if DEBUG + LOGINFO("Context[%zu]: PQMode=%d, Format=%d, Source=%d", i, + available.pq_mode, available.videoFormatType, available.videoSrcType); +#endif + if (available.videoSrcType == validContext.videoSrcType && + available.videoFormatType == validContext.videoFormatType && + available.pq_mode == validContext.pq_mode) { + return available; // valid context found + } + } + } + LOGWARN("No valid context found for %s with provided parameters", paramName.c_str()); + validContext = {PQ_MODE_INVALID, VIDEO_FORMAT_NONE, VIDEO_SOURCE_ALL}; + return validContext; + } + + bool AVOutputTV::isGlobalParam(const JsonArray& arr) { + return (arr.Length() == 0) || + (arr.Length() == 1 && ( + arr[0].String() == "Global" || arr[0].String() == "none")); + } + + std::vector AVOutputTV::extractPQModes(const JsonObject& parameters) { + initializeReverseMaps(); + + std::vector pqModes; + if (!parameters.HasLabel("pictureMode")) { + return pqModes; + } + + JsonArray pqmodeArray = parameters["pictureMode"].Array(); + pqModes.reserve(pqmodeArray.Length()); // Pre-allocate + + for (uint32_t i = 0; i < pqmodeArray.Length(); ++i) { + std::string modeStr = pqmodeArray[i].String(); + + if (modeStr == "Current") { + char picMode[PIC_MODE_NAME_MAX] = {0}; + if (getCurrentPictureMode(picMode)) { + auto it = pqModeReverseMap.find(std::string(picMode)); + if (it != pqModeReverseMap.end()) { + pqModes.push_back(it->second); + } + } + } else { + auto it = pqModeReverseMap.find(modeStr); + if (it != pqModeReverseMap.end()) { + pqModes.push_back(it->second); + } + } + } + return pqModes; + } + + std::vector AVOutputTV::extractVideoSources(const JsonObject& parameters) { + initializeReverseMaps(); + + std::vector sources; + if (!parameters.HasLabel("videoSource")) { + return sources; + } + + JsonArray sourceArray = parameters["videoSource"].Array(); + sources.reserve(sourceArray.Length()); // Pre-allocate + + for (uint32_t i = 0; i < sourceArray.Length(); ++i) { + std::string srcStr = sourceArray[i].String(); + + if (srcStr == "Current") { + tvVideoSrcType_t sourceIndex = VIDEO_SOURCE_IP; + if (GetCurrentVideoSource(&sourceIndex) == tvERROR_NONE) { + sources.push_back(sourceIndex); + } + } else { + auto it = videoSrcReverseMap.find(srcStr); + if (it != videoSrcReverseMap.end()) { + sources.push_back(it->second); + } + } + } + return sources; + } + + std::vector AVOutputTV::extractVideoFormats(const JsonObject& parameters) { + initializeReverseMaps(); + + std::vector formats; + if (!parameters.HasLabel("videoFormat")) { + return formats; + } + + JsonArray formatArray = parameters["videoFormat"].Array(); + formats.reserve(formatArray.Length()); // Pre-allocate + + for (uint32_t i = 0; i < formatArray.Length(); ++i) { + std::string fmtStr = formatArray[i].String(); + + if (fmtStr == "Current") { + tvVideoFormatType_t formatIndex = VIDEO_FORMAT_NONE; + GetCurrentVideoFormat(&formatIndex); + if (formatIndex == VIDEO_FORMAT_NONE) { + formatIndex = VIDEO_FORMAT_SDR; + } + formats.push_back(formatIndex); + } else { + auto it = videoFormatReverseMap.find(fmtStr); + if (it != 
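// --- Illustrative sketch (not part of this patch) ---------------------------
// extractPQModes, extractVideoSources and extractVideoFormats all do the same
// thing: walk a JSON array of names, resolve the literal "Current" through a
// getter, and map everything else through a reverse map, silently skipping
// unknown names. Standalone sketch of that shape; std::vector<std::string>
// stands in for the Thunder JsonArray and the callback for GetCurrentVideoSource.
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

std::vector<int> extractIds(const std::vector<std::string>& requested,
                            const std::unordered_map<std::string, int>& reverseMap,
                            const std::function<int()>& currentId)
{
    std::vector<int> ids;
    ids.reserve(requested.size());
    for (const auto& name : requested) {
        if (name == "Current") {
            ids.push_back(currentId());          // resolve "Current" via the getter
            continue;
        }
        auto it = reverseMap.find(name);
        if (it != reverseMap.end()) {
            ids.push_back(it->second);           // unknown names are skipped
        }
    }
    return ids;
}

int main()
{
    const std::unordered_map<std::string, int> rev = {{"HDMI1", 1}, {"IP", 5}};
    for (int id : extractIds({"HDMI1", "Current"}, rev, [] { return 5; })) {
        std::cout << id << " ";                  // prints: 1 5
    }
    std::cout << "\n";
}
// --- end sketch --------------------------------------------------------------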
videoFormatReverseMap.end()) { + formats.push_back(it->second); + } + } + } + return formats; + } + + JsonArray AVOutputTV::getJsonArrayIfArray(const JsonObject& obj, const std::string& key) { + return (obj.HasLabel(key.c_str()) && obj[key.c_str()].Content() == JsonValue::type::ARRAY) + ? obj[key.c_str()].Array() + : JsonArray(); // returns empty array + } + + tvContextCaps_t* AVOutputTV::getCapsForParam(const std::string& paramName) + { + tvContextCaps_t* caps = nullptr; + if (paramName == "Backlight") caps = m_backlightCaps; + else if (paramName == "Brightness") caps = m_brightnessCaps; + else if (paramName == "Contrast") caps = m_contrastCaps; + else if (paramName == "Sharpness") caps = m_sharpnessCaps; + else if (paramName == "Saturation") caps = m_saturationCaps; + else if (paramName == "Hue") caps = m_hueCaps; + else if (paramName == "ColorTemp") caps = m_colortempCaps; + else if (paramName == "DimmingMode") caps = m_dimmingModeCaps; + else if (paramName == "PictureMode") caps = m_pictureModeCaps; + else if (paramName == "AspectRatio") caps = m_aspectRatioCaps; + else if (paramName == "LowLatencyState") caps = m_lowLatencyStateCaps; + else if (paramName == "PrecisionDetail") caps = m_precisionDetailCaps; + else if (paramName == "LocalContrastEnhancement") caps = m_localContrastEnhancementCaps; + else if (paramName == "MPEGNoiseReduction") caps = m_MPEGNoiseReductionCaps; + else if (paramName == "DigitalNoiseReduction") caps = m_digitalNoiseReductionCaps; + else if (paramName == "AISuperResolution") caps = m_AISuperResolutionCaps; + else if (paramName == "MEMC") caps = m_MEMCCaps; + else if (paramName == "BacklightMode") caps = m_backlightModeCaps; + else if (paramName == "CMS") caps = m_cmsCaps; + else { + LOGERR("Unknown ParamName: %s", paramName.c_str()); + return nullptr; + } + // Fallback to global pictureModeCaps if cap is empty + if (!caps || caps->num_contexts == 0) + caps = m_pictureModeCaps; + + return caps; + } + std::string AVOutputTV::getCurrentPictureModeAsString() { + char picMode[PIC_MODE_NAME_MAX] = {0}; + if (!getCurrentPictureMode(picMode)) { + LOGERR("Failed to get current picture mode"); + return ""; + } + return picMode; + } + + std::string AVOutputTV::getCurrentVideoSourceAsString() { + tvVideoSrcType_t sourceIndex = VIDEO_SOURCE_IP; + if (GetCurrentVideoSource(&sourceIndex) != tvERROR_NONE) { + LOGERR("GetCurrentVideoSource failed"); + return ""; + } + return convertSourceIndexToStringV2(sourceIndex); + } + + std::string AVOutputTV::getCurrentVideoFormatAsString() { + tvVideoFormatType_t formatIndex = VIDEO_FORMAT_NONE; + if (GetCurrentVideoFormat(&formatIndex) != tvERROR_NONE || formatIndex == VIDEO_FORMAT_NONE) { + formatIndex = VIDEO_FORMAT_SDR; + } + return convertVideoFormatToStringV2(formatIndex); + } + + bool AVOutputTV::isSetRequiredForParam(const JsonObject& parameters, const std::string& paramName) + { + // Get current state once + const std::string curPicMode = getCurrentPictureModeAsString(); + const std::string curSource = getCurrentVideoSourceAsString(); + const std::string curFormat = getCurrentVideoFormatAsString(); + + // Helper to resolve a parameter to a list of effective values + auto resolveParam = [&](const std::string& label, const std::string& currentValue) -> std::vector { + std::vector result; + + if (!parameters.HasLabel(label.c_str())){ + result.push_back(currentValue); + return result; + } + + const auto& array = parameters[label.c_str()].Array(); + if (array.Length() == 0){ + result.push_back(currentValue); + return result; + } + + 
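// --- Illustrative sketch (not part of this patch) ---------------------------
// getCapsForParam resolves a parameter name through a long if/else ladder over
// members such as m_backlightCaps, then falls back to m_pictureModeCaps when the
// selected caps are empty. Functionally that is a string-to-member lookup; a
// table of pointers-to-member keeps the fallback rule while collapsing the
// ladder. All names below are toy stand-ins, not the plugin's members.
#include <iostream>
#include <string>
#include <unordered_map>

struct Caps { int num_contexts; };               // stand-in for tvContextCaps_t

struct Owner {
    Caps backlight{3};
    Caps brightness{0};                          // empty -> falls back
    Caps pictureMode{7};                         // stand-in for m_pictureModeCaps

    Caps* capsFor(const std::string& name)
    {
        static const std::unordered_map<std::string, Caps Owner::*> table = {
            {"Backlight",  &Owner::backlight},
            {"Brightness", &Owner::brightness},
        };
        auto it = table.find(name);
        Caps* caps = (it != table.end()) ? &(this->*(it->second)) : nullptr;
        if (!caps || caps->num_contexts == 0) {
            caps = &pictureMode;                 // same global fallback as above
        }
        return caps;
    }
};

int main()
{
    Owner o;
    std::cout << o.capsFor("Backlight")->num_contexts << "\n";    // 3
    std::cout << o.capsFor("Brightness")->num_contexts << "\n";   // 7 (fallback)
}
// --- end sketch --------------------------------------------------------------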
for (uint16_t i = 0; i < array.Length(); ++i) { + const std::string val = array[i].String(); + if (val == "Current" || val == "Global" || val == "none") { + result.push_back(currentValue); + } else { + result.push_back(val); + } + } + return result; + }; + + // Resolve all + const auto resolvedPicModes = resolveParam("pictureMode", curPicMode); + const auto resolvedFormats = resolveParam("videoFormat", curFormat); + const auto resolvedSources = resolveParam("videoSource", curSource); +#if DEBUG + // Helper function to log vector content + auto logResolvedValues = [&](const std::string& label, const std::vector& values) { + std::string joined; + for (const auto& val : values) { + if (!joined.empty()) joined += ", "; + joined += val; + } + LOGINFO("Resolved %s: [%s]", label.c_str(), joined.c_str()); + }; + + // Debug logs + logResolvedValues("pictureMode", resolvedPicModes); + logResolvedValues("videoSource", resolvedSources); + logResolvedValues("videoFormat", resolvedFormats); +#endif + + // Check if current combination exists in resolved sets + for (const auto& pm : resolvedPicModes) { + if (pm != curPicMode) continue; + + for (const auto& fmt : resolvedFormats) { + if (fmt != curFormat) continue; + + for (const auto& src : resolvedSources) { + if (src == curSource) { + tvContextCaps_t* caps = getCapsForParam(paramName); + if (!caps) { + LOGERR("No caps found for param: %s", paramName.c_str()); + return false; + } + for (size_t i = 0; i < caps->num_contexts; ++i) { + const tvConfigContext_t& ctx = caps->contexts[i]; + std::string capPicMode = convertPictureIndexToStringV2(ctx.pq_mode); + std::string capSource = convertSourceIndexToStringV2(ctx.videoSrcType); + std::string capFormat = convertVideoFormatToStringV2(ctx.videoFormatType); + if ((capPicMode == curPicMode) && + (capSource == curSource) && + (capFormat == curFormat)) + { + // Log the matched combination + LOGINFO("isSetRequiredForParam: matched combination - pictureMode: %s, videoFormat: %s, videoSource: %s", + pm.c_str(), fmt.c_str(), src.c_str()); + return true; + } + } + } + } + } + } + + return false; + } + std::string AVOutputTV::getCMSNameFromEnum(tvDataComponentColor_t colorEnum) + { + switch (colorEnum) { + case tvDataColor_RED: return "Red"; + case tvDataColor_GREEN: return "Green"; + case tvDataColor_BLUE: return "Blue"; + case tvDataColor_CYAN: return "Cyan"; + case tvDataColor_YELLOW: return "Yellow"; + case tvDataColor_MAGENTA: return "Magenta"; + default: return "Unknown"; + } + } + std::vector AVOutputTV::getValidContextsFromParameters(const JsonObject& parameters, const std::string& tr181ParamName) + { + std::vector validContexts; + tvContextCaps_t* caps = getCapsForParam(tr181ParamName); + + if (caps == nullptr || caps->contexts == nullptr) { + LOGWARN("Caps or contexts is null for parameter: %s", tr181ParamName.c_str()); + return validContexts; + } + + // Create a hash set of available contexts for O(1) lookup instead of O(n) linear search + std::unordered_set availableContextsSet; + for (size_t i = 0; i < caps->num_contexts; ++i) { + const auto& ctx = caps->contexts[i]; + std::string key = std::to_string(ctx.pq_mode) + "_" + + std::to_string(ctx.videoFormatType) + "_" + + std::to_string(ctx.videoSrcType); + availableContextsSet.insert(key); + } + + JsonArray pqmodeArray = getJsonArrayIfArray(parameters, "pictureMode"); + JsonArray sourceArray = getJsonArrayIfArray(parameters, "videoSource"); + JsonArray formatArray = getJsonArrayIfArray(parameters, "videoFormat"); + + std::vector pqModes = 
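// --- Illustrative sketch (not part of this patch) ---------------------------
// getValidContextsFromParameters encodes each (pq_mode, format, source) triple as
// the string "pq_fmt_src" so it can sit in an unordered_set for O(1) membership
// tests; std::tuple has no default std::hash, which is why strings are used. An
// ordered std::set of tuples gives the same de-duplicated membership test without
// building strings, at O(log n) per lookup. Toy integers below, not the real enums.
#include <iostream>
#include <set>
#include <tuple>

int main()
{
    std::set<std::tuple<int, int, int>> contexts;    // (pq_mode, format, source)
    contexts.emplace(0, 1, 1);
    contexts.emplace(0, 1, 1);                       // duplicate, ignored
    contexts.emplace(2, 0, 5);

    std::cout << contexts.size() << "\n";                            // 2
    std::cout << contexts.count(std::make_tuple(0, 1, 1)) << "\n";   // 1
    std::cout << contexts.count(std::make_tuple(9, 9, 9)) << "\n";   // 0
}
// --- end sketch --------------------------------------------------------------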
extractPQModes(parameters); + std::vector sources = extractVideoSources(parameters); + std::vector formats = extractVideoFormats(parameters); + + // Handle global parameters - collect unique values to avoid duplicates + std::unordered_set pqModeSet(pqModes.begin(), pqModes.end()); + std::unordered_set sourceSet(sources.begin(), sources.end()); + std::unordered_set formatSet(formats.begin(), formats.end()); + + if (isGlobalParam(pqmodeArray)) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + pqModeSet.insert(caps->contexts[i].pq_mode); + } + } + if (isGlobalParam(sourceArray)) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + sourceSet.insert(caps->contexts[i].videoSrcType); + } + } + if (isGlobalParam(formatArray)) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + formatSet.insert(caps->contexts[i].videoFormatType); + } + } + + if (pqModeSet.empty() || sourceSet.empty() || formatSet.empty()) { + LOGWARN("One or more parameter sets are empty: PQModes[%zu], Sources[%zu], Formats[%zu]", + pqModeSet.size(), sourceSet.size(), formatSet.size()); + return validContexts; + } + + std::unordered_set seenContexts; + validContexts.reserve(pqModeSet.size() * sourceSet.size() * formatSet.size()); // Pre-allocate memory + + // Generate contexts and check validity in single pass + for (const auto& pq : pqModeSet) { + for (const auto& fmt : formatSet) { + for (const auto& src : sourceSet) { + std::string contextKey = std::to_string(pq) + "_" + + std::to_string(fmt) + "_" + + std::to_string(src); + + if (seenContexts.find(contextKey) != seenContexts.end()) { + continue; + } + + if (availableContextsSet.find(contextKey) != availableContextsSet.end()) { + tvConfigContext_t testCtx = { pq, fmt, src }; + validContexts.push_back(testCtx); + seenContexts.insert(contextKey); + } + } + } + } + + // Sort only if we have results to sort + if (!validContexts.empty()) { + std::sort(validContexts.begin(), validContexts.end(), + [](const tvConfigContext_t& a, const tvConfigContext_t& b) { + return std::tie(a.pq_mode, a.videoFormatType, a.videoSrcType) < + std::tie(b.pq_mode, b.videoFormatType, b.videoSrcType); + }); + } + + return validContexts; + } + + int AVOutputTV::updateAVoutputTVParamV2(std::string action, std::string tr181ParamName, + const JsonObject& parameters, + tvPQParameterIndex_t pqParamIndex,int level) + { +#if DEBUG + LOGINFO("Entry %s: Action: %s, Param: %s, Level: %d", __FUNCTION__, action.c_str(), tr181ParamName.c_str(), level); +#endif + int ret = 0; + const bool isSet = (action == "set"); + const bool isReset = (action == "reset"); + const bool isSync = (action == "sync"); + + std::vector validContexts = getValidContextsFromParameters(parameters, tr181ParamName); + LOGINFO("%s: Number of validContexts = %zu", __FUNCTION__, validContexts.size()); +#if DEBUG + for (const auto& ctx : validContexts) { + + std::string pqStr = pqModeMap.count(ctx.pq_mode) ? pqModeMap.at(ctx.pq_mode) : std::to_string(ctx.pq_mode); + std::string fmtStr = videoFormatMap.count(ctx.videoFormatType) ? videoFormatMap.at(ctx.videoFormatType) : std::to_string(ctx.videoFormatType); + std::string srcStr = videoSrcMap.count(ctx.videoSrcType) ? 
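// --- Illustrative sketch (not part of this patch) ---------------------------
// The valid contexts are sorted with a std::tie comparator so callers always see
// them in a stable (pq_mode, format, source) order. Self-contained example of
// that comparator shape; Ctx is a stand-in for tvConfigContext_t.
#include <algorithm>
#include <iostream>
#include <tuple>
#include <vector>

struct Ctx { int pq, fmt, src; };

int main()
{
    std::vector<Ctx> v = {{2, 0, 1}, {0, 1, 5}, {0, 0, 3}};

    std::sort(v.begin(), v.end(), [](const Ctx& a, const Ctx& b) {
        return std::tie(a.pq, a.fmt, a.src) < std::tie(b.pq, b.fmt, b.src);
    });

    for (const auto& c : v) {
        std::cout << c.pq << "," << c.fmt << "," << c.src << "\n";
    }
    // prints 0,0,3  then 0,1,5  then 2,0,1
}
// --- end sketch --------------------------------------------------------------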
videoSrcMap.at(ctx.videoSrcType) : std::to_string(ctx.videoSrcType); + LOGINFO("Valid Context - PQMode: %s, Format: %s, Source: %s", pqStr.c_str(), fmtStr.c_str(), srcStr.c_str()); + } +#endif + if (validContexts.empty()) { + LOGWARN("%s: No valid contexts found for parameters", __FUNCTION__); + return (int)tvERROR_GENERAL; + } + if (tr181ParamName == "CMS") { + JsonArray colorArray = getJsonArrayIfArray(parameters, "color"); + JsonArray componentArray = getJsonArrayIfArray(parameters, "component"); + + std::vector colors, components; + + for (size_t i = 0; i < colorArray.Length(); ++i) + colors.emplace_back(colorArray[i].String()); + + for (size_t i = 0; i < componentArray.Length(); ++i) + components.emplace_back(componentArray[i].String()); + + if (colors.empty()) colors.push_back("Global"); + if (components.empty()) components.push_back("Global"); + + if (colors.size() == 1 && colors[0] == "Global") + colors = m_cmsColorList; + + if (components.size() == 1 && components[0] == "Global") + components = m_cmsComponentList; + + for (const auto& ctx : validContexts) { + for (const auto& colorStr : colors) { + for (const auto& componentStr : components) { +#if DEBUG + LOGINFO("%s: Processing Color: %s, Component: %s", __FUNCTION__, colorStr.c_str(), componentStr.c_str()); +#endif + tvPQParameterIndex_t pqIndex; + if (convertCMSParamToPQEnum(componentStr, colorStr, pqIndex) != 0) { + LOGERR("%s: convertCMSParamToPQEnum failed for color: %s, component: %s", + __FUNCTION__, colorStr.c_str(), componentStr.c_str()); + ret |= 1; + continue; + } + tvDataComponentColor_t colorValue = tvDataColor_NONE; + if ( getCMSColorEnumFromString(colorStr, colorValue ) == -1 ) { + LOGERR("%s : getCMSColorEnumFromString failed for color: %s", __FUNCTION__, colorStr.c_str()); + ret |= 2; + continue; + } + tvComponentType_t componentValue; + if ( getCMSComponentEnumFromString(componentStr, componentValue ) == -1 ) { + LOGERR("%s : getCMSComponentEnumFromString failed for component: %s", __FUNCTION__, componentStr.c_str()); + ret |= 4; + continue; + } + if (std::find(m_cmsColorList.begin(), m_cmsColorList.end(), colorStr) == m_cmsColorList.end()) { + LOGERR("%s: Color '%s' is not supported as per capabilities", __FUNCTION__, colorStr.c_str()); + ret |= 8; + continue; + } + if (std::find(m_cmsComponentList.begin(), m_cmsComponentList.end(), componentStr) == m_cmsComponentList.end()) { + LOGERR("%s: Component '%s' is not supported as per capabilities", __FUNCTION__, componentStr.c_str()); + ret |= 16; + continue; + } + paramIndex_t paramIndex; + paramIndex.sourceIndex = static_cast(ctx.videoSrcType); + paramIndex.pqmodeIndex = static_cast(ctx.pq_mode); + paramIndex.formatIndex = static_cast(ctx.videoFormatType); + paramIndex.componentIndex = static_cast(componentValue); + paramIndex.colorIndex = static_cast(colorValue); + paramIndex.colorTempIndex = 0; + paramIndex.controlIndex = 0; + + int value = 0; + if (isReset) { + ret |= updateAVoutputTVParamToHAL(tr181ParamName, paramIndex, 0, false); + level = 0; + } + + if (isSync || isReset) { + if (getLocalparam(tr181ParamName, paramIndex, value, pqIndex, isSync) == 0) { + level = value; + } else { + LOGWARN("%s: Skipping sync for color: %s, component: %s", + __FUNCTION__, colorStr.c_str(), componentStr.c_str()); + continue; + } + } + ret |= SaveCMS(static_cast(paramIndex.sourceIndex), + paramIndex.pqmodeIndex, + static_cast(paramIndex.formatIndex), + static_cast(paramIndex.componentIndex), + static_cast(paramIndex.colorIndex), + level); + + if (isSet) { + ret |= 
updateAVoutputTVParamToHAL(tr181ParamName, paramIndex, level, true); + } + } + } + } + LOGINFO("Exit: %s, Return Value: %d", __FUNCTION__, ret); + return (ret < 0) ? -1 : 0; + } + for (const auto& ctx : validContexts) + { + paramIndex_t paramIndex { + .sourceIndex = static_cast(ctx.videoSrcType), + .pqmodeIndex = static_cast(ctx.pq_mode), + .formatIndex = static_cast(ctx.videoFormatType) + }; + std::string pqStr = pqModeMap.count(ctx.pq_mode) ? pqModeMap.at(ctx.pq_mode) : std::to_string(ctx.pq_mode); + std::string fmtStr = videoFormatMap.count(ctx.videoFormatType) ? videoFormatMap.at(ctx.videoFormatType) : std::to_string(ctx.videoFormatType); + std::string srcStr = videoSrcMap.count(ctx.videoSrcType) ? videoSrcMap.at(ctx.videoSrcType) : std::to_string(ctx.videoSrcType); + + if (isSet) + { + ret |= updateAVoutputTVParamToHALV2(tr181ParamName, paramIndex, level, true); + } + else + { + if (isReset) + { + ret |= updateAVoutputTVParamToHALV2(tr181ParamName, paramIndex, level, false); + } + if(getLocalparam(tr181ParamName,paramIndex,level,pqParamIndex,isSync)) + { + continue; + } + } + switch (pqParamIndex) + { + case PQ_PARAM_BRIGHTNESS: + ret |= SaveBrightness((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_CONTRAST: + ret |= SaveContrast((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_SHARPNESS: + ret |= SaveSharpness((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_HUE: + ret |= SaveHue((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_SATURATION: + ret |= SaveSaturation((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_COLOR_TEMPERATURE: + ret |= SaveColorTemperature((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvColorTemp_t)level); + break; + case PQ_PARAM_BACKLIGHT: + ret |= SaveBacklight((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_DIMMINGMODE: + ret |= SaveTVDimmingMode((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDimmingMode_t)level); + break; + case PQ_PARAM_LOWLATENCY_STATE: + ret |= SaveLowLatencyState((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_DOLBY_MODE: + ret |= SaveTVDolbyVisionMode((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDolbyMode_t)level); + break; + case PQ_PARAM_ASPECT_RATIO: + ret |= SaveAspectRatio((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDisplayMode_t)level); + break; + case PQ_PARAM_PRECISION_DETAIL: + #if HAL_NOT_READY + #else + ret |= SetPrecisionDetail((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + break; + + case PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT: + #if HAL_NOT_READY + #else + #if ENABLE_PQ_PARAM + ret |= 
SetLocalContrastEnhancement((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + #endif + break; + + case PQ_PARAM_MPEG_NOISE_REDUCTION: + #if HAL_NOT_READY + #else + #if ENABLE_PQ_PARAM + ret |= SetMPEGNoiseReduction((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + #endif + break; + + case PQ_PARAM_DIGITAL_NOISE_REDUCTION: + #if HAL_NOT_READY + #else + #if ENABLE_PQ_PARAM + ret |= SetDigitalNoiseReduction((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + #endif + break; + + case PQ_PARAM_AI_SUPER_RESOLUTION: + #if HAL_NOT_READY + #else + ret |= SetAISuperResolution((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + break; + + case PQ_PARAM_MEMC: + #if HAL_NOT_READY + #else + ret |= SetMEMC((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + break; + #if HAL_NOT_READY + #else + case PQ_PARAM_BACKLIGHT_MODE: + ret |= SaveBacklightMode((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + static_cast(level)); + #endif + break; + case PQ_PARAM_HDR10_MODE: + case PQ_PARAM_HLG_MODE: + case PQ_PARAM_LDIM: + case PQ_PARAM_LOCALDIMMING_LEVEL: + + case PQ_PARAM_WB_GAIN_RED: + case PQ_PARAM_WB_GAIN_GREEN: + case PQ_PARAM_WB_GAIN_BLUE: + case PQ_PARAM_WB_OFFSET_RED: + case PQ_PARAM_WB_OFFSET_GREEN: + case PQ_PARAM_WB_OFFSET_BLUE: + // TODO: Add implementation + break; + + default: + // Prevent compiler warning for unhandled enums + LOGWARN("Unhandled PQ parameter index: %d", pqParamIndex); + break; + } + } + LOGINFO("Exit: %s, Return Value: %d", __FUNCTION__, ret); + return ret; + } + +tvError_t AVOutputTV::ReadJsonFile(JsonObject& root) { + std::ifstream file(CAPABLITY_FILE_NAMEV2); + if (!file.is_open()) { + LOGWARN("AVOutputPlugins: %s: Unable to open file", __FUNCTION__); + return tvERROR_GENERAL; + } + + std::string jsonStr((std::istreambuf_iterator(file)), std::istreambuf_iterator()); + file.close(); + + if (!root.FromString(jsonStr)) { + LOGWARN("AVOutputPlugins: %s: JSON parsing failed", __FUNCTION__); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::ExtractRangeInfo(const JsonObject& data, int* max_value) { + if (!data.HasLabel("rangeInfo")) { + LOGWARN("AVOutputPlugins: %s: 'rangeInfo' not available", __FUNCTION__); + return tvERROR_NONE; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + if (rangeInfo.HasLabel("to")) { + if (!max_value) { + LOGWARN("AVOutputPlugins: %s: NULL input param max_value", __FUNCTION__); + return tvERROR_INVALID_PARAM; + } + *max_value = rangeInfo["to"].Number(); + return tvERROR_NONE; + } + + LOGWARN("AVOutputPlugins: %s: Invalid 'rangeInfo' format", __FUNCTION__); + return tvERROR_GENERAL; +} + +tvError_t AVOutputTV::ExtractContextCaps(const JsonObject& data, tvContextCaps_t** context_caps) { + if (!context_caps) { + LOGWARN("AVOutputPlugins: %s: NULL input param", __FUNCTION__); + return tvERROR_INVALID_PARAM; + } + + if (!data.HasLabel("context")) { + LOGWARN("AVOutputPlugins: %s: 'context' missing", __FUNCTION__); + return 
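// --- Illustrative sketch (not part of this patch) ---------------------------
// ReadJsonFile slurps the capability file into a string with a pair of
// istreambuf_iterator<char> before handing it to JsonObject::FromString. This is
// the file-to-string part in isolation, with the iterator's template argument and
// the extra parentheses that avoid the most-vexing parse; the path below is a
// hypothetical placeholder, not CAPABLITY_FILE_NAMEV2.
#include <fstream>
#include <iostream>
#include <iterator>
#include <string>

std::string slurp(const std::string& path)
{
    std::ifstream file(path);
    if (!file.is_open()) {
        return {};                                // caller treats this as "unable to open"
    }
    return std::string((std::istreambuf_iterator<char>(file)),
                       std::istreambuf_iterator<char>());
}

int main()
{
    const std::string json = slurp("/tmp/avoutput_caps.json");   // hypothetical path
    std::cout << (json.empty() ? "no file" : json.substr(0, 40)) << "\n";
}
// --- end sketch --------------------------------------------------------------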
tvERROR_GENERAL; + } + + JsonObject context = data["context"].Object(); + if (!context.IsSet()) { + LOGWARN("AVOutputPlugins: %s: Context is not set", __FUNCTION__); + return tvERROR_GENERAL; + } + + std::vector contexts = ParseContextCaps(context); + *context_caps = AllocateContextCaps(contexts); + if (!*context_caps) { + LOGWARN("AVOutputPlugins: %s: Memory allocation failed", __FUNCTION__); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} +template +bool LookupEnum(const std::string& str, const std::map& map, EnumType& outEnum) { + for (const auto& entry : map) { + if (entry.second == str) { + outEnum = static_cast(entry.first); + return true; + } + } + return false; +} + +std::vector AVOutputTV::ParseContextCaps(const JsonObject& context) +{ + std::vector contexts; + std::set> seen; + + WPEFramework::Core::JSON::VariantContainer::Iterator modeIterator = context.Variants(); + while (modeIterator.Next()) { + std::string modeStr = modeIterator.Label(); + + tvPQModeIndex_t modeEnum; + if (!LookupEnum(modeStr, pqModeMap, modeEnum)) continue; + + const auto& modeValue = context[modeStr.c_str()]; + if (!modeValue.IsSet() || modeValue.Content() != WPEFramework::Core::JSON::Variant::type::OBJECT) continue; + + JsonObject formatMap = modeValue.Object(); + WPEFramework::Core::JSON::VariantContainer::Iterator formatIterator = formatMap.Variants(); + while (formatIterator.Next()) { + std::string formatStr = formatIterator.Label(); + + tvVideoFormatType_t fmtEnum; + if (!LookupEnum(formatStr, videoFormatMap, fmtEnum)) continue; + + const auto& formatValue = formatMap[formatStr.c_str()]; + if (!formatValue.IsSet() || formatValue.Content() != WPEFramework::Core::JSON::Variant::type::ARRAY) continue; + + JsonArray sources = formatValue.Array(); + for (uint32_t i = 0; i < sources.Length(); ++i) { + std::string srcStr = sources[i].String(); + + tvVideoSrcType_t srcEnum; + if (!LookupEnum(srcStr, videoSrcMap, srcEnum)) continue; + + auto triplet = std::make_tuple(modeEnum, fmtEnum, srcEnum); + if (seen.find(triplet) == seen.end()) { + contexts.push_back({modeEnum, fmtEnum, srcEnum}); + seen.insert(triplet); + } + } + } + } + + return contexts; +} + +tvContextCaps_t* AVOutputTV::AllocateContextCaps(const std::vector& contexts) { + tvContextCaps_t* context_caps = new (std::nothrow) tvContextCaps_t; + if (!context_caps) { + return nullptr; + } + + context_caps->num_contexts = contexts.size(); + context_caps->contexts = contexts.empty() ? 
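// --- Illustrative sketch (not part of this patch) ---------------------------
// LookupEnum does a linear "find key by value" scan over one of the forward maps
// (pqModeMap, videoFormatMap, videoSrcMap), which is fine for the handful of
// entries involved. Standalone sketch of that helper with the template parameters
// written out; the names and the int-keyed map are illustrative assumptions.
#include <iostream>
#include <map>
#include <string>

template <typename EnumType, typename KeyType>
bool lookupByValue(const std::string& value,
                   const std::map<KeyType, std::string>& map,
                   EnumType& out)
{
    for (const auto& entry : map) {
        if (entry.second == value) {
            out = static_cast<EnumType>(entry.first);
            return true;
        }
    }
    return false;                                 // caller skips unknown labels
}

int main()
{
    const std::map<int, std::string> formats = {{1, "SDR"}, {2, "HDR10"}};
    int fmt = 0;
    std::cout << lookupByValue("HDR10", formats, fmt) << " " << fmt << "\n";   // 1 2
    std::cout << lookupByValue("HLG", formats, fmt) << "\n";                   // 0
}
// --- end sketch --------------------------------------------------------------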
nullptr : new (std::nothrow) tvConfigContext_t[contexts.size()]; + + if (!contexts.empty() && !context_caps->contexts) { + delete context_caps; + return nullptr; + } + + if (!contexts.empty()) { + std::copy(contexts.begin(), contexts.end(), context_caps->contexts); + } + return context_caps; +} + +tvError_t AVOutputTV::GetCaps(const std::string& key, int* max_value, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + if (ExtractRangeInfo(data, max_value) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetDVCalibrationCaps(tvDVCalibrationSettings_t **min_values, tvDVCalibrationSettings_t **max_values, tvContextCaps_t **context_caps) { + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + if (!root.HasLabel("DolbyVisionCalibration")) { + LOGWARN("AVOutputPlugins: %s: Missing 'DolbyVisionCalibration' label", __FUNCTION__); + return tvERROR_GENERAL; + } + + JsonObject data = root["DolbyVisionCalibration"].Object(); + *min_values = new tvDVCalibrationSettings_t(); + *max_values = new tvDVCalibrationSettings_t(); + + std::map keyMap = { + {"Tmax", &tvDVCalibrationSettings_t::Tmax}, + {"Tmin", &tvDVCalibrationSettings_t::Tmin}, + {"Tgamma", &tvDVCalibrationSettings_t::Tgamma}, + {"Rx", &tvDVCalibrationSettings_t::Rx}, + {"Ry", &tvDVCalibrationSettings_t::Ry}, + {"Gx", &tvDVCalibrationSettings_t::Gx}, + {"Gy", &tvDVCalibrationSettings_t::Gy}, + {"Bx", &tvDVCalibrationSettings_t::Bx}, + {"By", &tvDVCalibrationSettings_t::By}, + {"Wx", &tvDVCalibrationSettings_t::Wx}, + {"Wy", &tvDVCalibrationSettings_t::Wy} + }; + + for (auto it = keyMap.begin(); it != keyMap.end(); ++it) { + const std::string& key = it->first; + double tvDVCalibrationSettings_t::*member = it->second; + std::string minKey = "range" + key; + if (data.HasLabel(minKey.c_str())) { + JsonObject range = data[minKey.c_str()].Object(); + (*min_values)->*member = range["from"].Number(); + (*max_values)->*member = range["to"].Number(); + } + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetBacklightModeCaps(tvBacklightMode_t** backlight_mode, size_t* num_backlight_mode, tvContextCaps_t** context_caps) +{ + LOGINFO("Entry\n"); + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "BacklightMode"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); 
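// --- Illustrative sketch (not part of this patch) ---------------------------
// AllocateContextCaps copies a std::vector of contexts into a raw, C-style caps
// struct allocated with new(std::nothrow), so every successful caller owns two
// heap blocks and must release both. Toy version of that allocate/copy/cleanup
// contract; Ctx and ContextCaps are stand-ins for tvConfigContext_t/tvContextCaps_t.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <new>
#include <vector>

struct Ctx { int pq, fmt, src; };
struct ContextCaps {
    std::size_t num_contexts = 0;
    Ctx* contexts = nullptr;
};

ContextCaps* allocateCaps(const std::vector<Ctx>& contexts)
{
    ContextCaps* caps = new (std::nothrow) ContextCaps;
    if (!caps) {
        return nullptr;
    }
    caps->num_contexts = contexts.size();
    caps->contexts = contexts.empty() ? nullptr
                                      : new (std::nothrow) Ctx[contexts.size()];
    if (!contexts.empty() && !caps->contexts) {
        delete caps;                              // array allocation failed
        return nullptr;
    }
    if (!contexts.empty()) {
        std::copy(contexts.begin(), contexts.end(), caps->contexts);
    }
    return caps;
}

int main()
{
    ContextCaps* caps = allocateCaps({{0, 1, 1}, {2, 0, 5}});
    if (caps) {
        std::cout << caps->num_contexts << "\n";  // 2
        delete[] caps->contexts;                  // cleanup mirrors the allocation
        delete caps;
    }
}
// --- end sketch --------------------------------------------------------------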
+ + *num_backlight_mode = optionsArray.Length(); + *backlight_mode = static_cast(malloc(*num_backlight_mode * sizeof(tvBacklightMode_t))); + if (!(*backlight_mode)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_backlight_mode; ++i) { + std::string modeStr = optionsArray[i].String(); + auto it = backlightModeReverseMap.find(modeStr); + if (it != backlightModeReverseMap.end()) { + (*backlight_mode)[i] = static_cast (it->second); + } else { + (*backlight_mode)[i] = tvBacklightMode_INVALID; + } + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*backlight_mode); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +// LocalContrastEnhancement +tvError_t AVOutputTV::GetLocalContrastEnhancementCaps(int * maxLocalContrastEnhancement, tvContextCaps_t ** context_caps) { + return GetCaps("LocalContrastEnhancement", maxLocalContrastEnhancement, context_caps); +} + +// MPEGNoiseReduction +tvError_t AVOutputTV::GetMPEGNoiseReductionCaps(int * maxMPEGNoiseReduction, tvContextCaps_t ** context_caps) { + return GetCaps("MPEGNoiseReduction", maxMPEGNoiseReduction, context_caps); +} + +// DigitalNoiseReduction +tvError_t AVOutputTV::GetDigitalNoiseReductionCaps(int * maxDigitalNoiseReduction, tvContextCaps_t ** context_caps) { + return GetCaps("DigitalNoiseReduction", maxDigitalNoiseReduction, context_caps); +} + +tvError_t AVOutputTV::GetMultiPointWBCaps(int* num_hal_matrix_points, + int* rgb_min, + int* rgb_max, + int* num_ui_matrix_points, + double** ui_matrix_positions, + tvContextCaps_t** context_caps) +{ + if (!num_hal_matrix_points || !rgb_min || !rgb_max || + !num_ui_matrix_points || !ui_matrix_positions || !context_caps) + return tvERROR_INVALID_PARAM; + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) + return tvERROR_GENERAL; + + const std::string key = "MultiPointWB"; + if (!root.HasLabel(key.c_str())) + return tvERROR_OPERATION_NOT_SUPPORTED; + + JsonObject data = root[key.c_str()].Object(); + + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) + return tvERROR_OPERATION_NOT_SUPPORTED; + + // Extract matrix points + if (!data.HasLabel("points")) + return tvERROR_INVALID_PARAM; + *num_hal_matrix_points = data["points"].Number(); + + // Extract range info + if (!data.HasLabel("rangeInfo")) + return tvERROR_INVALID_PARAM; + + JsonObject range = data["rangeInfo"].Object(); + if (!range.HasLabel("from") || !range.HasLabel("to")) + return tvERROR_INVALID_PARAM; + + *rgb_min = range["from"].Number(); + *rgb_max = range["to"].Number(); + + // Allocate UI matrix points (same count for now) + *num_ui_matrix_points = *num_hal_matrix_points; + *ui_matrix_positions = new double[*num_ui_matrix_points]; + if (!(*ui_matrix_positions)) + return tvERROR_GENERAL; + + for (int i = 0; i < *num_ui_matrix_points; ++i) + (*ui_matrix_positions)[i] = static_cast(i) / (*num_ui_matrix_points - 1); + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) + return tvERROR_GENERAL; + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetCMSCaps(int* max_hue, + int* max_saturation, + int* max_luma, + tvDataComponentColor_t** color, + tvComponentType_t** component, + size_t* num_color, + size_t* num_component, + tvContextCaps_t** context_caps) +{ + if (!max_hue || !max_saturation || !max_luma || !color || !component || !num_color || !num_component || !context_caps) { + return tvERROR_INVALID_PARAM; + } + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + const char* key = "CMS"; + if 
(!root.HasLabel(key)) { + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject cms = root[key].Object(); + + if (!cms.HasLabel("platformSupport") || !cms["platformSupport"].Boolean()) { + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + // Extract ranges + *max_hue = cms.HasLabel("rangeHue") ? cms["rangeHue"].Object()["to"].Number() : 0; + *max_saturation = cms.HasLabel("rangeSaturation") ? cms["rangeSaturation"].Object()["to"].Number() : 0; + *max_luma = cms.HasLabel("rangeLuma") ? cms["rangeLuma"].Object()["to"].Number() : 0; + + // Extract colors + const JsonArray& colorArray = cms["color"].Array(); + *num_color = colorArray.Length(); + *color = new tvDataComponentColor_t[*num_color]; + for (size_t i = 0; i < *num_color; ++i) { + std::string colorStr = colorArray[i].String(); + if (getCMSColorEnumFromString(colorStr, (*color)[i]) != 0) { + delete[] *color; + *color = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Extract components + const JsonArray& compArray = cms["component"].Array(); + *num_component = compArray.Length(); + *component = new tvComponentType_t[*num_component]; + for (size_t i = 0; i < *num_component; ++i) { + std::string compStr = compArray[i].String(); + if (getCMSComponentEnumFromString(compStr, (*component)[i]) != 0) { + delete[] *color; + delete[] *component; + *color = nullptr; + *component = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Extract context capabilities + if (ExtractContextCaps(cms, context_caps) != tvERROR_NONE) { + delete[] *color; + delete[] *component; + *color = nullptr; + *component = nullptr; + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetCustom2PointWhiteBalanceCaps(int* min_gain, int* min_offset, + int* max_gain, int* max_offset, + tvWBColor_t** color, + tvWBControl_t** control, + size_t* num_color, size_t* num_control, + tvContextCaps_t** context_caps) +{ + if (!min_gain || !min_offset || !max_gain || !max_offset || + !color || !control || !num_color || !num_control || !context_caps) + { + LOGERR("Invalid input pointers"); + return tvERROR_INVALID_PARAM; + } + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + LOGERR("Failed to read JSON capabilities"); + return tvERROR_GENERAL; + } + + const char* key = "Custom2PointWhiteBalance"; + if (!root.HasLabel(key)) { + LOGERR("Missing key: %s", key); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject section = root[key].Object(); + + if (!section.HasLabel("platformSupport") || !section["platformSupport"].Boolean()) { + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + // Parse rangeGain and rangeOffset + *min_gain = section["rangeGain"].Object()["from"].Number(); + *max_gain = section["rangeGain"].Object()["to"].Number(); + *min_offset = section["rangeOffset"].Object()["from"].Number(); + *max_offset = section["rangeOffset"].Object()["to"].Number(); + + // Parse control array + JsonArray controlArray = section["control"].Array(); + *num_control = controlArray.Length(); + *control = new tvWBControl_t[*num_control]; + for (size_t i = 0; i < *num_control; ++i) { + std::string ctrlStr = controlArray[i].String(); + if (getWBControlEnumFromString(ctrlStr, (*control)[i]) != 0) { + LOGERR("Invalid control: %s", ctrlStr.c_str()); + delete[] *control; + *control = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Parse color array + JsonArray colorArray = section["color"].Array(); + *num_color = colorArray.Length(); + *color = new tvWBColor_t[*num_color]; + for (size_t i = 0; i < *num_color; ++i) { + std::string 
colStr = colorArray[i].String(); + if (getWBColorEnumFromString(colStr, (*color)[i]) != 0) { + LOGERR("Invalid color: %s", colStr.c_str()); + delete[] *color; + delete[] *control; + *color = nullptr; + *control = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Parse contextCaps + if (ExtractContextCaps(section, context_caps) != tvERROR_NONE) { + delete[] *color; + delete[] *control; + *color = nullptr; + *control = nullptr; + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +#if HAL_NOT_READY +tvError_t AVOutputTV::GetBacklightCaps(int* max_backlight, tvContextCaps_t** context_caps) { + return GetCaps("Backlight", max_backlight, context_caps); +} + +tvError_t AVOutputTV::GetBrightnessCaps(int* max_brightness, tvContextCaps_t** context_caps) { + return GetCaps("Brightness", max_brightness, context_caps); +} + +tvError_t AVOutputTV::GetContrastCaps(int* max_contrast, tvContextCaps_t** context_caps) { + return GetCaps("Contrast", max_contrast, context_caps); +} + +tvError_t AVOutputTV::GetSharpnessCaps(int* max_sharpness, tvContextCaps_t** context_caps) { + return GetCaps("Sharpness", max_sharpness, context_caps); +} + +tvError_t AVOutputTV::GetSaturationCaps(int* max_saturation, tvContextCaps_t** context_caps) { + return GetCaps("Saturation", max_saturation, context_caps); +} + +tvError_t AVOutputTV::GetHueCaps(int* max_hue, tvContextCaps_t** context_caps) { + return GetCaps("Hue", max_hue, context_caps); +} + +tvError_t AVOutputTV::GetLowLatencyStateCaps(int* max_latency, tvContextCaps_t ** context_caps){ + return GetCaps("LowLatencyState", max_latency, context_caps); +} + +// PrecisionDetail +tvError_t AVOutputTV::GetPrecisionDetailCaps(int * maxPrecision, tvContextCaps_t ** context_caps) { + return GetCaps("PrecisionDetail", maxPrecision, context_caps); +} + +// AISuperResolution +tvError_t AVOutputTV::GetAISuperResolutionCaps(int * maxAISuperResolution, tvContextCaps_t ** context_caps) { + return GetCaps("AISuperResolution", maxAISuperResolution, context_caps); +} + +// MEMC +tvError_t AVOutputTV::GetMEMCCaps(int * maxMEMC, tvContextCaps_t ** context_caps) { + return GetCaps("MEMC", maxMEMC, context_caps); +} + +tvError_t AVOutputTV::GetColorTemperatureCaps(tvColorTemp_t** color_temp, size_t* num_color_temp, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "ColorTemperature"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + + *num_color_temp = optionsArray.Length(); + *color_temp = static_cast(malloc(*num_color_temp * sizeof(tvColorTemp_t))); + if (!(*color_temp)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_color_temp; ++i) { + std::string tempStr = optionsArray[i].String(); + if (tempStr == "Standard") (*color_temp)[i] = tvColorTemp_STANDARD; + else if (tempStr == "Warm") (*color_temp)[i] = tvColorTemp_WARM; + else if (tempStr == "Cold") (*color_temp)[i] = tvColorTemp_COLD; + else if (tempStr == "UserDefined") (*color_temp)[i] = tvColorTemp_USER; + else if (tempStr == 
"Supercold") (*color_temp)[i] = tvColorTemp_SUPERCOLD; + else if (tempStr == "BoostStandard") (*color_temp)[i] = tvColorTemp_BOOST_STANDARD; + else if (tempStr == "BoostWarm") (*color_temp)[i] = tvColorTemp_BOOST_WARM; + else if (tempStr == "BoostCold") (*color_temp)[i] = tvColorTemp_BOOST_COLD; + else if (tempStr == "BoostUserDefined") (*color_temp)[i] = tvColorTemp_BOOST_USER; + else if (tempStr == "BoostSupercold") (*color_temp)[i] = tvColorTemp_BOOST_SUPERCOLD; + else (*color_temp)[i] = tvColorTemp_STANDARD; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*color_temp); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + + +tvError_t AVOutputTV::GetSdrGammaCaps(tvSdrGamma_t** sdr_gamma, size_t* num_sdr_gamma, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "SDRGamma"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + + *num_sdr_gamma = optionsArray.Length(); + *sdr_gamma = static_cast(malloc(*num_sdr_gamma * sizeof(tvSdrGamma_t))); + if (!(*sdr_gamma)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_sdr_gamma; ++i) { + std::string gammaStr = optionsArray[i].String(); + if (gammaStr == "1.8") (*sdr_gamma)[i] = tvSdrGamma_1_8; + else if (gammaStr == "1.9") (*sdr_gamma)[i] = tvSdrGamma_1_9; + else if (gammaStr == "2.0") (*sdr_gamma)[i] = tvSdrGamma_2_0; + else if (gammaStr == "2.1") (*sdr_gamma)[i] = tvSdrGamma_2_1; + else if (gammaStr == "2.2") (*sdr_gamma)[i] = tvSdrGamma_2_2; + else if (gammaStr == "2.3") (*sdr_gamma)[i] = tvSdrGamma_2_3; + else if (gammaStr == "2.4") (*sdr_gamma)[i] = tvSdrGamma_2_4; + else if (gammaStr == "BT.1886") (*sdr_gamma)[i] = tvSdrGamma_BT_1886; + else (*sdr_gamma)[i] = tvSdrGamma_INVALID; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*sdr_gamma); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} +tvError_t AVOutputTV::GetTVDimmingModeCaps(tvDimmingMode_t** dimming_mode, size_t* num_dimming_mode, tvContextCaps_t** context_caps){ + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + std::string key = "DimmingMode"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + *num_dimming_mode = optionsArray.Length(); + *dimming_mode = static_cast(malloc(*num_dimming_mode * sizeof(tvDimmingMode_t))); + if (!(*dimming_mode)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_dimming_mode; ++i) { + std::string modeStr = optionsArray[i].String(); + if (modeStr == "Fixed") 
+            (*dimming_mode)[i] = tvDimmingMode_Fixed;
+        else if (modeStr == "Local") (*dimming_mode)[i] = tvDimmingMode_Local;
+        else if (modeStr == "Global") (*dimming_mode)[i] = tvDimmingMode_Global;
+        else (*dimming_mode)[i] = tvDimmingMode_MAX;
+    }
+
+    if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) {
+        free(*dimming_mode);
+        return tvERROR_GENERAL;
+    }
+
+    return tvERROR_NONE;
+
+}
+
+tvError_t AVOutputTV::GetAspectRatioCaps(tvDisplayMode_t** aspect_ratio, size_t* num_aspect_ratio, tvContextCaps_t** context_caps) {
+    LOGINFO("Entry\n");
+    JsonObject root;
+    if (ReadJsonFile(root) != tvERROR_NONE) {
+        return tvERROR_GENERAL;
+    }
+
+    std::string key = "AspectRatio";
+    if (!root.HasLabel(key.c_str())) {
+        LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str());
+        return tvERROR_GENERAL;
+    }
+
+    JsonObject data = root[key.c_str()].Object();
+    if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) {
+        LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__);
+        return tvERROR_OPERATION_NOT_SUPPORTED;
+    }
+
+    JsonObject rangeInfo = data["rangeInfo"].Object();
+    JsonArray optionsArray = rangeInfo["options"].Array();
+
+    *num_aspect_ratio = optionsArray.Length();
+    *aspect_ratio = static_cast<tvDisplayMode_t*>(malloc(*num_aspect_ratio * sizeof(tvDisplayMode_t)));
+    if (!(*aspect_ratio)) {
+        return tvERROR_GENERAL;
+    }
+
+    for (size_t i = 0; i < *num_aspect_ratio; ++i) {
+        std::string aspectStr = optionsArray[i].String();
+        if (aspectStr == "TV AUTO") (*aspect_ratio)[i] = tvDisplayMode_AUTO;
+        else if (aspectStr == "TV DIRECT") (*aspect_ratio)[i] = tvDisplayMode_DIRECT;
+        else if (aspectStr == "TV FULL") (*aspect_ratio)[i] = tvDisplayMode_FULL;
+        else if (aspectStr == "TV NORMAL") (*aspect_ratio)[i] = tvDisplayMode_NORMAL;
+        else if (aspectStr == "TV 16X9 STRETCH") (*aspect_ratio)[i] = tvDisplayMode_16x9;
+        else if (aspectStr == "TV 4X3 PILLARBOX") (*aspect_ratio)[i] = tvDisplayMode_4x3;
+        else if (aspectStr == "TV ZOOM") (*aspect_ratio)[i] = tvDisplayMode_ZOOM;
+        else (*aspect_ratio)[i] = tvDisplayMode_MAX;
+    }
+
+    if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) {
+        free(*aspect_ratio);
+        return tvERROR_GENERAL;
+    }
+
+    return tvERROR_NONE;
+}
+
+tvError_t AVOutputTV::GetTVPictureModeCaps(tvPQModeIndex_t** mode, size_t* num_pic_modes, tvContextCaps_t** context_caps) {
+    LOGINFO("Entry\n");
+    JsonObject root;
+    if (ReadJsonFile(root) != tvERROR_NONE) {
+        return tvERROR_GENERAL;
+    }
+
+    std::string key = "PictureMode";
+    if (!root.HasLabel(key.c_str())) {
+        LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str());
+        return tvERROR_GENERAL;
+    }
+
+    JsonObject data = root[key.c_str()].Object();
+    if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) {
+        LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__);
+        return tvERROR_OPERATION_NOT_SUPPORTED;
+    }
+
+    JsonObject rangeInfo = data["rangeInfo"].Object();
+    JsonArray optionsArray = rangeInfo["options"].Array();
+
+    *num_pic_modes = optionsArray.Length();
+    *mode = static_cast<tvPQModeIndex_t*>(malloc(*num_pic_modes * sizeof(tvPQModeIndex_t)));
+    if (!(*mode)) {
+        return tvERROR_GENERAL;
+    }
+
+    for (size_t i = 0; i < *num_pic_modes; ++i) {
+        std::string modeStr = optionsArray[i].String();
+
+        if (modeStr == "Standard") (*mode)[i] = PQ_MODE_STANDARD;
+        else if (modeStr == "Vivid") (*mode)[i] = PQ_MODE_VIVID;
+        else if (modeStr == "EnergySaving" || modeStr == "Energy Saving") (*mode)[i] = PQ_MODE_ENERGY_SAVING;
+        else if (modeStr == "Theater") (*mode)[i] = PQ_MODE_THEATER;
+        else if (modeStr == "Game") (*mode)[i] = PQ_MODE_GAME;
+        else if (modeStr == "Sports") (*mode)[i] = PQ_MODE_SPORTS;
+        else if (modeStr == "AI PQ") (*mode)[i] = PQ_MODE_AIPQ;
+        else if (modeStr == "Dark") (*mode)[i] = PQ_MODE_DARK;
+        else if (modeStr == "Bright") (*mode)[i] = PQ_MODE_BRIGHT;
+        else if (modeStr == "IQ") (*mode)[i] = PQ_MODE_IQ;
+        else (*mode)[i] = PQ_MODE_INVALID;
+    }
+
+    if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) {
+        free(*mode);
+        return tvERROR_GENERAL;
+    }
+
+    return tvERROR_NONE;
+}
+
+#endif
+
+    int AVOutputTV::ReadCapablitiesFromConf(std::string param, capDetails_t& info)
+    {
+        int ret = 0;
+
+        /* Consider User WhiteBalance as CustomWhiteBalance
+           to avoid clash with Factory WhiteBalance Calibration capabilities */
+        if ( param == "WhiteBalance") {
+            param = "CustomWhiteBalance";
+        } else if ( param == "AutoBacklightMode") {
+            param = "BacklightControl";
+        }
+
+        try {
+            CIniFile inFile(CAPABLITY_FILE_NAME);
+            std::string configString;
+
+            if(param == "CMS")
+            {
+                configString = param + ".color";
+                info.color = inFile.Get(configString);
+
+                configString = param + ".component";
+                info.component = inFile.Get(configString);
+            }
+
+            if(param == "CustomWhiteBalance")
+            {
+                configString = param + ".color";
+                info.color = inFile.Get(configString);
+
+                configString = param + ".control";
+                info.control = inFile.Get(configString);
+
+            }
+
+            if ((param == "DolbyVisionMode") || (param == "Backlight") || (param == "CMS") || (param == "CustomWhiteBalance") || (param == "HDRMode") || (param == "BacklightControl")) {
+                configString = param + ".platformsupport";
+                info.isPlatformSupport = inFile.Get(configString);
+                printf(" platformsupport : %s\n",info.isPlatformSupport.c_str() );
+            }
+
+            if ( (param == "ColorTemperature") || (param == "DimmingMode") ||
+                 ( param == "BacklightControl") || (param == "DolbyVisionMode") ||
+                 (param == "HDR10Mode") || (param == "HLGMode") || (param == "AspectRatio") ||
+                 (param == "PictureMode") || (param == "VideoSource") || (param == "VideoFormat") ||
+                 (param == "VideoFrameRate") || (param == "HDRMode") ) {
+                configString = param + ".range";
+                info.range = inFile.Get(configString);
+                printf(" String Range info : %s\n",info.range.c_str() );
+            } else if ( (param == "CMS" )) {
+                configString.clear();
+                configString = param + ".range_Saturation_from";
+                info.range = inFile.Get(configString);
+                configString = param + ".range_Saturation_to";
+                info.range += ","+inFile.Get(configString);
+
+                configString = param + ".range_Hue_from";
+                info.range += ","+inFile.Get(configString);
+                configString = param + ".range_Hue_to";
+                info.range += ","+inFile.Get(configString);
+
+                configString = param + ".range_Luma_from";
+                info.range += ","+inFile.Get(configString);
+                configString = param + ".range_Luma_to";
+                info.range += ","+inFile.Get(configString);
+            } else if ( (param == "CustomWhiteBalance")) {
+                configString = param + ".range_Gain_from";
+                info.range = inFile.Get(configString);
+                configString = param + ".range_Gain_to";
+                info.range += ","+inFile.Get(configString);
+
+                configString = param + ".range_Offset_from";
+                info.range += ","+inFile.Get(configString);
+                configString = param + ".range_Offset_to";
+                info.range += ","+inFile.Get(configString);
+            } else {
+                configString = param + ".range_from";
+                info.range = inFile.Get(configString);
+                configString = param + ".range_to";
+                info.range += ","+inFile.Get(configString);
+                printf(" Integer Range Info : %s\n",info.range.c_str() );
+            }
+
+            if ((param == "VideoSource") || (param == "PictureMode") || (param == "VideoFormat") ) {
+                configString.clear();
+                configString = param + ".index";
+                info.index = inFile.Get(configString);
+                printf("Index value %s\n", info.index.c_str());
+            }
+
+            configString.clear();
+            configString = param + ".pqmode";
+            info.pqmode = inFile.Get(configString);
+            configString = param + ".format";
+            info.format = inFile.Get(configString);
+            configString = param + ".source";
+            info.source = inFile.Get(configString);
+            ret = 0;
+        }
+        catch(const boost::property_tree::ptree_error &e) {
+            printf("%s: error %s::config table entry not found in ini file\n",__FUNCTION__,e.what());
+            ret = -1;
+        }
+        return ret;
+    }
+    bool AVOutputTV::checkCMSColorAndComponentCapability(const std::string capValue, const std::string inputValue) {
+        // Parse capValue into a set
+        std::set<std::string> capSet;
+        std::istringstream capStream(capValue);
+        std::string token;
+
+        while (std::getline(capStream, token, ',')) {
+            capSet.insert(token);
+        }
+
+        // Parse inputValue and check if each item exists in the set
+        std::istringstream inputStream(inputValue);
+        while (std::getline(inputStream, token, ',')) {
+            if (capSet.find(token) == capSet.end()) {
+                return false;
+            }
+        }
+        return true;
+    }
+
 } //namespace Plugin
 } //namespace WPEFramework
diff --git a/AVOutput/CHANGELOG.md b/AVOutput/CHANGELOG.md
index d9497c1c..38ec94a2 100644
--- a/AVOutput/CHANGELOG.md
+++ b/AVOutput/CHANGELOG.md
@@ -14,9 +14,12 @@
 All notable changes to this RDK Service will be documented in this file.
 * Changes in CHANGELOG should be updated when commits are added to the main or release branches. There should be one CHANGELOG entry per JIRA Ticket. This is not enforced on sprint branches since there could be multiple changes for the same JIRA ticket during development.
 
-## [1.0.10] - 2025-02-17
-### Fixed
-ODM API removal changes phase 1 and Fixed PQ Mode Camel Case issue
+## [1.1.0] - 2025-03-14
+### Added
+
+## [1.0.10] - 2024-12-23
+### Added
+- ODM API removal changes phase 1
 
 ## [1.0.0] - 2025-02-17
 ### Added