diff --git a/AVOutput/AVOutput.cpp b/AVOutput/AVOutput.cpp
index fc8dd79f..b8a3a229 100644
--- a/AVOutput/AVOutput.cpp
+++ b/AVOutput/AVOutput.cpp
@@ -22,10 +22,27 @@
 #include "UtilsIarm.h"
 #include "UtilsSearchRDKProfile.h"
 
+#define API_VERSION_NUMBER_MAJOR 1
+#define API_VERSION_NUMBER_MINOR 2
+#define API_VERSION_NUMBER_PATCH 0
+
 namespace WPEFramework {
 namespace Plugin {
 
-    SERVICE_REGISTRATION(AVOutput,1, 0);
+    static Plugin::Metadata<AVOutput> metadata(
+        // Version (Major, Minor, Patch)
+        API_VERSION_NUMBER_MAJOR, API_VERSION_NUMBER_MINOR, API_VERSION_NUMBER_PATCH,
+        // Preconditions
+        {},
+        // Terminations
+        {},
+        // Controls
+        {}
+    );
+
+
+    SERVICE_REGISTRATION(AVOutput, API_VERSION_NUMBER_MAJOR, API_VERSION_NUMBER_MINOR, API_VERSION_NUMBER_PATCH);
+
 
     AVOutput::AVOutput()
     {
diff --git a/AVOutput/AVOutputTV.cpp b/AVOutput/AVOutputTV.cpp
index 3f0e8e59..1714b671 100644
--- a/AVOutput/AVOutputTV.cpp
+++ b/AVOutput/AVOutputTV.cpp
@@ -358,6 +358,55 @@ namespace Plugin {
         registerMethod("resetAutoBacklightMode", &AVOutputTV::resetAutoBacklightMode, this);
         registerMethod("getAutoBacklightModeCaps", &AVOutputTV::getAutoBacklightModeCaps, this);
 
+        registerMethod("getBacklightCapsV2", &AVOutputTV::getBacklightCapsV2, this);
+        registerMethod("getBrightnessCapsV2", &AVOutputTV::getBrightnessCapsV2, this);
+        registerMethod("getContrastCapsV2", &AVOutputTV::getContrastCapsV2, this);
+        registerMethod("getSharpnessCapsV2", &AVOutputTV::getSharpnessCapsV2, this);
+        registerMethod("getSaturationCapsV2", &AVOutputTV::getSaturationCapsV2, this);
+        registerMethod("getHueCapsV2", &AVOutputTV::getHueCapsV2, this);
+        registerMethod("getLowLatencyStateCapsV2", &AVOutputTV::getLowLatencyStateCapsV2, this);
+        registerMethod("getColorTemperatureCapsV2", &AVOutputTV::getColorTemperatureCapsV2, this);
+        registerMethod("getBacklightDimmingModeCapsV2", &AVOutputTV::getBacklightDimmingModeCapsV2, this);
+        registerMethod("getZoomModeCapsV2", &AVOutputTV::getZoomModeCapsV2, this);
+        registerMethod("getDolbyVisionCalibrationCaps", &AVOutputTV::getDolbyVisionCalibrationCaps, this);
+        registerMethod("getPictureModeCapsV2", &AVOutputTV::getPictureModeCapsV2, this);
+        registerMethod("getAutoBacklightModeCapsV2", &AVOutputTV::getAutoBacklightModeCapsV2, this);
+        registerMethod("getCMSCapsV2", &AVOutputTV::getCMSCapsV2, this);
+        registerMethod("get2PointWBCapsV2", &AVOutputTV::get2PointWBCapsV2, this);
+        registerMethod("getSDRGammaCaps", &AVOutputTV::getSDRGammaCaps, this);
+
+        registerMethod("getPrecisionDetailCaps", &AVOutputTV::getPrecisionDetailCaps, this);
+        registerMethod("getPrecisionDetail", &AVOutputTV::getPrecisionDetail, this);
+        registerMethod("setPrecisionDetail", &AVOutputTV::setPrecisionDetail, this);
+        registerMethod("resetPrecisionDetail", &AVOutputTV::resetPrecisionDetail, this);
+
+        registerMethod("getLocalContrastEnhancementCaps", &AVOutputTV::getLocalContrastEnhancementCaps, this);
+        registerMethod("getLocalContrastEnhancement", &AVOutputTV::getLocalContrastEnhancement, this);
+        registerMethod("setLocalContrastEnhancement", &AVOutputTV::setLocalContrastEnhancement, this);
+        registerMethod("resetLocalContrastEnhancement", &AVOutputTV::resetLocalContrastEnhancement, this);
+
+        registerMethod("getMPEGNoiseReductionCaps", &AVOutputTV::getMPEGNoiseReductionCaps, this);
+        registerMethod("getMPEGNoiseReduction", &AVOutputTV::getMPEGNoiseReduction, this);
+        registerMethod("setMPEGNoiseReduction", &AVOutputTV::setMPEGNoiseReduction, this);
+        registerMethod("resetMPEGNoiseReduction", &AVOutputTV::resetMPEGNoiseReduction,
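For orientation, each registerMethod call above binds a JSON-RPC method name to a handler of the form uint32_t Handler(const JsonObject& parameters, JsonObject& response), which is the signature used by the handler definitions later in this patch. The stand-alone sketch below mirrors that name-to-handler dispatch with standard-library types only; the Dispatcher class and the string-map stand-ins for JsonObject are illustrative, not part of the plugin.

#include <functional>
#include <iostream>
#include <map>
#include <string>

// Simplified stand-ins for the plugin's JsonObject parameters/response.
using Params = std::map<std::string, std::string>;
using Response = std::map<std::string, std::string>;

class Dispatcher {
public:
    using Handler = std::function<uint32_t(const Params&, Response&)>;

    // Rough equivalent of registerMethod(name, &Class::method, this).
    void registerMethod(const std::string& name, Handler handler) {
        handlers_[name] = std::move(handler);
    }

    uint32_t invoke(const std::string& name, const Params& in, Response& out) const {
        auto it = handlers_.find(name);
        return (it != handlers_.end()) ? it->second(in, out) : 1u; // 1 = "unknown method"
    }

private:
    std::map<std::string, Handler> handlers_;
};

int main() {
    Dispatcher d;
    d.registerMethod("getBacklightCapsV2", [](const Params&, Response& out) {
        out["platformSupport"] = "true";
        return 0u;
    });
    Response r;
    d.invoke("getBacklightCapsV2", {}, r);
    std::cout << r["platformSupport"] << "\n"; // prints "true"
}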
this); + + registerMethod("getDigitalNoiseReductionCaps", &AVOutputTV::getDigitalNoiseReductionCaps, this); + registerMethod("getDigitalNoiseReduction", &AVOutputTV::getDigitalNoiseReduction, this); + registerMethod("setDigitalNoiseReduction", &AVOutputTV::setDigitalNoiseReduction, this); + registerMethod("resetDigitalNoiseReduction", &AVOutputTV::resetDigitalNoiseReduction, this); + + registerMethod("getMEMCCaps", &AVOutputTV::getMEMCCaps, this); + registerMethod("getMEMC", &AVOutputTV::getMEMC, this); + registerMethod("setMEMC", &AVOutputTV::setMEMC, this); + registerMethod("resetMEMC", &AVOutputTV::resetMEMC, this); + + registerMethod("getAISuperResolutionCaps", &AVOutputTV::getAISuperResolutionCaps, this); + registerMethod("getAISuperResolution", &AVOutputTV::getAISuperResolution, this); + registerMethod("setAISuperResolution", &AVOutputTV::setAISuperResolution, this); + registerMethod("resetAISuperResolution", &AVOutputTV::resetAISuperResolution, this); + + registerMethod("getMultiPointWBCaps", &AVOutputTV::getMultiPointWBCaps, this); + LOGINFO("Exit\n"); } @@ -459,399 +508,1743 @@ namespace Plugin { LOGINFO("Exit\n"); } - uint32_t AVOutputTV::getZoomModeCaps(const JsonObject& parameters, JsonObject& response) + // Shared zoom mode mappings + static const std::unordered_map zoomModeReverseMap = { + {tvDisplayMode_16x9, "TV 16X9 STRETCH"}, + {tvDisplayMode_4x3, "TV 4X3 PILLARBOX"}, + {tvDisplayMode_NORMAL, "TV NORMAL"}, + {tvDisplayMode_DIRECT, "TV DIRECT"}, + {tvDisplayMode_AUTO, "TV AUTO"}, + {tvDisplayMode_ZOOM, "TV ZOOM"}, + {tvDisplayMode_FULL, "TV FULL"} + }; + static const std::unordered_map zoomModeMap = { + {"TV 16X9 STRETCH", tvDisplayMode_16x9}, + {"TV 4X3 PILLARBOX", tvDisplayMode_4x3}, + {"TV NORMAL", tvDisplayMode_NORMAL}, + {"TV DIRECT", tvDisplayMode_DIRECT}, + {"TV AUTO", tvDisplayMode_AUTO}, + {"TV ZOOM", tvDisplayMode_ZOOM}, + {"TV FULL", tvDisplayMode_FULL} + }; + static const std::unordered_map dimmingModeReverseMap = { + { tvDimmingMode_Fixed, "Fixed" }, + { tvDimmingMode_Local, "Local" }, + { tvDimmingMode_Global, "Global" } + }; + static const std::unordered_map dimmingModeMap = { + { "Fixed", tvDimmingMode_Fixed }, + { "Local", tvDimmingMode_Local }, + { "Global", tvDimmingMode_Global } + }; + + bool AVOutputTV::getPQParamFromContext(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t paramType, + int& outValue) { - LOGINFO("Entry"); - capVectors_t info; - - JsonArray rangeArray; - JsonArray pqmodeArray; - JsonArray formatArray; - JsonArray sourceArray; - - unsigned int index = 0; - - tvError_t ret = getParamsCaps("AspectRatio",info); - - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - for (index = 0; index < info.rangeVector.size(); index++) { - rangeArray.Add(info.rangeVector[index]); + tvConfigContext_t validContext = getValidContextFromGetParameters(parameters, paramName); + if ((validContext.videoSrcType == VIDEO_SOURCE_ALL && + validContext.videoFormatType == VIDEO_FORMAT_NONE && + validContext.pq_mode == PQ_MODE_INVALID)) + { + LOGWARN("No Valid context for get %s", paramName.c_str()); + return false; } - response["options"]=rangeArray; + paramIndex_t indexInfo + { + .sourceIndex = static_cast(validContext.videoSrcType), + .pqmodeIndex = static_cast(validContext.pq_mode), + .formatIndex = static_cast(validContext.videoFormatType) + }; - if (info.pqmodeVector.front().compare("none") != 0) { - for (index = 0; index < info.pqmodeVector.size(); index++) { - pqmodeArray.Add(info.pqmodeVector[index]); - } - 
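The shared zoomModeMap/zoomModeReverseMap pair introduced above replaces the long if/else chains of the legacy setZoomMode/getZoomMode paths with table lookups in both directions. A minimal, self-contained sketch of the same round-trip lookup; the tvDisplayMode_t values are stubbed with a local enum purely for illustration.

#include <iostream>
#include <string>
#include <unordered_map>

// Stand-in for tvDisplayMode_t from the TV settings HAL headers.
enum class DisplayMode { Normal, Stretch16x9, Pillarbox4x3, Direct, Auto, Zoom, Full };

static const std::unordered_map<std::string, DisplayMode> zoomModeMap = {
    {"TV NORMAL", DisplayMode::Normal},           {"TV 16X9 STRETCH", DisplayMode::Stretch16x9},
    {"TV 4X3 PILLARBOX", DisplayMode::Pillarbox4x3}, {"TV DIRECT", DisplayMode::Direct},
    {"TV AUTO", DisplayMode::Auto},               {"TV ZOOM", DisplayMode::Zoom},
    {"TV FULL", DisplayMode::Full}};

static const std::unordered_map<DisplayMode, std::string> zoomModeReverseMap = {
    {DisplayMode::Normal, "TV NORMAL"}, {DisplayMode::Zoom, "TV ZOOM"} /* ... */};

int main() {
    // set path: validate the incoming string and translate it to the enum.
    auto it = zoomModeMap.find("TV ZOOM");
    if (it == zoomModeMap.end()) { std::cerr << "invalid zoom mode\n"; return 1; }

    // get path: translate the stored enum back to the UI string.
    std::cout << zoomModeReverseMap.at(it->second) << "\n"; // prints "TV ZOOM"
}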
response["pictureModeInfo"]=pqmodeArray; - } - if ((info.sourceVector.front()).compare("none") != 0) { - for (index = 0; index < info.sourceVector.size(); index++) { - sourceArray.Add(info.sourceVector[index]); - } - response["videoSourceInfo"]=sourceArray; - } - if ((info.formatVector.front()).compare("none") != 0) { - for (index = 0; index < info.formatVector.size(); index++) { - formatArray.Add(info.formatVector[index]); - } - response["videoFormatInfo"]=formatArray; - } - LOGINFO("Exit\n"); - returnResponse(true); + int value = 0; + tvError_t err = static_cast(getLocalparam(paramName.c_str(), indexInfo, value, paramType)); + if (err == tvERROR_NONE) { + outValue = value; + return true; } + + LOGERR("getLocalparam failed for %s with error code %d", paramName.c_str(), err); + return false; } - uint32_t AVOutputTV::setZoomMode(const JsonObject& parameters, JsonObject& response) + bool AVOutputTV::getEnumPQParamString( + const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqType, + const std::unordered_map& enumToStrMap, + std::string& outStr) { - LOGINFO("Entry\n"); - std::string value; - tvDisplayMode_t mode = tvDisplayMode_16x9; - capDetails_t inputInfo; + LOGINFO("getEnumPQParamString Entry for %s\n", paramName.c_str()); + tvConfigContext_t validContext = getValidContextFromGetParameters(parameters, paramName); + if ((validContext.videoSrcType == VIDEO_SOURCE_ALL && + validContext.videoFormatType == VIDEO_FORMAT_NONE && + validContext.pq_mode == PQ_MODE_INVALID)) + { + LOGWARN("No valid context for get %s", paramName.c_str()); + return false; + } - value = parameters.HasLabel("zoomMode") ? parameters["zoomMode"].String() : ""; - returnIfParamNotFound(parameters,"zoomMode"); + paramIndex_t indexInfo { + .sourceIndex = static_cast(validContext.videoSrcType), + .pqmodeIndex = static_cast(validContext.pq_mode), + .formatIndex = static_cast(validContext.videoFormatType) + }; - if (validateInputParameter("AspectRatio",value) != 0) { - LOGERR("%s: Range validation failed for AspectRatio\n", __FUNCTION__); - returnResponse(false); + int paramValue = 0; + int err = getLocalparam(paramName, indexInfo, paramValue, pqType); + if (err != 0) { + LOGERR("Failed to get %s from localparam", paramName.c_str()); + return false; } - if (parsingSetInputArgument(parameters,"AspectRatio",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); + auto it = enumToStrMap.find(paramValue); + if (it != enumToStrMap.end()) { + outStr = it->second; + LOGINFO("%s = %s", paramName.c_str(), outStr.c_str()); + return true; + } else { + LOGERR("Enum value %d not found in map for %s", paramValue, paramName.c_str()); + return false; } + } + bool AVOutputTV::setCMSParam(const JsonObject& parameters) + { + LOGINFO("Entry: setCMSParam"); - if( !isCapablityCheckPassed( "AspectRatio",inputInfo )) { - LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); - returnResponse(false); - } + std::string colorStr = parameters.HasLabel("color") ? parameters["color"].String() : ""; + std::string componentStr = parameters.HasLabel("component") ? parameters["component"].String() : ""; + std::string levelStr = parameters.HasLabel("level") ? 
parameters["level"].String() : ""; - if(!value.compare("TV 16X9 STRETCH")) { - mode = tvDisplayMode_16x9; - } - else if (!value.compare("TV 4X3 PILLARBOX")) { - mode = tvDisplayMode_4x3; - } - else if (!value.compare("TV NORMAL")) { - mode = tvDisplayMode_NORMAL; - } - else if (!value.compare("TV DIRECT")) { - mode = tvDisplayMode_DIRECT; - } - else if (!value.compare("TV AUTO")) { - mode = tvDisplayMode_AUTO; - } - else if (!value.compare("TV ZOOM")) { - mode = tvDisplayMode_ZOOM; + if (colorStr.empty() || componentStr.empty() || levelStr.empty()) { + LOGERR("Missing color/component/level"); + return false; } + + int level = 0; + try { + level = std::stoi(levelStr); + } catch (...) { + LOGERR("Invalid level value: %s", levelStr.c_str()); + return false; + } + + int maxCap = 0; + if (componentStr == "Hue") + maxCap = m_maxCmsHue; + else if (componentStr == "Saturation") + maxCap = m_maxCmsSaturation; + else if (componentStr == "Luma") + maxCap = m_maxCmsLuma; else { - returnResponse(false); + LOGERR("Invalid component: %s", componentStr.c_str()); + return false; } - m_videoZoomMode = mode; - tvError_t ret = setAspectRatioZoomSettings (mode); - if(ret != tvERROR_NONE) { - returnResponse(false); + if (level < 0 || level > maxCap) { + LOGERR("Level out of range: %d (0-%d)", level, maxCap); + return false; } - else { - //Save DisplayMode to localstore and ssm_data - int retval=updateAVoutputTVParam("set","AspectRatio",inputInfo,PQ_PARAM_ASPECT_RATIO,mode); - - if(retval != 0) { - LOGERR("Failed to Save DisplayMode to ssm_data\n"); - returnResponse(false); - } - tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); - if ( err != tr181Success ) { - LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); - returnResponse(false); - } - else { - LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); - } - LOGINFO("Exit : SetAspectRatio() value : %s\n",value.c_str()); - returnResponse(true); + tvDataComponentColor_t colorEnum; + if (getCMSColorEnumFromString(colorStr, colorEnum) != 0) { + LOGERR("Invalid color: %s", colorStr.c_str()); + return false; } - } - uint32_t AVOutputTV::getZoomMode(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - tvDisplayMode_t mode; + if( isSetRequiredForParam(parameters, "CMS") ) { + LOGINFO("Proceed with SetCMSState \n"); + tvError_t ret = SetCMSState(true); + if(ret != tvERROR_NONE) { + LOGWARN("CMS enable failed\n"); + return false; + } + if (componentStr == "Hue") + ret = SetCurrentComponentHue(colorEnum, level); + else if (componentStr == "Saturation") + ret = SetCurrentComponentSaturation(colorEnum, level); + else if (componentStr == "Luma") + ret = SetCurrentComponentLuma(colorEnum, level); - tvError_t ret = getUserSelectedAspectRatio (&mode); + if (ret != tvERROR_NONE) { + LOGERR("HAL set failed for %s", componentStr.c_str()); + return false; + } + } - if(ret != tvERROR_NONE) { - returnResponse(false); + try { + int retVal = updateAVoutputTVParamV2("set", "CMS", parameters, PQ_PARAM_CMS, level); + if (retVal < 0) { + LOGERR("setCMSParam: Failed to save CMS param, return code: %d", retVal); + return false; + } + } catch (const std::exception& e) { + LOGERR("Exception in updateAVoutputTVParamV2: %s", e.what()); + return false; + } catch (...) 
{ + LOGERR("Unknown exception in updateAVoutputTVParamV2"); + return false; } - else { - switch(mode) { - case tvDisplayMode_16x9: - LOGINFO("Aspect Ratio: TV 16X9 STRETCH\n"); - response["zoomMode"] = "TV 16X9 STRETCH"; - break; - case tvDisplayMode_4x3: - LOGINFO("Aspect Ratio: TV 4X3 PILLARBOX\n"); - response["zoomMode"] = "TV 4X3 PILLARBOX"; - break; + LOGINFO("Exit: setCMSParam success"); + return true; + } - case tvDisplayMode_NORMAL: - LOGINFO("Aspect Ratio: TV Normal\n"); - response["zoomMode"] = "TV NORMAL"; - break; - case tvDisplayMode_AUTO: - LOGINFO("Aspect Ratio: TV AUTO\n"); - response["zoomMode"] = "TV AUTO"; - break; + bool AVOutputTV::setEnumPQParam(const JsonObject& parameters, + const std::string& inputKey, + const std::string& paramName, + const std::unordered_map& valueMap, + tvPQParameterIndex_t paramType, + std::function halSetter) + { + if (!parameters.HasLabel(inputKey.c_str())) { + LOGERR("Missing input field: %s", inputKey.c_str()); + return false; + } - case tvDisplayMode_DIRECT: - LOGINFO("Aspect Ratio: TV DIRECT\n"); - response["zoomMode"] = "TV DIRECT"; - break; + std::string value = parameters[inputKey.c_str()].String(); + auto it = valueMap.find(value); + if (it == valueMap.end()) { + LOGERR("Invalid value '%s' for parameter: %s", value.c_str(), inputKey.c_str()); + return false; + } - case tvDisplayMode_ZOOM: - LOGINFO("Aspect Ratio: TV ZOOM\n"); - response["zoomMode"] = "TV ZOOM"; - break; + int intVal = it->second; - default: - LOGINFO("Aspect Ratio: TV AUTO\n"); - response["zoomMode"] = "TV AUTO"; - break; + // Only call HAL for current system context + if (isSetRequiredForParam(parameters, paramName)) { + LOGINFO("Calling HAL for %s = %s intVal %d", paramName.c_str(), value.c_str(), intVal); + tvError_t ret = halSetter(intVal); + if (ret != tvERROR_NONE) { + LOGERR("HAL setter failed for %s", paramName.c_str()); + return false; } - returnResponse(true); } + + // Persist the parameter contextually + int result = updateAVoutputTVParamV2("set", paramName, parameters, paramType, intVal); + if (result != 0) { + LOGERR("Persistence failed for %s", paramName.c_str()); + return false; + } + + LOGINFO("setEnumPQParam successful: %s = %s", paramName.c_str(), value.c_str()); + return true; } - uint32_t AVOutputTV::resetZoomMode(const JsonObject& parameters, JsonObject& response) + bool AVOutputTV::setIntPQParam(const JsonObject& parameters, const std::string& paramName, + tvPQParameterIndex_t pqType, tvSetFunction halSetter, int maxCap) { - LOGINFO("Entry\n"); - capDetails_t inputInfo; + LOGINFO("Entry: %s\n", paramName.c_str()); + int paramValue = 0; tvError_t ret = tvERROR_NONE; + std::string value = ""; + std::string lowerParamName = paramName; + std::transform(lowerParamName.begin(), lowerParamName.end(), lowerParamName.begin(), ::tolower); - if (parsingSetInputArgument(parameters, "AspectRatio",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); + if (!parameters.HasLabel(lowerParamName.c_str())) { + LOGERR("%s: Missing parameter: %s", __FUNCTION__, lowerParamName.c_str()); + return false; } - if( !isCapablityCheckPassed( "AspectRatio",inputInfo )) { - LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); - returnResponse(false); - } + value = parameters[lowerParamName.c_str()].String(); - tr181ErrorCode_t err = clearLocalParam(rfc_caller_id,AVOUTPUT_ASPECTRATIO_RFC_PARAM); - if ( err != tr181Success ) { - LOGERR("clearLocalParam for %s Failed : %s\n", 
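setIntPQParam above is the generic path used by the integer PQ setters; for example, setBacklight later in this patch calls setIntPQParam(parameters, "Backlight", PQ_PARAM_BACKLIGHT, SetBacklight, m_maxBacklight). It lower-cases the parameter name to find the JSON field, range-checks the value, calls the HAL setter only when the request targets the current context, and then persists the value. A reduced stand-alone sketch of that flow; the HalSetter alias and the stub setter below are illustrative.

#include <algorithm>
#include <cctype>
#include <functional>
#include <iostream>
#include <map>
#include <string>

using Params = std::map<std::string, std::string>;       // stand-in for JsonObject
using HalSetter = std::function<int(int)>;                // stand-in for tvSetFunction

bool setIntParam(const Params& params, std::string name, HalSetter halSetter, int maxCap) {
    std::transform(name.begin(), name.end(), name.begin(), ::tolower);  // JSON field is lower-case
    auto it = params.find(name);
    if (it == params.end()) return false;                 // missing field

    int value = 0;
    try { value = std::stoi(it->second); } catch (...) { return false; }
    if (value < 0 || value > maxCap) return false;        // range check against the caps maximum

    if (halSetter(value) != 0) return false;              // apply to the (stubbed) HAL
    // ...contextual persistence of the value would follow here...
    return true;
}

int main() {
    auto fakeSetBacklight = [](int v) { std::cout << "SetBacklight(" << v << ")\n"; return 0; };
    std::cout << std::boolalpha
              << setIntParam({{"backlight", "42"}}, "Backlight", fakeSetBacklight, 100) << "\n"; // true
}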
AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); - ret = tvERROR_GENERAL; - } - else { - ret = setDefaultAspectRatio(inputInfo.pqmode,inputInfo.source,inputInfo.format); - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetDefaultAspectRatio()\n"); - returnResponse(true); + try { + paramValue = std::stoi(value); + } catch (const std::exception& e) { + LOGERR("Invalid %s value: %s. Exception: %s", paramName.c_str(), value.c_str(), e.what()); + return false; } - } - uint32_t AVOutputTV::getVideoFormat(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - tvVideoFormatType_t videoFormat; - tvError_t ret = GetCurrentVideoFormat(&videoFormat); - if(ret != tvERROR_NONE) { - response["currentVideoFormat"] = "NONE"; - returnResponse(false); - } - else { - response["currentVideoFormat"] = getVideoFormatTypeToString(videoFormat); - LOGINFO("Exit: getVideoFormat :%d success \n",videoFormat); - returnResponse(true); + if (paramValue < 0 || paramValue > maxCap) { + LOGERR("Input value %d is out of range (0 - %d) for %s", paramValue, maxCap, paramName.c_str()); + return false; } - } - uint32_t AVOutputTV::getVideoResolution(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - tvResolutionParam_t videoResolution; - tvError_t ret = GetCurrentVideoResolution(&videoResolution); - if(ret != tvERROR_NONE) { - response["currentVideoResolution"] = "NONE"; - returnResponse(false); + if (isSetRequiredForParam(parameters, paramName)) { + LOGINFO("Proceed with set%s\n", paramName.c_str()); + ret = halSetter(paramValue); + LOGINFO("halsetter ret %d \n", ret); + if (ret != tvERROR_NONE){ + LOGERR("Failed to set %s\n", paramName.c_str()); + return false; + } } - else { - response["currentVideoResolution"] = getVideoResolutionTypeToString(videoResolution); - LOGINFO("Exit: getVideoResolution :%d success \n",videoResolution.resolutionValue); - returnResponse(true); + LOGINFO("Calling updateAVOutputTVParamV2 \n"); + int retval = updateAVoutputTVParamV2("set", paramName, parameters, pqType, paramValue); + if (retval != 0) { + LOGERR("Failed to Save %s to ssm_data. 
retval: %d\n", paramName.c_str(), retval); + return false; } - } - uint32_t AVOutputTV::getVideoFrameRate(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); - tvVideoFrameRate_t videoFramerate; - tvError_t ret = GetCurrentVideoFrameRate(&videoFramerate); - if(ret != tvERROR_NONE) { - response["currentVideoFrameRate"] = "NONE"; - returnResponse(false); - } - else { - response["currentVideoFrameRate"] = getVideoFrameRateTypeToString(videoFramerate); - LOGINFO("Exit: videoFramerate :%d success \n",videoFramerate); - returnResponse(true); - } + LOGINFO("Exit: set%s successful to value: %d\n", paramName.c_str(), paramValue); + return true; } - uint32_t AVOutputTV::getBacklight(const JsonObject& parameters, JsonObject& response) + uint32_t AVOutputTV::getPQCapabilityWithContext( + const std::function& getCapsFunc, + const JsonObject& parameters, + JsonObject& response) { - LOGINFO("Entry"); - - capDetails_t inputInfo; - std::string key; - paramIndex_t indexInfo; - int backlight = 0,err = 0; + int max_value = 0; + tvContextCaps_t* context_caps = nullptr; - if (parsingGetInputArgument(parameters, "Backlight",inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + // Call the HAL function + tvError_t result = getCapsFunc(&context_caps, &max_value); + LOGWARN("AVOutputPlugins: %s: result: %d", __FUNCTION__, result); - if (isPlatformSupport("Backlight") != 0) { + if (result != tvERROR_NONE) { returnResponse(false); } - if (getParamIndex("Backlight", inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } + response["platformSupport"] = true; - err = getLocalparam("Backlight",indexInfo,backlight, PQ_PARAM_BACKLIGHT); - if( err == 0 ) { - response["backlight"] = backlight; - LOGINFO("Exit : Backlight Value: %d \n", backlight); - returnResponse(true); + if (max_value > 0) { + JsonObject rangeInfo; + rangeInfo["from"] = 0; + rangeInfo["to"] = max_value; + response["rangeInfo"] = rangeInfo; } - else { - returnResponse(false); - } - } - uint32_t AVOutputTV::setBacklight(const JsonObject& parameters, JsonObject& response) - { - LOGINFO("Entry\n"); + response["context"] = parseContextCaps(context_caps); - std::string value; - capDetails_t inputInfo; - int backlight = 0; - tvError_t ret = tvERROR_NONE; + returnResponse(true); + } - value = parameters.HasLabel("backlight") ? 
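getPQCapabilityWithContext above turns a HAL caps call into a uniform JSON reply: a platformSupport flag, an optional rangeInfo {from, to} built from the returned maximum, and a context object produced by parseContextCaps. The comment block below sketches the reply shape this would produce for a method such as getBacklightCapsV2; the numbers, mode names, source names, and the success field (added by the returnResponse macro) are illustrative, not captured from a device.

// Illustrative reply shape:
//
// {
//   "platformSupport": true,
//   "rangeInfo": { "from": 0, "to": 100 },
//   "context": {
//     "Standard": { "SDR": ["HDMI1", "IP"], "HDR10": ["HDMI1"] },
//     "Movie":    { "SDR": ["IP"] }
//   },
//   "success": true
// }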
parameters["backlight"].String() : ""; - returnIfParamNotFound(parameters,"backlight"); - backlight = std::stoi(value); - if (validateIntegerInputParameter("Backlight",backlight) != 0) { - LOGERR("Failed in Backlight range validation:%s", __FUNCTION__); - returnResponse(false); - } + JsonObject AVOutputTV::parseContextCaps(tvContextCaps_t* context_caps) { + JsonObject contextObj; + if (context_caps && context_caps->num_contexts > 0) { + for (size_t i = 0; i < context_caps->num_contexts; ++i) { + int pqMode = context_caps->contexts[i].pq_mode; + int videoFormat = context_caps->contexts[i].videoFormatType; + int videoSource = context_caps->contexts[i].videoSrcType; - if (parsingSetInputArgument(parameters,"Backlight",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + auto pqModeIt = AVOutputTV::pqModeMap.find(pqMode); + auto videoFormatIt = AVOutputTV::videoFormatMap.find(videoFormat); + auto videoSrcIt = AVOutputTV::videoSrcMap.find(videoSource); - if (isPlatformSupport("Backlight") != 0 ) { - returnResponse(false); - } + if (pqModeIt != AVOutputTV::pqModeMap.end() && + videoFormatIt != AVOutputTV::videoFormatMap.end() && + videoSrcIt != AVOutputTV::videoSrcMap.end()) { - if( !isCapablityCheckPassed( "Backlight" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__); - returnResponse(false); - } + const char* pqModeStr = pqModeIt->second.c_str(); + const char* videoFormatStr = videoFormatIt->second.c_str(); + const char* videoSrcStr = videoSrcIt->second.c_str(); - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with setBacklight\n"); - ret = SetBacklight(backlight); - } + if (!contextObj.HasLabel(pqModeStr)) { + contextObj[pqModeStr] = JsonObject(); + } + JsonObject pqModeObj = contextObj[pqModeStr].Object(); - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Backlight\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","Backlight",inputInfo,PQ_PARAM_BACKLIGHT,backlight); - if(retval != 0 ) { - LOGERR("Failed to Save Backlight to ssm_data\n"); - returnResponse(false); + if (!pqModeObj.HasLabel(videoFormatStr)) { + pqModeObj[videoFormatStr] = JsonArray(); + } + JsonArray formatArray = pqModeObj[videoFormatStr].Array(); + // **Manually check for existence before adding** + bool exists = false; + for (size_t j = 0; j < formatArray.Length(); ++j) { + if (strcmp(formatArray[j].String().c_str(), videoSrcStr) == 0) { + exists = true; + break; + } + } + if (!exists) { + formatArray.Add(videoSrcStr); + } + // Update objects + pqModeObj[videoFormatStr] = formatArray; + contextObj[pqModeStr] = pqModeObj; + } } - LOGINFO("Exit : setBacklight successful to value: %d\n", backlight); - returnResponse(true); } + return contextObj; + } + uint32_t AVOutputTV::getBacklightCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this]( tvContextCaps_t** context_caps, int* max_backlight) { +#if HAL_NOT_READY + return this->GetBacklightCaps(max_backlight, context_caps); +#else + return GetBacklightCaps(max_backlight, context_caps); +#endif + }, parameters, response); } - uint32_t AVOutputTV::resetBacklight(const JsonObject& parameters, JsonObject& response) - { + uint32_t AVOutputTV::getBrightnessCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this]( tvContextCaps_t** context_caps, int* max_brightness) { +#if HAL_NOT_READY + return 
this->GetBrightnessCaps(max_brightness, context_caps); +#else + return GetBrightnessCaps(max_brightness, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getContrastCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_contrast) { +#if HAL_NOT_READY + return this->GetContrastCaps(max_contrast, context_caps); +#else + return GetContrastCaps(max_contrast, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getSharpnessCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_sharpness) { +#if HAL_NOT_READY + return this->GetSharpnessCaps(max_sharpness, context_caps); +#else + return GetSharpnessCaps(max_sharpness, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getSaturationCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_saturation) { +#if HAL_NOT_READY + return this->GetSaturationCaps(max_saturation, context_caps); +#else + return GetSaturationCaps(max_saturation, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getHueCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this]( tvContextCaps_t** context_caps, int* max_hue) { +#if HAL_NOT_READY + return this->GetHueCaps(max_hue, context_caps); +#else + return GetHueCaps(max_hue, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getPrecisionDetailCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_precision) { +#if HAL_NOT_READY + return this->GetPrecisionDetailCaps(max_precision, context_caps); +#else + return GetPrecisionDetailCaps(max_precision, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getLocalContrastEnhancementCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetLocalContrastEnhancementCaps(max_val, context_caps); +#else + return GetLocalContrastEnhancementCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getMPEGNoiseReductionCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetMPEGNoiseReductionCaps(max_val, context_caps); +#else + return GetMPEGNoiseReductionCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getDigitalNoiseReductionCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetDigitalNoiseReductionCaps(max_val, context_caps); +#else + return GetDigitalNoiseReductionCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getAISuperResolutionCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetAISuperResolutionCaps(max_val, context_caps); +#else + return 
GetAISuperResolutionCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getMultiPointWBCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + + int num_hal_matrix_points = 0; + int rgb_min = 0, rgb_max = 0; + int num_ui_matrix_points = 0; + double* ui_matrix_positions = nullptr; + tvContextCaps_t* context_caps = nullptr; + + tvError_t ret = GetMultiPointWBCaps( + &num_hal_matrix_points, + &rgb_min, + &rgb_max, + &num_ui_matrix_points, + &ui_matrix_positions, + &context_caps + ); + + if (ret != tvERROR_NONE) { + LOGWARN("GetMultiPointWBCaps failed: %s", getErrorString(ret).c_str()); + returnResponse(false); + } + response["platformSupport"] = true; + + response["numHalMatrixPoints"] = num_hal_matrix_points; + response["rgbMin"] = rgb_min; + response["rgbMax"] = rgb_max; + response["numUiMatrixPoints"] = num_ui_matrix_points; + + // Add UI matrix positions + JsonArray uiPosArray; + for (int i = 0; i < num_ui_matrix_points; ++i) { + uiPosArray.Add(ui_matrix_positions[i]); + } + response["uiMatrixPositions"] = uiPosArray; + response["context"] = parseContextCaps(context_caps); +#if HAL_NOT_READY + // TODO:: Review cleanup once HAL is available, as memory will be allocated in HAL. + delete[] ui_matrix_positions; +#endif + LOGINFO("Exit\n"); + returnResponse(true); + } + + uint32_t AVOutputTV::getMEMCCaps(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_val) { +#if HAL_NOT_READY + return this->GetMEMCCaps(max_val, context_caps); +#else + return GetMEMCCaps(max_val, context_caps); +#endif + }, + parameters, response); + } + + uint32_t AVOutputTV::getLowLatencyStateCapsV2(const JsonObject& parameters, JsonObject& response) { + return getPQCapabilityWithContext([this](tvContextCaps_t** context_caps, int* max_latency) { +#if HAL_NOT_READY + return this->GetLowLatencyStateCaps(max_latency, context_caps); +#else + return GetLowLatencyStateCaps(max_latency, context_caps); +#endif + }, + parameters, response); + } + + // Forward lookup: string → enum + const std::unordered_map colorTempMap = { + {"Standard", tvColorTemp_STANDARD}, + {"Warm", tvColorTemp_WARM}, + {"Cold", tvColorTemp_COLD}, + {"UserDefined", tvColorTemp_USER}, + {"Supercold", tvColorTemp_SUPERCOLD}, + {"BoostStandard", tvColorTemp_BOOST_STANDARD}, + {"BoostWarm", tvColorTemp_BOOST_WARM}, + {"BoostCold", tvColorTemp_BOOST_COLD}, + {"BoostUserDefined", tvColorTemp_BOOST_USER}, + {"BoostSupercold", tvColorTemp_BOOST_SUPERCOLD} + }; + + // Reverse lookup: enum → string + const std::unordered_map colorTempReverseMap = { + {tvColorTemp_STANDARD, "Standard"}, + {tvColorTemp_WARM, "Warm"}, + {tvColorTemp_COLD, "Cold"}, + {tvColorTemp_USER, "UserDefined"}, + {tvColorTemp_SUPERCOLD, "Supercold"}, + {tvColorTemp_BOOST_STANDARD, "BoostStandard"}, + {tvColorTemp_BOOST_WARM, "BoostWarm"}, + {tvColorTemp_BOOST_COLD, "BoostCold"}, + {tvColorTemp_BOOST_USER, "BoostUserDefined"}, + {tvColorTemp_BOOST_SUPERCOLD, "BoostSupercold"} + }; + + uint32_t AVOutputTV::getColorTemperatureCapsV2(const JsonObject& parameters, JsonObject& response) { + tvColorTemp_t* color_temp = nullptr; + size_t num_color_temp = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t err = GetColorTemperatureCaps(&color_temp, &num_color_temp, &context_caps); + if (err != tvERROR_NONE) { + return err; + } + + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < num_color_temp; ++i) { + 
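getMultiPointWBCaps above reports the HAL-side matrix size, the RGB gain range, and the UI-side picking points before attaching the usual context object. The comment block sketches the resulting reply; every number below is invented for illustration only.

// Illustrative reply shape (all values invented):
//
// {
//   "platformSupport": true,
//   "numHalMatrixPoints": 21,
//   "rgbMin": 0,
//   "rgbMax": 2047,
//   "numUiMatrixPoints": 5,
//   "uiMatrixPositions": [0.0, 25.0, 50.0, 75.0, 100.0],
//   "context": { ... as produced by parseContextCaps ... }
// }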
auto it = colorTempReverseMap.find(color_temp[i]); + if (it != colorTempReverseMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + response["context"] = parseContextCaps(context_caps); + + #if HAL_NOT_READY + free(color_temp); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getSDRGammaCaps(const JsonObject& parameters, JsonObject& response) + { + tvSdrGamma_t* sdr_gamma = nullptr; + size_t num_sdr_gamma = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t err = GetSdrGammaCaps(&sdr_gamma, &num_sdr_gamma, &context_caps); + if (err != tvERROR_NONE) { + return err; + } + + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < num_sdr_gamma; ++i) { + switch (sdr_gamma[i]) { + case tvSdrGamma_1_8: optionsArray.Add("1.8"); break; + case tvSdrGamma_1_9: optionsArray.Add("1.9"); break; + case tvSdrGamma_2_0: optionsArray.Add("2.0"); break; + case tvSdrGamma_2_1: optionsArray.Add("2.1"); break; + case tvSdrGamma_2_2: optionsArray.Add("2.2"); break; + case tvSdrGamma_2_3: optionsArray.Add("2.3"); break; + case tvSdrGamma_2_4: optionsArray.Add("2.4"); break; + case tvSdrGamma_BT_1886: optionsArray.Add("BT.1886"); break; + default: break; + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(context_caps); + + #if HAL_NOT_READY + free(sdr_gamma); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getBacklightDimmingModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + tvDimmingMode_t* dimming_mode = nullptr; + size_t num_dimming_mode = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t err = GetTVDimmingModeCaps(&dimming_mode, &num_dimming_mode, &context_caps); + if (err != tvERROR_NONE) { + return err; + } + + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < num_dimming_mode; ++i) { + auto it = dimmingModeReverseMap.find(dimming_mode[i]); + if (it != dimmingModeReverseMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(context_caps); + + #if HAL_NOT_READY + free(dimming_mode); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getZoomModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < m_numAspectRatio; ++i) { + auto it = zoomModeReverseMap.find(m_aspectRatio[i]); + if (it != zoomModeReverseMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(m_aspectRatioCaps); + + #if HAL_NOT_READY + free(m_aspectRatio); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getPictureModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < m_numPictureModes; ++i) { + auto it = pqModeMap.find(m_pictureModes[i]); + if (it != pqModeMap.end()) { + optionsArray.Add(it->second); + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(m_pictureModeCaps); + + #if HAL_NOT_READY + free(m_pictureModes); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getAutoBacklightModeCapsV2(const JsonObject& parameters, JsonObject& response) + { + response["platformSupport"] = true; + + JsonArray optionsArray; + for (size_t i = 0; i < m_numBacklightModes; ++i) { + switch (m_backlightModes[i]) { + 
case tvBacklightMode_MANUAL: + optionsArray.Add("Manual"); + break; + case tvBacklightMode_AMBIENT: + optionsArray.Add("Ambient"); + break; + case tvBacklightMode_ECO: + optionsArray.Add("Eco"); + break; + default: + LOGINFO("Unknown backlightMode option\n"); + break; + } + } + response["options"] = optionsArray; + + response["context"] = parseContextCaps(m_backlightModeCaps); + + #if HAL_NOT_READY + // TODO: Review cleanup once HAL is available, as memory will be allocated in HAL. + free(m_backlightModes); + #endif + + returnResponse(true); + } + + uint32_t AVOutputTV::getDolbyVisionCalibrationCaps(const JsonObject& parameters, JsonObject& response) + { + tvDVCalibrationSettings_t* min_values = nullptr; + tvDVCalibrationSettings_t* max_values = nullptr; + tvContextCaps_t* context_caps = nullptr; + + if (GetDVCalibrationCaps(&min_values, &max_values, &context_caps) != tvERROR_NONE) { + returnResponse(false); + } + + // Set platform support + response["platformSupport"] = true; + + // Add all range fields (flattened as per expected JSON) + response["rangeTmax"] = JsonObject({{"from", min_values->Tmax}, {"to", max_values->Tmax}}); + response["rangeTmin"] = JsonObject({{"from", min_values->Tmin}, {"to", max_values->Tmin}}); + response["rangeTgamma"] = JsonObject({{"from", min_values->Tgamma}, {"to", max_values->Tgamma}}); + response["rangeRx"] = JsonObject({{"from", min_values->Rx}, {"to", max_values->Rx}}); + response["rangeRy"] = JsonObject({{"from", min_values->Ry}, {"to", max_values->Ry}}); + response["rangeGx"] = JsonObject({{"from", min_values->Gx}, {"to", max_values->Gx}}); + response["rangeGy"] = JsonObject({{"from", min_values->Gy}, {"to", max_values->Gy}}); + response["rangeBx"] = JsonObject({{"from", min_values->Bx}, {"to", max_values->Bx}}); + response["rangeBy"] = JsonObject({{"from", min_values->By}, {"to", max_values->By}}); + response["rangeWx"] = JsonObject({{"from", min_values->Wx}, {"to", max_values->Wx}}); + response["rangeWy"] = JsonObject({{"from", min_values->Wy}, {"to", max_values->Wy}}); + + // Add context list + response["context"] = parseContextCaps(context_caps); + + // Indicate success + response["success"] = true; + +#if HAL_NOT_READY + // TODO: Clean up when HAL handles memory + delete min_values; + delete max_values; +#endif + + returnResponse(true); + } + + + uint32_t AVOutputTV::getZoomModeCaps(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + capVectors_t info; + + JsonArray rangeArray; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray sourceArray; + + unsigned int index = 0; + + tvError_t ret = getParamsCaps("AspectRatio",info); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + for (index = 0; index < info.rangeVector.size(); index++) { + rangeArray.Add(info.rangeVector[index]); + } + + response["options"]=rangeArray; + + if (info.pqmodeVector.front().compare("none") != 0) { + for (index = 0; index < info.pqmodeVector.size(); index++) { + pqmodeArray.Add(info.pqmodeVector[index]); + } + response["pictureModeInfo"]=pqmodeArray; + } + if ((info.sourceVector.front()).compare("none") != 0) { + for (index = 0; index < info.sourceVector.size(); index++) { + sourceArray.Add(info.sourceVector[index]); + } + response["videoSourceInfo"]=sourceArray; + } + if ((info.formatVector.front()).compare("none") != 0) { + for (index = 0; index < info.formatVector.size(); index++) { + formatArray.Add(info.formatVector[index]); + } + response["videoFormatInfo"]=formatArray; + } + LOGINFO("Exit\n"); + 
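getDolbyVisionCalibrationCaps above flattens the min/max calibration structures into one rangeXxx {from, to} object per field. The sketch below shows that pairing for two of the fields using a stand-in struct; the numeric bounds are placeholders, not HAL data.

#include <iostream>
#include <map>
#include <string>
#include <utility>

// Stand-in for tvDVCalibrationSettings_t; only two fields shown for brevity.
struct DVCalibration { double Tmax; double Tgamma; };

int main() {
    DVCalibration minValues{0.0, 1.8};      // placeholder bounds
    DVCalibration maxValues{10000.0, 2.6};

    // Each capability field becomes a "rangeXxx": {from, to} pair in the reply.
    std::map<std::string, std::pair<double, double>> ranges = {
        {"rangeTmax",   {minValues.Tmax,   maxValues.Tmax}},
        {"rangeTgamma", {minValues.Tgamma, maxValues.Tgamma}}};

    for (const auto& [name, r] : ranges)
        std::cout << name << ": from=" << r.first << " to=" << r.second << "\n";
}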
returnResponse(true); + } + } + + uint32_t AVOutputTV::setZoomMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_aspectRatioStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + tvDisplayMode_t mode = tvDisplayMode_16x9; + capDetails_t inputInfo; + + + value = parameters.HasLabel("zoomMode") ? parameters["zoomMode"].String() : ""; + returnIfParamNotFound(parameters,"zoomMode"); + + if (validateInputParameter("AspectRatio",value) != 0) { + LOGERR("%s: Range validation failed for AspectRatio\n", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters,"AspectRatio",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "AspectRatio",inputInfo )) { + LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); + returnResponse(false); + } + + if(!value.compare("TV 16X9 STRETCH")) { + mode = tvDisplayMode_16x9; + } + else if (!value.compare("TV 4X3 PILLARBOX")) { + mode = tvDisplayMode_4x3; + } + else if (!value.compare("TV NORMAL")) { + mode = tvDisplayMode_NORMAL; + } + else if (!value.compare("TV DIRECT")) { + mode = tvDisplayMode_DIRECT; + } + else if (!value.compare("TV AUTO")) { + mode = tvDisplayMode_AUTO; + } + else if (!value.compare("TV ZOOM")) { + mode = tvDisplayMode_ZOOM; + } + else { + returnResponse(false); + } + m_videoZoomMode = mode; + tvError_t ret = setAspectRatioZoomSettings (mode); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + //Save DisplayMode to localstore and ssm_data + int retval=updateAVoutputTVParam("set","AspectRatio",inputInfo,PQ_PARAM_ASPECT_RATIO,mode); + + if(retval != 0) { + LOGERR("Failed to Save DisplayMode to ssm_data\n"); + returnResponse(false); + } + + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + if ( err != tr181Success ) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + } + LOGINFO("Exit : SetAspectRatio() value : %s\n",value.c_str()); + returnResponse(true); + } + } + else + { + std::string value = parameters.HasLabel("zoomMode") ? parameters["zoomMode"].String() : ""; + returnIfParamNotFound(parameters, "zoomMode"); + + auto it = zoomModeMap.find(value); + if (it == zoomModeMap.end()) { + LOGERR("Invalid zoom mode: %s. 
Not in supported options.", value.c_str()); + returnResponse(false); + } + tvDisplayMode_t mode = it->second; + tvError_t ret = setAspectRatioZoomSettings(mode); + if (ret != tvERROR_NONE) { + returnResponse(false); + } + else + { + // Save DisplayMode to local store and ssm_data + int retval = updateAVoutputTVParamV2("set", "AspectRatio", parameters, PQ_PARAM_ASPECT_RATIO, mode); + if (retval != 0) { + LOGERR("Failed to Save DisplayMode to ssm_data\n"); + returnResponse(false); + } + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + if (err != tr181Success) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, value.c_str()); + } + LOGINFO("Exit : SetAspectRatio() value : %s\n", value.c_str()); + returnResponse(true); + } + } + } + + uint32_t AVOutputTV::getZoomMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvDisplayMode_t mode; + + tvError_t ret = getUserSelectedAspectRatio (&mode); + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + switch(mode) { + case tvDisplayMode_16x9: + LOGINFO("Aspect Ratio: TV 16X9 STRETCH\n"); + response["zoomMode"] = "TV 16X9 STRETCH"; + break; + + case tvDisplayMode_4x3: + LOGINFO("Aspect Ratio: TV 4X3 PILLARBOX\n"); + response["zoomMode"] = "TV 4X3 PILLARBOX"; + break; + + case tvDisplayMode_NORMAL: + LOGINFO("Aspect Ratio: TV Normal\n"); + response["zoomMode"] = "TV NORMAL"; + break; + + case tvDisplayMode_AUTO: + LOGINFO("Aspect Ratio: TV AUTO\n"); + response["zoomMode"] = "TV AUTO"; + break; + + case tvDisplayMode_DIRECT: + LOGINFO("Aspect Ratio: TV DIRECT\n"); + response["zoomMode"] = "TV DIRECT"; + break; + + case tvDisplayMode_ZOOM: + LOGINFO("Aspect Ratio: TV ZOOM\n"); + response["zoomMode"] = "TV ZOOM"; + break; + + default: + LOGINFO("Aspect Ratio: TV AUTO\n"); + response["zoomMode"] = "TV AUTO"; + break; + } + returnResponse(true); + } + } + + uint32_t AVOutputTV::resetZoomMode(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + capDetails_t inputInfo; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "AspectRatio",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if( !isCapablityCheckPassed( "AspectRatio",inputInfo )) { + LOGERR("%s: CapablityCheck failed for AspectRatio\n", __FUNCTION__); + returnResponse(false); + } + + tr181ErrorCode_t err = clearLocalParam(rfc_caller_id,AVOUTPUT_ASPECTRATIO_RFC_PARAM); + if ( err != tr181Success ) { + LOGERR("clearLocalParam for %s Failed : %s\n", AVOUTPUT_ASPECTRATIO_RFC_PARAM, getTR181ErrorString(err)); + ret = tvERROR_GENERAL; + } + else { + ret = setDefaultAspectRatio(inputInfo.pqmode,inputInfo.source,inputInfo.format); + } + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetDefaultAspectRatio()\n"); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getVideoFormat(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvVideoFormatType_t videoFormat; + tvError_t ret = GetCurrentVideoFormat(&videoFormat); + if(ret != tvERROR_NONE) { + response["currentVideoFormat"] = "NONE"; + returnResponse(false); + } + else { + response["currentVideoFormat"] = getVideoFormatTypeToString(videoFormat); + LOGINFO("Exit: getVideoFormat :%d success 
\n",videoFormat); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getVideoResolution(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvResolutionParam_t videoResolution; + tvError_t ret = GetCurrentVideoResolution(&videoResolution); + if(ret != tvERROR_NONE) { + response["currentVideoResolution"] = "NONE"; + returnResponse(false); + } + else { + response["currentVideoResolution"] = getVideoResolutionTypeToString(videoResolution); + LOGINFO("Exit: getVideoResolution :%d success \n",videoResolution.resolutionValue); + returnResponse(true); + } + } + + uint32_t AVOutputTV::getVideoFrameRate(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + tvVideoFrameRate_t videoFramerate; + tvError_t ret = GetCurrentVideoFrameRate(&videoFramerate); + if(ret != tvERROR_NONE) { + response["currentVideoFrameRate"] = "NONE"; + returnResponse(false); + } + else { + response["currentVideoFrameRate"] = getVideoFrameRateTypeToString(videoFramerate); + LOGINFO("Exit: videoFramerate :%d success \n",videoFramerate); + returnResponse(true); + } + } + + uint32_t AVOutputTV::resetPrecisionDetail(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + bool success = resetPQParamToDefault(parameters, "PrecisionDetail", + PQ_PARAM_PRECISION_DETAIL, SetPrecisionDetail); + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetLocalContrastEnhancement(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + #if ENABLE_PQ_PARAM + bool success = resetPQParamToDefault(parameters, "LocalContrastEnhancement", + PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, SetLocalContrastEnhancement); + #else + bool success = true; + #endif + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetMPEGNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + #if ENABLE_PQ_PARAM + bool success = resetPQParamToDefault(parameters, "MPEGNoiseReduction", + PQ_PARAM_MPEG_NOISE_REDUCTION, SetMPEGNoiseReduction); + #else + bool success = true; + #endif + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetDigitalNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + #if ENABLE_PQ_PARAM + bool success = resetPQParamToDefault(parameters, "DigitalNoiseReduction", + PQ_PARAM_DIGITAL_NOISE_REDUCTION, SetDigitalNoiseReduction); + #else + bool success = true; + #endif + + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetMEMC(const JsonObject& parameters, JsonObject& response) + { + #if HAL_NOT_READY + bool success = false; + #else + bool success = resetPQParamToDefault(parameters, "MEMC", + PQ_PARAM_MEMC, SetMEMC); + #endif + returnResponse(success); + } + + uint32_t AVOutputTV::resetAISuperResolution(const JsonObject& parameters, JsonObject& response) + { +#if HAL_NOT_READY + bool success= false; +#else + bool success= resetPQParamToDefault(parameters,"AISuperResolution", + PQ_PARAM_AI_SUPER_RESOLUTION, SetAISuperResolution); +#endif + returnResponse(success); + } + + uint32_t AVOutputTV::getPrecisionDetail(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int precisionDetail = 0; + bool success = getPQParamFromContext(parameters, + "PrecisionDetail", + PQ_PARAM_PRECISION_DETAIL, + precisionDetail); + if (success) { + response["precisionDetail"] = precisionDetail; + } + 
returnResponse(success); + } + + uint32_t AVOutputTV::getLocalContrastEnhancement(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int localContraseEnhancement = 0; + bool success = getPQParamFromContext(parameters, + "LocalContrastEnhancement", + PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, + localContraseEnhancement); + if (success) { + response["localContrastEnhancement"] = localContraseEnhancement; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getMPEGNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int MPEGNoiseReduction = 0; + bool success = getPQParamFromContext(parameters, + "MPEGNoiseReduction", + PQ_PARAM_MPEG_NOISE_REDUCTION, + MPEGNoiseReduction); + if (success) { + response["mpegNoiseReduction"] = MPEGNoiseReduction; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getDigitalNoiseReduction(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int digitalNoiseReduction = 0; + bool success = getPQParamFromContext(parameters, + "DigitalNoiseReduction", + PQ_PARAM_DIGITAL_NOISE_REDUCTION, + digitalNoiseReduction); + if (success) { + response["digitalNoiseReduction"] = digitalNoiseReduction; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getMEMC(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int MEMC = 0; + bool success = getPQParamFromContext(parameters, + "MEMC", + PQ_PARAM_MEMC, + MEMC); + if (success) { + response["memc"] = MEMC; + } + returnResponse(success); + } + + uint32_t AVOutputTV::getAISuperResolution(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + int aiSuperResolution = 0; + bool success = getPQParamFromContext(parameters, + "AISuperResolution", + PQ_PARAM_AI_SUPER_RESOLUTION, + aiSuperResolution); + if (success) { + response["aiSuperResolution"] = aiSuperResolution; + } + returnResponse(success); + } + + uint32_t AVOutputTV::setContextPQParam(const JsonObject& parameters, JsonObject& response, + const std::string& inputParamName, + const std::string& tr181ParamName, + int maxAllowedValue, + tvPQParameterIndex_t pqParamType, + std::function halSetter) + { + LOGINFO("Entry"); + + if (!parameters.HasLabel(inputParamName.c_str())) { + LOGERR("Missing parameter: %s", inputParamName.c_str()); + returnResponse(false); + } + + std::string valueStr = parameters[inputParamName.c_str()].String(); + int value = std::stoi(valueStr); + + if (value < 0 || value > maxAllowedValue) { + LOGERR("Input value %d is out of range for %s", value, inputParamName.c_str()); + returnResponse(false); + } + + // Get current context + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + tvPQModeIndex_t currentPQMode = PQ_MODE_STANDARD; + + GetCurrentVideoSource(¤tSrc); + GetCurrentVideoFormat(¤tFmt); + if (currentFmt == VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + char picMode[PIC_MODE_NAME_MAX] = {0}; + if (getCurrentPictureMode(picMode)) + { + auto it = pqModeReverseMap.find(picMode); + if (it != pqModeReverseMap.end()) + { + currentPQMode = static_cast(it->second); + } + else + { + LOGERR("Unknown picture mode"); + } + } + else + { + LOGERR("Failed to get current picture mode"); + } + + LOGINFO("currentPQMode: %d, currentFmt: %d, currentSrc: %d", currentPQMode, currentFmt, currentSrc); + + if (isSetRequiredForParam(parameters, tr181ParamName)) { + #if HAL_NOT_READY + #else + tvError_t ret = halSetter(currentSrc, currentPQMode, currentFmt, value); + if 
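setContextPQParam above resolves the live context (current video source, current format with a fallback to SDR when the HAL reports none, and the current picture mode looked up through pqModeReverseMap) and only then invokes the four-argument HAL setter lambda. A reduced, runnable sketch of that flow with stub enums and a stub setter; everything below is illustrative.

#include <functional>
#include <iostream>

// Stand-ins for the HAL enums used by the plugin.
enum class Source { IP, HDMI1 };
enum class Format { None, SDR, HDR10 };
enum class PQMode { Standard, Movie };

using HalSetter = std::function<int(Source, PQMode, Format, int)>;

int applyForCurrentContext(HalSetter setter, int value) {
    Source src = Source::HDMI1;                   // would come from GetCurrentVideoSource()
    Format fmt = Format::None;                    // would come from GetCurrentVideoFormat()
    if (fmt == Format::None) fmt = Format::SDR;   // same fallback as the plugin
    PQMode mode = PQMode::Standard;               // would come from the current picture mode lookup

    return setter(src, mode, fmt, value);
}

int main() {
    auto fakeSetMEMC = [](Source, PQMode, Format, int v) {
        std::cout << "SetMEMC(value=" << v << ")\n";
        return 0;                                 // 0 == success, mirroring tvERROR_NONE
    };
    std::cout << "result: " << applyForCurrentContext(fakeSetMEMC, 3) << "\n";
}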
(ret != tvERROR_NONE) { + LOGERR("HAL setter failed for %s", inputParamName.c_str()); + returnResponse(false); + } + #endif + } + + // Persist + int retval = updateAVoutputTVParamV2("set", tr181ParamName, parameters, pqParamType, value); + if (retval != 0) { + LOGERR("Failed to save %s to ssm_data", inputParamName.c_str()); + returnResponse(false); + } + + LOGINFO("Exit: %s set successfully to %d", inputParamName.c_str(), value); + returnResponse(true); + } + + uint32_t AVOutputTV::setAISuperResolution(const JsonObject& parameters, JsonObject& response) + { + return setContextPQParam( + parameters, response, + "aiSuperResolution", + "AISuperResolution", + m_maxAISuperResolution, + PQ_PARAM_AI_SUPER_RESOLUTION, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetAISuperResolution(src, mode, fmt, val); + } + ); + } + + uint32_t AVOutputTV::setMEMC(const JsonObject& parameters, JsonObject& response) + { + return setContextPQParam( + parameters, response, + "memc", "MEMC", + m_maxMEMC, + PQ_PARAM_MEMC, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetMEMC(src, mode, fmt, val); + } + ); + } + + uint32_t AVOutputTV::setPrecisionDetail(const JsonObject& parameters, JsonObject& response) + { + return setContextPQParam( + parameters, response, + "precisionDetail", "PrecisionDetail", + m_maxPrecisionDetail, + PQ_PARAM_PRECISION_DETAIL, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetPrecisionDetail(src, mode, fmt, val); + } + ); + } + + uint32_t AVOutputTV::setLocalContrastEnhancement(const JsonObject& parameters, JsonObject& response) + { +#if ENABLE_PQ_PARAM + return setContextPQParam( + parameters, response, + "localContrastEnhancement", "LocalContrastEnhancement", + m_maxLocalContrastEnhancement, + PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetLocalContrastEnhancement(src, mode, fmt, val); + } + ); +#else + returnResponse(true); +#endif + } + + uint32_t AVOutputTV::setMPEGNoiseReduction(const JsonObject& parameters, JsonObject& response) + { +#if ENABLE_PQ_PARAM + return setContextPQParam( + parameters, response, + "mpegNoiseReduction", "MPEGNoiseReduction", + m_maxMPEGNoiseReduction, + PQ_PARAM_MPEG_NOISE_REDUCTION, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetMPEGNoiseReduction(src, mode, fmt, val); + } + ); +#else + returnResponse(true); +#endif + } + + uint32_t AVOutputTV::setDigitalNoiseReduction(const JsonObject& parameters, JsonObject& response) + { +#if ENABLE_PQ_PARAM + return setContextPQParam( + parameters, response, + "digitalNoiseReduction", "DigitalNoiseReduction", + m_maxDigitalNoiseReduction, + PQ_PARAM_DIGITAL_NOISE_REDUCTION, + [](tvVideoSrcType_t src, tvPQModeIndex_t mode, tvVideoFormatType_t fmt, int val) { + return SetDigitalNoiseReduction(src, mode, fmt, val); + } + ); +#else + returnResponse(true); +#endif + } + + uint32_t AVOutputTV::getBacklight(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry"); + if(m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + std::string key; + paramIndex_t indexInfo; + int backlight = 0,err = 0; + + if (parsingGetInputArgument(parameters, "Backlight",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } + + if (isPlatformSupport("Backlight") != 0) { + 
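getBacklight above (and setBacklight, setZoomMode and others in this patch) keep the legacy capability-file path alive and switch to the new context-aware path based on a probe status stored at initialisation (m_backlightStatus, m_aspectRatioStatus). A compact sketch of that gating pattern; the status enum and both path functions are stand-ins.

#include <iostream>

// Stand-in for the tvError_t value used as the probe result.
enum class CapsStatus { Supported, OperationNotSupported };

bool legacyGetBacklight(int& out) { out = 50; return true; }   // old capability-file path
bool v2GetBacklight(int& out)     { out = 75; return true; }   // new context-aware path

bool getBacklight(CapsStatus status, int& out) {
    // Same shape as: if (m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) { legacy } else { V2 }
    return (status == CapsStatus::OperationNotSupported) ? legacyGetBacklight(out)
                                                         : v2GetBacklight(out);
}

int main() {
    int value = 0;
    getBacklight(CapsStatus::Supported, value);
    std::cout << "backlight = " << value << "\n";   // 75, via the V2 path
}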
returnResponse(false); + } + + if (getParamIndex("Backlight", inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } + + err = getLocalparam("Backlight",indexInfo,backlight, PQ_PARAM_BACKLIGHT); + if( err == 0 ) { + response["backlight"] = backlight; + LOGINFO("Exit : Backlight Value: %d \n", backlight); + returnResponse(true); + } + else { + returnResponse(false); + } + } + else + { + int backlight = 0; + bool success = getPQParamFromContext(parameters, + "Backlight", + PQ_PARAM_BACKLIGHT, + backlight); + if (success) { + response["backlight"] = backlight; + } + returnResponse(success); + + } + } + + uint32_t AVOutputTV::setBacklight(const JsonObject& parameters, JsonObject& response) + { LOGINFO("Entry\n"); + if(m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + capDetails_t inputInfo; + int backlight = 0; + tvError_t ret = tvERROR_NONE; + + value = parameters.HasLabel("backlight") ? parameters["backlight"].String() : ""; + returnIfParamNotFound(parameters,"backlight"); + backlight = std::stoi(value); + + if (validateIntegerInputParameter("Backlight",backlight) != 0) { + LOGERR("Failed in Backlight range validation:%s", __FUNCTION__); + returnResponse(false); + } + + if (parsingSetInputArgument(parameters,"Backlight",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if (isPlatformSupport("Backlight") != 0 ) { + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Backlight" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__); + returnResponse(false); + } + + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with setBacklight\n"); + ret = SetBacklight(backlight); + } + + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Backlight\n"); + returnResponse(false); + } + else { + int retval= updateAVoutputTVParam("set","Backlight",inputInfo,PQ_PARAM_BACKLIGHT,backlight); + if(retval != 0 ) { + LOGERR("Failed to Save Backlight to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setBacklight successful to value: %d\n", backlight); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Backlight", PQ_PARAM_BACKLIGHT, SetBacklight, m_maxBacklight); + returnResponse(success); + } + + } + bool AVOutputTV::resetEnumPQParamToDefault( + const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + const std::unordered_map& valueMap, + std::function&)> halSetter) + { + LOGINFO("Entry: %s\n", paramName.c_str()); + capDetails_t inputInfo; - int backlight=0; paramIndex_t indexInfo; + int intVal = 0; tvError_t ret = tvERROR_NONE; - if (parsingSetInputArgument(parameters, "Backlight",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); + // Step 1: Save reset state using V2 persistence + LOGINFO("Updating AVOutputTVParamV2 for: %s\n", paramName.c_str()); + int retval = updateAVoutputTVParamV2("reset", paramName, parameters, pqIndex, intVal); + if (retval != 0) { + LOGERR("Failed to reset %s via updateAVoutputTVParamV2. 
retval: %d\n", paramName.c_str(), retval); + return false; } - if (isPlatformSupport("Backlight") != 0) { - returnResponse(false); - } + // Step 2: Apply value from persisted config to HAL if needed + if (isSetRequiredForParam(parameters, paramName)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; - if( !isCapablityCheckPassed( "Backlight",inputInfo )) { - LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__); - returnResponse(false); + if (getParamIndex(paramName, inputInfo, indexInfo) == 0 && + getLocalparam(paramName, indexInfo, intVal, pqIndex) == 0) + { + LOGINFO("%s: getLocalparam success for %s [format=%d, source=%d, mode=%d] → value=%d\n", + __FUNCTION__, paramName.c_str(), indexInfo.formatIndex, + indexInfo.sourceIndex, indexInfo.pqmodeIndex, intVal); + + if (valueMap.find(intVal) == valueMap.end()) { + LOGERR("%s: Invalid enum value %d for %s\n", __FUNCTION__, intVal, paramName.c_str()); + return false; + } + + ret = halSetter(intVal, valueMap); + if (ret != tvERROR_NONE) { + LOGERR("%s: HAL setter failed for value %d\n", paramName.c_str(), intVal); + return false; + } + } + else { + LOGERR("%s: Failed to get local param for %s\n", __FUNCTION__, paramName.c_str()); + return false; + } } - int retval= updateAVoutputTVParam("reset","Backlight",inputInfo,PQ_PARAM_BACKLIGHT,backlight); - if(retval != 0 ) { - LOGERR("Failed to reset Backlight\n"); - returnResponse(false); + LOGINFO("Exit: resetEnumPQParamToDefault for %s successful (value: %d)\n", paramName.c_str(), intVal); + return true; + } + + bool AVOutputTV::resetPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + tvSetFunctionV2 halSetter) + { + LOGINFO("Entry: %s\n", paramName.c_str()); + + capDetails_t inputInfo; + paramIndex_t indexInfo; + int level = 0; + tvError_t ret = tvERROR_NONE; + + // Save reset state using V2 path + LOGINFO("Updating AVOutputTVParamV2 for: %s\n", paramName.c_str()); + int retval = updateAVoutputTVParamV2("reset", paramName, parameters, pqIndex, level); + if (retval != 0) + { + LOGERR("Failed to update %s via updateAVoutputTVParamV2. 
retval: %d\n", paramName.c_str(), retval); + return false; } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - getParamIndex("Backlight", inputInfo,indexInfo); - int err = getLocalparam("Backlight",indexInfo,backlight, PQ_PARAM_BACKLIGHT); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,backlight); - ret = SetBacklight(backlight); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + + // If update succeeded, apply value from local config to HAL + if (isSetRequiredForParam(parameters, paramName)) + { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + + if (getParamIndex(paramName, inputInfo, indexInfo) == 0 && + getLocalparam(paramName, indexInfo, level, pqIndex) == 0) + { + LOGINFO("%s: getLocalparam success for %s: format=%d, source=%d, mode=%d, value=%d\n", + __FUNCTION__, paramName.c_str(), indexInfo.formatIndex, + indexInfo.sourceIndex, indexInfo.pqmodeIndex, level); + if (halSetter) { + ret = halSetter( + static_cast(indexInfo.sourceIndex), + static_cast(indexInfo.pqmodeIndex), + static_cast(indexInfo.formatIndex), + level); + LOGINFO("%s halSetter return value: %d\n", paramName.c_str(), ret); + } else { + LOGERR("halSetter is null for %s\n", paramName.c_str()); + return false; } } + else + { + LOGERR("%s: Failed to get local param for %s\n", __FUNCTION__, paramName.c_str()); + return false; + } } - if(ret != tvERROR_NONE) { - returnResponse(false); + LOGINFO("Exit: reset%s successful to value: %d\n", paramName.c_str(), level); + return true; + } + + bool AVOutputTV::resetPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + tvSetFunction halSetter) + { + LOGINFO("Entry: %s\n", paramName.c_str()); + + capDetails_t inputInfo; + paramIndex_t indexInfo; + int level = 0; + tvError_t ret = tvERROR_NONE; + + // Save reset state using V2 path + LOGINFO("Updating AVOutputTVParamV2 for: %s\n", paramName.c_str()); + int retval = updateAVoutputTVParamV2("reset", paramName, parameters, pqIndex, level); + if (retval != 0) + { + LOGERR("Failed to update %s via updateAVoutputTVParamV2. 
retval: %d\n", paramName.c_str(), retval); + return false; } - else { - LOGINFO("Exit : resetBacklight Successful to value : %d \n",backlight); - returnResponse(true); + + // If update succeeded, apply value from local config to HAL + if (isSetRequiredForParam(parameters, paramName)) + { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + + if (getParamIndex(paramName, inputInfo, indexInfo) == 0 && + getLocalparam(paramName, indexInfo, level, pqIndex) == 0) + { + LOGINFO("%s: getLocalparam success for %s: format=%d, source=%d, mode=%d, value=%d\n", + __FUNCTION__, paramName.c_str(), indexInfo.formatIndex, + indexInfo.sourceIndex, indexInfo.pqmodeIndex, level); + ret = halSetter(level); + LOGINFO("%s halSetter return value: %d\n", paramName.c_str(), ret); + } + else + { + LOGERR("%s: Failed to get local param for %s\n", __FUNCTION__, paramName.c_str()); + return false; + } + } + + LOGINFO("Exit: reset%s successful to value: %d\n", paramName.c_str(), level); + return true; + } + + uint32_t AVOutputTV::resetBacklight(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry\n"); + if(m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int backlight=0; + paramIndex_t indexInfo; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "Backlight",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } + + if (isPlatformSupport("Backlight") != 0) { + returnResponse(false); + } + + if( !isCapablityCheckPassed( "Backlight",inputInfo )) { + LOGERR("%s: CapablityCheck failed for Backlight\n", __FUNCTION__); + returnResponse(false); + } + + int retval= updateAVoutputTVParam("reset","Backlight",inputInfo,PQ_PARAM_BACKLIGHT,backlight); + if(retval != 0 ) { + LOGERR("Failed to reset Backlight\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Backlight", inputInfo,indexInfo); + int err = getLocalparam("Backlight",indexInfo,backlight, PQ_PARAM_BACKLIGHT); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,backlight); + ret = SetBacklight(backlight); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } + } + } + + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetBacklight Successful to value : %d \n",backlight); + returnResponse(true); + } + } + else + { + bool success= resetPQParamToDefault(parameters, "Backlight", PQ_PARAM_BACKLIGHT, SetBacklight); + returnResponse(success); } } @@ -904,79 +2297,99 @@ namespace Plugin { uint32_t AVOutputTV::getBrightness(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_brightnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int brightness = 0; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int brightness = 0; - - if (parsingGetInputArgument(parameters, "Brightness",inputInfo) != 0) { - LOGERR("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if (parsingGetInputArgument(parameters, "Brightness",inputInfo) != 0) { + LOGERR("%s: Failed to parse argument\n", __FUNCTION__); + 
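The two resetPQParamToDefault overloads above are selected by the shape of the callback they receive (tvSetFunctionV2 versus tvSetFunction). Their real typedefs are not shown in this diff, so the aliases below are inferred from how each overload invokes halSetter and may differ from the plugin headers (they could be std::function rather than raw pointers); this is only a sketch of the overload selection.

// Self-contained sketch of the two reset callback shapes (inferred, stand-in types).
#include <iostream>

enum tvError_t           { tvERROR_NONE = 0, tvERROR_GENERAL };
enum tvVideoSrcType_t    { VIDEO_SOURCE_IP };
enum tvPQModeIndex_t     { PQ_MODE_STANDARD };
enum tvVideoFormatType_t { VIDEO_FORMAT_SDR };

using tvSetFunctionSketch   = tvError_t (*)(int);                       // value-only setter
using tvSetFunctionV2Sketch = tvError_t (*)(tvVideoSrcType_t, tvPQModeIndex_t,
                                            tvVideoFormatType_t, int);  // context-aware setter

tvError_t SetBacklight(int v) { std::cout << "Backlight=" << v << "\n"; return tvERROR_NONE; }
tvError_t SetMEMC(tvVideoSrcType_t, tvPQModeIndex_t, tvVideoFormatType_t, int v)
{ std::cout << "MEMC=" << v << "\n"; return tvERROR_NONE; }

// Overloads mirror the plugin: the callback's shape selects the right one.
bool resetToDefault(int value, tvSetFunctionSketch hal)   { return hal(value) == tvERROR_NONE; }
bool resetToDefault(int value, tvSetFunctionV2Sketch hal)
{ return hal(VIDEO_SOURCE_IP, PQ_MODE_STANDARD, VIDEO_FORMAT_SDR, value) == tvERROR_NONE; }

int main()
{
    resetToDefault(50, SetBacklight);   // value-only HAL setter (Backlight, Brightness, ...)
    resetToDefault(0,  SetMEMC);        // context-aware HAL setter (MEMC-style parameters)
    return 0;
}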
returnResponse(false); + } - if (getParamIndex("Brightness", inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("Brightness", inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - int err = getLocalparam("Brightness",indexInfo,brightness, PQ_PARAM_BRIGHTNESS); - if( err == 0 ) { - response["brightness"] = brightness; - LOGINFO("Exit : Brightness Value: %d \n", brightness); - returnResponse(true); + int err = getLocalparam("Brightness",indexInfo,brightness, PQ_PARAM_BRIGHTNESS); + if( err == 0 ) { + response["brightness"] = brightness; + LOGINFO("Exit : Brightness Value: %d \n", brightness); + returnResponse(true); + } + else { + returnResponse(false); + } } - else { - returnResponse(false); + else + { + int brightness = 0; + bool success = getPQParamFromContext(parameters, + "Brightness", + PQ_PARAM_BRIGHTNESS, + brightness); + if (success) { + response["brightness"] = brightness; + } + returnResponse(success); } } uint32_t AVOutputTV::setBrightness(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_brightnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + capDetails_t inputInfo; + int brightness = 0; + tvError_t ret = tvERROR_NONE; - std::string value; - capDetails_t inputInfo; - int brightness = 0; - tvError_t ret = tvERROR_NONE; - - value = parameters.HasLabel("brightness") ? parameters["brightness"].String() : ""; - returnIfParamNotFound(parameters,"brightness"); - brightness = stoi(value); + value = parameters.HasLabel("brightness") ? parameters["brightness"].String() : ""; + returnIfParamNotFound(parameters,"brightness"); + brightness = stoi(value); - if (validateIntegerInputParameter("Brightness",brightness) != 0) { - LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (validateIntegerInputParameter("Brightness",brightness) != 0) { + LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "Brightness",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Brightness",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Brightness",inputInfo )) { - LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Brightness",inputInfo )) { + LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s \n",__FUNCTION__); - ret = SetBrightness(brightness); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s \n",__FUNCTION__); + ret = SetBrightness(brightness); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Brightness\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","Brightness",inputInfo,PQ_PARAM_BRIGHTNESS,brightness); - if(retval != 0 ) { - LOGERR("Failed to Save Brightness to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Brightness\n"); returnResponse(false); } - LOGINFO("Exit : setBrightness successful to value: %d\n", brightness); - 
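One optional hardening note for the integer setters above: they call std::stoi on the raw JSON string, which throws on non-numeric input. A small wrapper such as the sketch below would keep the handler on the returnResponse(false) path instead of letting the exception propagate; the function name is hypothetical and not part of the plugin.

// Hypothetical helper: parse an integer request parameter without throwing.
#include <iostream>
#include <string>

bool parseIntParam(const std::string& raw, int& out)
{
    try {
        std::size_t pos = 0;
        int v = std::stoi(raw, &pos);
        if (pos != raw.size()) {            // reject trailing junk such as "50abc"
            return false;
        }
        out = v;
        return true;
    } catch (const std::exception&) {       // std::invalid_argument or std::out_of_range
        return false;
    }
}

int main()
{
    int brightness = 0;
    std::cout << parseIntParam("75", brightness) << " " << brightness << "\n";  // 1 75
    std::cout << parseIntParam("abc", brightness) << "\n";                      // 0
    return 0;
}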
returnResponse(true); + else { + int retval= updateAVoutputTVParam("set","Brightness",inputInfo,PQ_PARAM_BRIGHTNESS,brightness); + if(retval != 0 ) { + LOGERR("Failed to Save Brightness to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setBrightness successful to value: %d\n", brightness); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Brightness", PQ_PARAM_BRIGHTNESS, SetBrightness, m_maxBrightness); + returnResponse(success); } - } @@ -984,54 +2397,60 @@ namespace Plugin { { LOGINFO("Entry\n"); + if(m_brightnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + capDetails_t inputInfo; + paramIndex_t indexInfo; + int brightness=0; + tvError_t ret = tvERROR_NONE; + + if (parsingSetInputArgument(parameters, "Brightness",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - std::string value; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int brightness=0; - tvError_t ret = tvERROR_NONE; - - if (parsingSetInputArgument(parameters, "Brightness",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } - - if( !isCapablityCheckPassed( "Brightness",inputInfo )) { - LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Brightness",inputInfo )) { + LOGERR("%s: CapablityCheck failed for Brightness\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","Brightness",inputInfo,PQ_PARAM_BRIGHTNESS,brightness); - if(retval != 0 ) { - LOGWARN("Failed to reset Brightness\n"); - returnResponse(false); - } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - getParamIndex("Brightness", inputInfo,indexInfo); - int err = getLocalparam("Brightness",indexInfo,brightness, PQ_PARAM_BRIGHTNESS); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,brightness); - ret = SetBrightness(brightness); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + int retval= updateAVoutputTVParam("reset","Brightness",inputInfo,PQ_PARAM_BRIGHTNESS,brightness); + if(retval != 0 ) { + LOGWARN("Failed to reset Brightness\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Brightness", inputInfo,indexInfo); + int err = getLocalparam("Brightness",indexInfo,brightness, PQ_PARAM_BRIGHTNESS); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,brightness); + ret = SetBrightness(brightness); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetBrightness Successful to value : %d \n",brightness); + returnResponse(true); + } } - else { - LOGINFO("Exit : resetBrightness Successful to value : %d \n",brightness); - 
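The reset handlers above all follow the same three-step flow: persist the default first, re-read the value that now applies to the currently active context, and only then push that value to the HAL. The sketch below reduces that flow to a toy key-value store; the store, context key, default value and HAL call are stand-ins for updateAVoutputTVParam(V2), getLocalparam and SetBrightness in the plugin.

// Self-contained sketch of the reset flow (stand-in store and values).
#include <iostream>
#include <map>
#include <string>

static std::map<std::string, int> g_store;                    // stand-in for persisted ssm_data
static const std::string kCurrentContext = "Current.IP.SDR.Standard";

bool persistReset(const std::string& param, int defaultValue)
{
    g_store[param + "." + kCurrentContext] = defaultValue;    // the plugin resets every matching context
    return true;
}

bool readLocal(const std::string& param, int& out)
{
    auto it = g_store.find(param + "." + kCurrentContext);
    if (it == g_store.end()) return false;
    out = it->second;
    return true;
}

bool halSetBrightness(int v) { std::cout << "HAL brightness=" << v << "\n"; return true; }

bool resetBrightnessSketch()
{
    if (!persistReset("Brightness", 50)) return false;        // step 1: persist the (assumed) default
    int effective = 0;
    if (!readLocal("Brightness", effective)) return false;    // step 2: read back the current context
    return halSetBrightness(effective);                       // step 3: apply only the active value
}

int main() { return resetBrightnessSketch() ? 0 : 1; }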
returnResponse(true); + else + { + bool success = resetPQParamToDefault(parameters, "Brightness", PQ_PARAM_BRIGHTNESS, SetBrightness); + returnResponse(success); } - } uint32_t AVOutputTV::getBrightnessCaps(const JsonObject& parameters, JsonObject& response) @@ -1082,133 +2501,161 @@ namespace Plugin { uint32_t AVOutputTV::getContrast(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int contrast = 0; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int contrast = 0; + if (parsingGetInputArgument(parameters, "Contrast",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - if (parsingGetInputArgument(parameters, "Contrast",inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("Contrast",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (getParamIndex("Contrast",inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + int err = getLocalparam("Contrast",indexInfo,contrast, PQ_PARAM_CONTRAST); + if( err == 0 ) { + response["contrast"] = contrast; + LOGINFO("Exit : Contrast Value: %d \n", contrast); + returnResponse(true); + } + else { + returnResponse(false); + } } + else + { + int contrast = 0; + bool success = getPQParamFromContext(parameters, + "Contrast", + PQ_PARAM_CONTRAST, + contrast); + if (success) { + response["contrast"] = contrast; + } + returnResponse(success); - int err = getLocalparam("Contrast",indexInfo,contrast, PQ_PARAM_CONTRAST); - if( err == 0 ) { - response["contrast"] = contrast; - LOGINFO("Exit : Contrast Value: %d \n", contrast); - returnResponse(true); - } - else { - returnResponse(false); } } uint32_t AVOutputTV::setContrast(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int contrast = 0; + tvError_t ret = tvERROR_NONE; + std::string value; - capDetails_t inputInfo; - int contrast = 0; - tvError_t ret = tvERROR_NONE; - std::string value; - - value = parameters.HasLabel("contrast") ? parameters["contrast"].String() : ""; - returnIfParamNotFound(parameters,"contrast"); - contrast = std::stoi(value); + value = parameters.HasLabel("contrast") ? 
parameters["contrast"].String() : ""; + returnIfParamNotFound(parameters,"contrast"); + contrast = std::stoi(value); - if (validateIntegerInputParameter("Contrast", contrast) != 0) { - LOGERR("Failed in contrast range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (validateIntegerInputParameter("Contrast", contrast) != 0) { + LOGERR("Failed in contrast range validation:%s", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "Contrast",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Contrast",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Contrast" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Contrast" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s \n",__FUNCTION__); - ret = SetContrast(contrast); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s \n",__FUNCTION__); + ret = SetContrast(contrast); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Contrast\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","Contrast",inputInfo,PQ_PARAM_CONTRAST,contrast); - if(retval != 0 ) { - LOGERR("Failed to Save Contrast to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Contrast\n"); returnResponse(false); } - LOGINFO("Exit : setContrast successful to value: %d\n", contrast); - returnResponse(true); + else { + int retval= updateAVoutputTVParam("set","Contrast",inputInfo,PQ_PARAM_CONTRAST,contrast); + if(retval != 0 ) { + LOGERR("Failed to Save Contrast to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setContrast successful to value: %d\n", contrast); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Contrast", PQ_PARAM_CONTRAST, SetContrast, m_maxContrast); + returnResponse(success); } - } uint32_t AVOutputTV::resetContrast(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int contrast=0; + tvError_t ret = tvERROR_NONE; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int contrast=0; - tvError_t ret = tvERROR_NONE; - - if (parsingSetInputArgument(parameters, "Contrast",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Contrast",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Contrast" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Contrast" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Contrast\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","Contrast",inputInfo,PQ_PARAM_CONTRAST,contrast); + int retval= updateAVoutputTVParam("reset","Contrast",inputInfo,PQ_PARAM_CONTRAST,contrast); - if(retval != 0 ) { - 
LOGWARN("Failed to reset Contrast\n"); - returnResponse(false); - } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - getParamIndex("Contrast", inputInfo,indexInfo); - int err = getLocalparam("Contrast",indexInfo,contrast, PQ_PARAM_CONTRAST); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,contrast); - ret = SetContrast(contrast); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGWARN("Failed to reset Contrast\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Contrast", inputInfo,indexInfo); + int err = getLocalparam("Contrast",indexInfo,contrast, PQ_PARAM_CONTRAST); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,contrast); + ret = SetContrast(contrast); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetContrast Successful to value : %d \n",contrast); - returnResponse(true); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetContrast Successful to value : %d \n",contrast); + returnResponse(true); + } } + else + { + bool success= resetPQParamToDefault(parameters, "Contrast", PQ_PARAM_CONTRAST, SetContrast); + returnResponse(success); + } } uint32_t AVOutputTV::getContrastCaps(const JsonObject& parameters, JsonObject& response) @@ -1260,133 +2707,160 @@ namespace Plugin { uint32_t AVOutputTV::getSaturation(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int saturation = 0; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int saturation = 0; + if (parsingGetInputArgument(parameters, "Saturation",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - if (parsingGetInputArgument(parameters, "Saturation",inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("Saturation", inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (getParamIndex("Saturation", inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + int err = getLocalparam("Saturation",indexInfo,saturation, PQ_PARAM_SATURATION); + if( err == 0 ) { + response["saturation"] = saturation; + LOGINFO("Exit : Saturation Value: %d \n", saturation); + returnResponse(true); + } + else { + returnResponse(false); + } } + else + { + int saturation = 0; + bool success = getPQParamFromContext(parameters, + "Saturation", + PQ_PARAM_SATURATION, + saturation); + if (success) { + response["saturation"] = saturation; + } + returnResponse(success); - int err = getLocalparam("Saturation",indexInfo,saturation, PQ_PARAM_SATURATION); 
- if( err == 0 ) { - response["saturation"] = saturation; - LOGINFO("Exit : Saturation Value: %d \n", saturation); - returnResponse(true); - } - else { - returnResponse(false); } } uint32_t AVOutputTV::setSaturation(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + std::string value; + int saturation = 0; + tvError_t ret = tvERROR_NONE; - capDetails_t inputInfo; - std::string value; - int saturation = 0; - tvError_t ret = tvERROR_NONE; - - value = parameters.HasLabel("saturation") ? parameters["saturation"].String() : ""; - returnIfParamNotFound(parameters,"saturation"); - saturation = std::stoi(value); + value = parameters.HasLabel("saturation") ? parameters["saturation"].String() : ""; + returnIfParamNotFound(parameters,"saturation"); + saturation = std::stoi(value); - if (validateIntegerInputParameter("Saturation",saturation) != 0) { - LOGERR("Failed in saturation range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (validateIntegerInputParameter("Saturation",saturation) != 0) { + LOGERR("Failed in saturation range validation:%s", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "Saturation",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Saturation",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Saturation" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for Saturation\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Saturation" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Saturation\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetSaturation(saturation); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetSaturation(saturation); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Saturation\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","Saturation",inputInfo,PQ_PARAM_SATURATION,saturation); - if(retval != 0 ) { - LOGERR("Failed to Save Saturation to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Saturation\n"); returnResponse(false); } - LOGINFO("Exit : setSaturation successful to value: %d\n", saturation); - returnResponse(true); + else { + int retval= updateAVoutputTVParam("set","Saturation",inputInfo,PQ_PARAM_SATURATION,saturation); + if(retval != 0 ) { + LOGERR("Failed to Save Saturation to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setSaturation successful to value: %d\n", saturation); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Saturation", PQ_PARAM_SATURATION, SetSaturation, m_maxSaturation); + returnResponse(success); } - } uint32_t AVOutputTV::resetSaturation(const JsonObject& parameters, JsonObject& response) { - LOGINFO("Entry\n"); + if(m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int saturation=0; + tvError_t ret = tvERROR_NONE; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int saturation=0; - tvError_t ret = tvERROR_NONE; - - if 
(parsingSetInputArgument(parameters, "Saturation", inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Saturation", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Saturation", inputInfo )) { - LOGERR("%s: CapablityCheck failed for Saturation\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Saturation", inputInfo )) { + LOGERR("%s: CapablityCheck failed for Saturation\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","Saturation",inputInfo,PQ_PARAM_SATURATION,saturation); + int retval= updateAVoutputTVParam("reset","Saturation",inputInfo,PQ_PARAM_SATURATION,saturation); - if(retval != 0 ) { - LOGERR("Failed to reset Saturation\n"); - returnResponse(false); - } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - getParamIndex("Saturation",inputInfo,indexInfo); - int err = getLocalparam("Saturation",indexInfo, saturation, PQ_PARAM_SATURATION); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,saturation); - ret = SetSaturation(saturation); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGERR("Failed to reset Saturation\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Saturation",inputInfo,indexInfo); + int err = getLocalparam("Saturation",indexInfo, saturation, PQ_PARAM_SATURATION); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,saturation); + ret = SetSaturation(saturation); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - LOGINFO("Exit : resetSaturation Successful to value : %d \n",saturation); - returnResponse(true); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetSaturation Successful to value : %d \n",saturation); + returnResponse(true); + } } + else + { + bool success= resetPQParamToDefault(parameters, "Saturation", PQ_PARAM_SATURATION, SetSaturation); + returnResponse(success); + } } uint32_t AVOutputTV::getSaturationCaps(const JsonObject& parameters, JsonObject& response) @@ -1439,131 +2913,160 @@ namespace Plugin { uint32_t AVOutputTV::getSharpness(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int sharpness = 0; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int sharpness = 0; + if (parsingGetInputArgument(parameters, "Sharpness",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - if (parsingGetInputArgument(parameters, "Sharpness",inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", 
__FUNCTION__); - returnResponse(false); - } + if (getParamIndex("Sharpness",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (getParamIndex("Sharpness",inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + int err = getLocalparam("Sharpness",indexInfo,sharpness, PQ_PARAM_SHARPNESS); + if( err == 0 ) { + response["sharpness"] = sharpness; + LOGINFO("Exit : Sharpness Value: %d \n", sharpness); + returnResponse(true); + } + else { + returnResponse(false); + } } + else + { + int sharpness = 0; + bool success = getPQParamFromContext(parameters, + "Sharpness", + PQ_PARAM_SHARPNESS, + sharpness); + if (success) { + response["sharpness"] = sharpness; + } + returnResponse(success); - int err = getLocalparam("Sharpness",indexInfo,sharpness, PQ_PARAM_SHARPNESS); - if( err == 0 ) { - response["sharpness"] = sharpness; - LOGINFO("Exit : Sharpness Value: %d \n", sharpness); - returnResponse(true); - } - else { - returnResponse(false); } } uint32_t AVOutputTV::setSharpness(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int sharpness = 0; + tvError_t ret = tvERROR_NONE; + std::string value; - capDetails_t inputInfo; - int sharpness = 0; - tvError_t ret = tvERROR_NONE; - std::string value; - - value = parameters.HasLabel("sharpness") ? parameters["sharpness"].String() : ""; - returnIfParamNotFound(parameters,"sharpness"); - sharpness = std::stoi(value); + value = parameters.HasLabel("sharpness") ? parameters["sharpness"].String() : ""; + returnIfParamNotFound(parameters,"sharpness"); + sharpness = std::stoi(value); - if (validateIntegerInputParameter("Sharpness",sharpness) != 0) { - LOGERR("Failed in sharpness range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (validateIntegerInputParameter("Sharpness",sharpness) != 0) { + LOGERR("Failed in sharpness range validation:%s", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "Sharpness", inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Sharpness", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Sharpness", inputInfo )) { - LOGERR("%s: CapablityCheck failed for Sharpness\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Sharpness", inputInfo )) { + LOGERR("%s: CapablityCheck failed for Sharpness\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetSharpness(sharpness); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetSharpness(sharpness); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Sharpness\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","Sharpness",inputInfo,PQ_PARAM_SHARPNESS,sharpness); - if(retval != 0 ) { - LOGERR("Failed to Save Sharpness to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Sharpness\n"); returnResponse(false); } - LOGINFO("Exit : setSharpness successful to value: %d\n", sharpness); - returnResponse(true); + else { 
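The isSetRequired checks that gate the HAL calls above decide whether a set or reset is applied to the hardware immediately or only persisted for a later context switch. The matching rule in the sketch below is a simplification inferred from the call sites (a "Current" wildcard or an exact match against the active context), not the plugin's exact logic.

// Sketch of the apply-now-versus-persist-only decision (inferred semantics).
#include <iostream>
#include <string>

struct Context { std::string pqmode, source, format; };

static const Context g_active{"Standard", "HDMI1", "SDR"};    // assumed active context

bool matches(const std::string& requested, const std::string& active)
{
    return requested == "Current" || requested == active;
}

bool isSetRequiredSketch(const Context& requested)
{
    return matches(requested.pqmode, g_active.pqmode) &&
           matches(requested.source, g_active.source) &&
           matches(requested.format, g_active.format);
}

int main()
{
    std::cout << isSetRequiredSketch({"Current", "Current", "Current"}) << "\n";  // 1: apply now
    std::cout << isSetRequiredSketch({"Movie",   "HDMI2",   "HDR10"})   << "\n";  // 0: persist only
    return 0;
}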
+ int retval= updateAVoutputTVParam("set","Sharpness",inputInfo,PQ_PARAM_SHARPNESS,sharpness); + if(retval != 0 ) { + LOGERR("Failed to Save Sharpness to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setSharpness successful to value: %d\n", sharpness); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Sharpness", PQ_PARAM_SHARPNESS, SetSharpness, m_maxSharpness); + returnResponse(success); } - } uint32_t AVOutputTV::resetSharpness(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int sharpness=0; + tvError_t ret = tvERROR_NONE; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int sharpness=0; - tvError_t ret = tvERROR_NONE; - - if (parsingSetInputArgument(parameters, "Sharpness",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Sharpness",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Sharpness" , inputInfo)) { - LOGERR("%s: CapablityCheck failed for Sharpness\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Sharpness" , inputInfo)) { + LOGERR("%s: CapablityCheck failed for Sharpness\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","Sharpness", inputInfo,PQ_PARAM_SHARPNESS,sharpness); + int retval= updateAVoutputTVParam("reset","Sharpness", inputInfo,PQ_PARAM_SHARPNESS,sharpness); - if(retval != 0 ) { - LOGERR("Failed to reset Sharpness\n"); - returnResponse(false); - } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - getParamIndex("Sharpness",inputInfo,indexInfo); - int err = getLocalparam("Sharpness",indexInfo, sharpness, PQ_PARAM_SHARPNESS); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,sharpness); - ret = SetSharpness(sharpness); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGERR("Failed to reset Sharpness\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Sharpness",inputInfo,indexInfo); + int err = getLocalparam("Sharpness",indexInfo, sharpness, PQ_PARAM_SHARPNESS); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,sharpness); + ret = SetSharpness(sharpness); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetSharpness Successful to value : %d \n",sharpness); + returnResponse(true); + } } - else { - LOGINFO("Exit : resetSharpness Successful to value : %d \n",sharpness); - returnResponse(true); + else + { + bool success= resetPQParamToDefault(parameters, 
"Sharpness", PQ_PARAM_SHARPNESS, SetSharpness); + returnResponse(success); + } } @@ -1617,131 +3120,160 @@ namespace Plugin { uint32_t AVOutputTV::getHue(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int hue = 0; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int hue = 0; + if (parsingGetInputArgument(parameters, "Hue", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - if (parsingGetInputArgument(parameters, "Hue", inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("Hue",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if (getParamIndex("Hue",inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); + int err = getLocalparam("Hue",indexInfo,hue, PQ_PARAM_HUE); + if( err == 0 ) { + response["hue"] = hue; + LOGINFO("Exit : Hue Value: %d \n", hue); + returnResponse(true); + } + else { + returnResponse(false); + } } + else + { + int hue = 0; + bool success = getPQParamFromContext(parameters, + "Hue", + PQ_PARAM_HUE, + hue); + if (success) { + response["hue"] = hue; + } + returnResponse(success); - int err = getLocalparam("Hue",indexInfo,hue, PQ_PARAM_HUE); - if( err == 0 ) { - response["hue"] = hue; - LOGINFO("Exit : Hue Value: %d \n", hue); - returnResponse(true); - } - else { - returnResponse(false); } } uint32_t AVOutputTV::setHue(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int hue = 0; + tvError_t ret = tvERROR_NONE; + std::string value; - capDetails_t inputInfo; - int hue = 0; - tvError_t ret = tvERROR_NONE; - std::string value; - - value = parameters.HasLabel("hue") ? parameters["hue"].String() : ""; - returnIfParamNotFound(parameters,"hue"); - hue = std::stoi(value); + value = parameters.HasLabel("hue") ? 
parameters["hue"].String() : ""; + returnIfParamNotFound(parameters,"hue"); + hue = std::stoi(value); - if (validateIntegerInputParameter("Hue",hue) != 0) { - LOGERR("Failed in hue range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (validateIntegerInputParameter("Hue",hue) != 0) { + LOGERR("Failed in hue range validation:%s", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "Hue",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Hue",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Hue", inputInfo )) { - LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Hue", inputInfo )) { + LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetHue(hue); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetHue(hue); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set Hue\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","Hue",inputInfo,PQ_PARAM_HUE,hue); - if(retval != 0 ) { - LOGERR("Failed to Save Hue to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set Hue\n"); returnResponse(false); } - LOGINFO("Exit : setHue successful to value: %d\n", hue); - returnResponse(true); + else { + int retval= updateAVoutputTVParam("set","Hue",inputInfo,PQ_PARAM_HUE,hue); + if(retval != 0 ) { + LOGERR("Failed to Save Hue to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setHue successful to value: %d\n", hue); + returnResponse(true); + } + } + else + { + bool success = setIntPQParam(parameters, "Hue", PQ_PARAM_HUE, SetHue, m_maxHue); + returnResponse(success); } - } uint32_t AVOutputTV::resetHue(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int hue=0; + tvError_t ret = tvERROR_NONE; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int hue=0; - tvError_t ret = tvERROR_NONE; - - if (parsingSetInputArgument(parameters, "Hue",inputInfo)!= 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "Hue",inputInfo)!= 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "Hue" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "Hue" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for Hue\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","Hue", inputInfo,PQ_PARAM_HUE,hue); + int retval= updateAVoutputTVParam("reset","Hue", inputInfo,PQ_PARAM_HUE,hue); - if(retval != 0 ) { - LOGERR("Failed to reset Hue\n"); - returnResponse(false); - } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - 
getParamIndex("Hue",inputInfo,indexInfo); - int err = getLocalparam("Hue",indexInfo, hue, PQ_PARAM_HUE); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,hue); - ret = SetHue(hue); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGERR("Failed to reset Hue\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("Hue",inputInfo,indexInfo); + int err = getLocalparam("Hue",indexInfo, hue, PQ_PARAM_HUE); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex,hue); + ret = SetHue(hue); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetHue Successful to value : %d \n",hue); + returnResponse(true); + } } - else { - LOGINFO("Exit : resetHue Successful to value : %d \n",hue); - returnResponse(true); + else + { + bool success= resetPQParamToDefault(parameters, "Hue", PQ_PARAM_HUE, SetHue); + returnResponse(success); + } } @@ -1795,110 +3327,146 @@ namespace Plugin { uint32_t AVOutputTV::getColorTemperature(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int colortemp = 0; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int colortemp = 0; - - if (parsingGetInputArgument(parameters, "ColorTemperature", inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } - - if (getParamIndex("ColorTemperature",inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } - - int err = getLocalparam("ColorTemp",indexInfo,colortemp,PQ_PARAM_COLOR_TEMPERATURE); - if( err == 0 ) { - switch(colortemp) { - case tvColorTemp_STANDARD: - LOGINFO("Color Temp Value: Standard\n"); - response["colorTemperature"] = "Standard"; - break; - - case tvColorTemp_WARM: - LOGINFO("Color Temp Value: Warm\n"); - response["colorTemperature"] = "Warm"; - break; - - case tvColorTemp_COLD: - LOGINFO("Color Temp Value: Cold\n"); - response["colorTemperature"] = "Cold"; - break; + if (parsingGetInputArgument(parameters, "ColorTemperature", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - case tvColorTemp_USER: - LOGINFO("Color Temp Value: User Defined\n"); - response["colorTemperature"] = "UserDefined"; - break; + if (getParamIndex("ColorTemperature",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - default: - LOGINFO("Color Temp Value: Standard\n"); - response["colorTemperature"] = "Standard"; - break; + int err = getLocalparam("ColorTemp",indexInfo,colortemp,PQ_PARAM_COLOR_TEMPERATURE); + if( err == 0 ) { + switch(colortemp) { + case tvColorTemp_STANDARD: + LOGINFO("Color Temp Value: Standard\n"); + response["colorTemperature"] = "Standard"; + break; + + case 
tvColorTemp_WARM: + LOGINFO("Color Temp Value: Warm\n"); + response["colorTemperature"] = "Warm"; + break; + + case tvColorTemp_COLD: + LOGINFO("Color Temp Value: Cold\n"); + response["colorTemperature"] = "Cold"; + break; + + case tvColorTemp_USER: + LOGINFO("Color Temp Value: User Defined\n"); + response["colorTemperature"] = "UserDefined"; + break; + + default: + LOGINFO("Color Temp Value: Standard\n"); + response["colorTemperature"] = "Standard"; + break; + } + LOGINFO("Exit : ColorTemperature Value: %d \n", colortemp); + returnResponse(true); + } + else { + returnResponse(false); } - LOGINFO("Exit : ColorTemperature Value: %d \n", colortemp); - returnResponse(true); } - else { - returnResponse(false); + else + { + std::string outMode; + if (getEnumPQParamString(parameters, "ColorTemp", + PQ_PARAM_COLOR_TEMPERATURE, colorTempReverseMap, outMode)) { + response["colorTemperature"] = outMode; + returnResponse(true); + } else { + returnResponse(false); + } + } } uint32_t AVOutputTV::setColorTemperature(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + std::string value; + tvColorTemp_t colortemp = tvColorTemp_MAX; + tvError_t ret = tvERROR_NONE; + + value = parameters.HasLabel("colorTemperature") ? parameters["colorTemperature"].String() : ""; + returnIfParamNotFound(parameters,"colorTemperature"); + if(!value.compare("Standard")) { + colortemp = tvColorTemp_STANDARD; + } + else if (!value.compare("Warm")) { + colortemp = tvColorTemp_WARM; + } + else if (!value.compare("Cold")) { + colortemp = tvColorTemp_COLD; + } + else if (!value.compare("UserDefined")) { + colortemp = tvColorTemp_USER; + } + else { + returnResponse(false); + } - capDetails_t inputInfo; - std::string value; - tvColorTemp_t colortemp = tvColorTemp_MAX; - tvError_t ret = tvERROR_NONE; - - value = parameters.HasLabel("colorTemperature") ? 
parameters["colorTemperature"].String() : ""; - returnIfParamNotFound(parameters,"colorTemperature"); - if(!value.compare("Standard")) { - colortemp = tvColorTemp_STANDARD; - } - else if (!value.compare("Warm")) { - colortemp = tvColorTemp_WARM; - } - else if (!value.compare("Cold")) { - colortemp = tvColorTemp_COLD; - } - else if (!value.compare("UserDefined")) { - colortemp = tvColorTemp_USER; - } - else { - returnResponse(false); - } - - if (parsingSetInputArgument(parameters, "ColorTemperature",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "ColorTemperature",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "ColorTemperature", inputInfo )) { - LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "ColorTemperature", inputInfo )) { + LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetColorTemperature((tvColorTemp_t)colortemp); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetColorTemperature((tvColorTemp_t)colortemp); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set ColorTemperature\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","ColorTemp", inputInfo,PQ_PARAM_COLOR_TEMPERATURE,(int)colortemp); - if(retval != 0 ) { - LOGERR("Failed to Save ColorTemperature to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set ColorTemperature\n"); + returnResponse(false); + } + else { + int retval= updateAVoutputTVParam("set","ColorTemp", inputInfo,PQ_PARAM_COLOR_TEMPERATURE,(int)colortemp); + if(retval != 0 ) { + LOGERR("Failed to Save ColorTemperature to ssm_data\n"); + returnResponse(false); + } + LOGINFO("Exit : setColorTemperature successful to value: %d\n", colortemp); + returnResponse(true); + } + } + else + { + bool success = setEnumPQParam( + parameters, + "colorTemperature", + "ColorTemp", + colorTempMap, + PQ_PARAM_COLOR_TEMPERATURE, + [](int val) { + return SetColorTemperature(static_cast(val)); + }); + + if (!success) { + LOGERR("setColorTemperature failed"); returnResponse(false); } - LOGINFO("Exit : setColorTemperature successful to value: %d\n", colortemp); + + LOGINFO("setColorTemperature: Success"); returnResponse(true); } } @@ -1907,52 +3475,67 @@ namespace Plugin { { LOGINFO("Entry\n"); + if(m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int colortemp=0; + tvError_t ret = tvERROR_NONE; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int colortemp=0; - tvError_t ret = tvERROR_NONE; - - if (parsingSetInputArgument(parameters, "ColorTemperature", inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "ColorTemperature", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "ColorTemperature", inputInfo )) { - LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); - returnResponse(false); - 
} + if( !isCapablityCheckPassed( "ColorTemperature", inputInfo )) { + LOGERR("%s: CapablityCheck failed for colorTemperature\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","ColorTemp", inputInfo,PQ_PARAM_COLOR_TEMPERATURE,colortemp); + int retval= updateAVoutputTVParam("reset","ColorTemp", inputInfo,PQ_PARAM_COLOR_TEMPERATURE,colortemp); - if(retval != 0 ) { - LOGERR("Failed to reset ColorTemperature\n"); - returnResponse(false); - } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - getParamIndex("ColorTemperature",inputInfo,indexInfo); - int err = getLocalparam("ColorTemp",indexInfo, colortemp, PQ_PARAM_COLOR_TEMPERATURE); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex, colortemp); - ret = SetColorTemperature((tvColorTemp_t)colortemp); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + if(retval != 0 ) { + LOGERR("Failed to reset ColorTemperature\n"); + returnResponse(false); + } + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("ColorTemperature",inputInfo,indexInfo); + int err = getLocalparam("ColorTemp",indexInfo, colortemp, PQ_PARAM_COLOR_TEMPERATURE); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex, colortemp); + ret = SetColorTemperature((tvColorTemp_t)colortemp); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetColorTemperature Successful to value : %d \n",colortemp); + returnResponse(true); + } } - else { - LOGINFO("Exit : resetColorTemperature Successful to value : %d \n",colortemp); - returnResponse(true); + else + { + bool success = resetEnumPQParamToDefault( + parameters, + "ColorTemp", + PQ_PARAM_COLOR_TEMPERATURE, + colorTempReverseMap, + [](int val, const std::unordered_map&) { + return SetColorTemperature(static_cast(val)); + }); + + returnResponse(success); } } @@ -2006,150 +3589,222 @@ namespace Plugin { uint32_t AVOutputTV::getBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int dimmingMode = 0; + if (parsingGetInputArgument(parameters, "DimmingMode", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - capDetails_t inputInfo; - paramIndex_t indexInfo; - int dimmingMode = 0; - - if (parsingGetInputArgument(parameters, "DimmingMode", inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } - - if (getParamIndex("DimmingMode",inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("DimmingMode",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + 
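For the enum-valued resets above (resetEnumPQParamToDefault with colorTempReverseMap and an adapter lambda), the persisted integer is validated against the reverse map before the cast-and-apply lambda runs. The sketch below isolates just that validation plus the adapter; all types and values are stand-ins.

// Sketch of the enum reset validation and adapter lambda (stand-in types).
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>

enum tvError_t     { tvERROR_NONE = 0, tvERROR_GENERAL };
enum tvColorTemp_t { tvColorTemp_STANDARD, tvColorTemp_WARM, tvColorTemp_COLD, tvColorTemp_USER };

tvError_t SetColorTemperature(tvColorTemp_t t)
{ std::cout << "HAL colortemp=" << t << "\n"; return tvERROR_NONE; }

bool applyEnumDefault(int persistedValue,
                      const std::unordered_map<int, std::string>& valueMap,
                      const std::function<tvError_t(int)>& halSetter)
{
    if (valueMap.find(persistedValue) == valueMap.end()) {   // reject unknown enum values
        std::cerr << "invalid enum value " << persistedValue << "\n";
        return false;
    }
    return halSetter(persistedValue) == tvERROR_NONE;
}

int main()
{
    const std::unordered_map<int, std::string> reverseMap = {
        {tvColorTemp_STANDARD, "Standard"}, {tvColorTemp_WARM, "Warm"},
        {tvColorTemp_COLD, "Cold"},         {tvColorTemp_USER, "UserDefined"}
    };
    return applyEnumDefault(tvColorTemp_WARM, reverseMap,
        [](int v) { return SetColorTemperature(static_cast<tvColorTemp_t>(v)); }) ? 0 : 1;
}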
returnResponse(false); + } - int err = getLocalparam("DimmingMode",indexInfo,dimmingMode, PQ_PARAM_DIMMINGMODE); - if( err == 0 ) { - switch(dimmingMode) { - case tvDimmingMode_Fixed: - LOGINFO("DimmingMode Value: Fixed\n"); - response["DimmingMode"] = "fixed"; - break; + int err = getLocalparam("DimmingMode",indexInfo,dimmingMode, PQ_PARAM_DIMMINGMODE); + if( err == 0 ) { + switch(dimmingMode) { + case tvDimmingMode_Fixed: + LOGINFO("DimmingMode Value: Fixed\n"); + response["dimmingMode"] = "Fixed"; + break; - case tvDimmingMode_Local: - LOGINFO("DimmingMode Value: Local\n"); - response["DimmingMode"] = "local"; - break; + case tvDimmingMode_Local: + LOGINFO("DimmingMode Value: Local\n"); + response["dimmingMode"] = "Local"; + break; - case tvDimmingMode_Global: - LOGINFO("DimmingMode Value: Global\n"); - response["DimmingMode"] = "global"; - break; + case tvDimmingMode_Global: + LOGINFO("DimmingMode Value: Global\n"); + response["dimmingMode"] = "Global"; + break; + } + LOGINFO("Exit : DimmingMode Value: %d \n", dimmingMode); + returnResponse(true); + } + else { + returnResponse(false); } - LOGINFO("Exit : DimmingMode Value: %d \n", dimmingMode); - returnResponse(true); } - else { - returnResponse(false); + else + { + std::string mode; + if (getEnumPQParamString(parameters, "DimmingMode", + PQ_PARAM_DIMMINGMODE, dimmingModeReverseMap, mode)) { + response["dimmingMode"] = mode; + returnResponse(true); + } else { + returnResponse(false); + } } } uint32_t AVOutputTV::setBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { - capDetails_t inputInfo; - int dimmingMode = 0; - tvError_t ret = tvERROR_NONE; - std::string value; + capDetails_t inputInfo; + int dimmingMode = 0; + tvError_t ret = tvERROR_NONE; + std::string value; - value = parameters.HasLabel("DimmingMode") ? parameters["DimmingMode"].String() : ""; - returnIfParamNotFound(parameters,"DimmingMode"); + value = parameters.HasLabel("dimmingMode") ? 
parameters["dimmingMode"].String() : ""; + returnIfParamNotFound(parameters,"dimmingMode"); - if (validateInputParameter("DimmingMode",value) != 0) { - LOGERR("%s: Range validation failed for DimmingMode\n", __FUNCTION__); - returnResponse(false); - } - dimmingMode = getDimmingModeIndex(value); + if (validateInputParameter("DimmingMode",value) != 0) { + LOGERR("%s: Range validation failed for DimmingMode\n", __FUNCTION__); + returnResponse(false); + } + dimmingMode = getDimmingModeIndex(value); - if (parsingSetInputArgument(parameters, "DimmingMode",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "DimmingMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "DimmingMode" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "DimmingMode" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); + returnResponse(false); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - ret = SetTVDimmingMode(value.c_str()); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetTVDimmingMode(value.c_str()); + } - if(ret != tvERROR_NONE) { - LOGERR("Failed to set DimmingMode\n"); - returnResponse(false); - } - else { - int retval= updateAVoutputTVParam("set","DimmingMode",inputInfo,PQ_PARAM_DIMMINGMODE,(int)dimmingMode); - if(retval != 0 ) { - LOGERR("Failed to Save DimmingMode to ssm_data\n"); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set DimmingMode\n"); returnResponse(false); } + else { + int retval= updateAVoutputTVParam("set","DimmingMode",inputInfo,PQ_PARAM_DIMMINGMODE,(int)dimmingMode); + if(retval != 0 ) { + LOGERR("Failed to Save DimmingMode to ssm_data\n"); + returnResponse(false); + } - LOGINFO("Exit : setDimmingMode successful to value: %d\n", dimmingMode); - returnResponse(true); + LOGINFO("Exit : setDimmingMode successful to value: %d\n", dimmingMode); + returnResponse(true); + } + } + else + { + int dimmingMode = 0; + tvError_t ret = tvERROR_NONE; + std::string value; + + value = parameters.HasLabel("dimmingMode") ? parameters["dimmingMode"].String() : ""; + returnIfParamNotFound(parameters,"dimmingMode"); + + dimmingMode = getDimmingModeIndex(value); + if (dimmingMode < 0 || dimmingMode > tvDimmingMode_MAX) { + LOGERR("Input value %d is out of range (0 - %d) for DimmingMode", dimmingMode, tvDimmingMode_MAX); + returnResponse(false); + } + if( isSetRequiredForParam(parameters, "DimmingMode" ) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + ret = SetTVDimmingMode(value.c_str()); + } + if(ret != tvERROR_NONE) { + LOGERR("Failed to set DimmingMode\n"); + returnResponse(false); + } + else + { + // Update the TV parameter + int retval = updateAVoutputTVParamV2("set", "DimmingMode", parameters, PQ_PARAM_DIMMINGMODE, (int)dimmingMode); + if (retval != 0) { + LOGERR("Failed to Save DimmingMode to ssm_data. 
retval: %d \n", retval); + returnResponse(false); + } + LOGINFO("Exit : setDimmingMode successful to value: %d \n", dimmingMode); + returnResponse(true); + } } } uint32_t AVOutputTV::resetBacklightDimmingMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { - capDetails_t inputInfo; - paramIndex_t indexInfo; - std::string dimmingMode; - int dMode=0; - tvError_t ret = tvERROR_NONE; + capDetails_t inputInfo; + paramIndex_t indexInfo; + std::string dimmingMode; + int dMode=0; + tvError_t ret = tvERROR_NONE; - if (parsingSetInputArgument(parameters, "DimmingMode", inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "DimmingMode", inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "DimmingMode" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "DimmingMode" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for DimmingMode\n", __FUNCTION__); + returnResponse(false); + } - int retval= updateAVoutputTVParam("reset","DimmingMode", inputInfo,PQ_PARAM_DIMMINGMODE,dMode); + int retval= updateAVoutputTVParam("reset","DimmingMode", inputInfo,PQ_PARAM_DIMMINGMODE,dMode); - if(retval != 0 ) { - LOGERR("Failed to reset ldim\n"); - returnResponse(false); - } + if(retval != 0 ) { + LOGERR("Failed to reset ldim\n"); + returnResponse(false); + } - else { - if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { - inputInfo.pqmode = "Current"; - inputInfo.source = "Current"; - inputInfo.format = "Current"; - getParamIndex("DimmingMode",inputInfo,indexInfo); - int err = getLocalparam("DimmingMode",indexInfo, dMode, PQ_PARAM_DIMMINGMODE); - if( err == 0 ) { - LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex, dMode); - getDimmingModeStringFromEnum(dMode,dimmingMode); - ret = SetTVDimmingMode(dimmingMode.c_str()); - } - else { - LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); - ret = tvERROR_GENERAL; + else { + if (isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format)) { + inputInfo.pqmode = "Current"; + inputInfo.source = "Current"; + inputInfo.format = "Current"; + getParamIndex("DimmingMode",inputInfo,indexInfo); + int err = getLocalparam("DimmingMode",indexInfo, dMode, PQ_PARAM_DIMMINGMODE); + if( err == 0 ) { + LOGINFO("%s : getLocalparam success format :%d source : %d format : %d value : %d\n",__FUNCTION__,indexInfo.formatIndex, indexInfo.sourceIndex, indexInfo.pqmodeIndex, dMode); + getDimmingModeStringFromEnum(dMode,dimmingMode); + ret = SetTVDimmingMode(dimmingMode.c_str()); + } + else { + LOGERR("%s : GetLocalParam Failed \n",__FUNCTION__); + ret = tvERROR_GENERAL; + } } } - } - if(ret != tvERROR_NONE) { - returnResponse(false); + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + LOGINFO("Exit : resetBacklightDimmingMode Successful to value : %s \n",dimmingMode.c_str()); + returnResponse(true); + } } - else { - LOGINFO("Exit : resetBacklightDimmingMode Successful to value : %s \n",dimmingMode.c_str()); - returnResponse(true); + else + { + bool success = resetEnumPQParamToDefault( + parameters, + "DimmingMode", + PQ_PARAM_DIMMINGMODE, + 
dimmingModeReverseMap,
+                [](int val, const std::unordered_map<int, std::string>& enumMap) -> tvError_t {
+                    auto it = enumMap.find(val);
+                    if (it != enumMap.end()) {
+                        return SetTVDimmingMode(it->second.c_str());
+                    } else {
+                        LOGERR("Invalid enum value: %d for DimmingMode\n", val);
+                        return tvERROR_GENERAL;
+                    }
+                });
+
+            returnResponse(success);
+        }
     }
@@ -2560,199 +4215,529 @@ namespace Plugin {
         }
     }
+    bool AVOutputTV::getPictureModeV2(const JsonObject& parameters, std::string& outMode)
+    {
+        LOGINFO("Entry");
+
+        tvVideoSrcType_t source = VIDEO_SOURCE_IP;
+        tvVideoFormatType_t format = VIDEO_FORMAT_SDR;
+
+        // Parse videoSource
+        if (!parameters.HasLabel("videoSource") || parameters["videoSource"].String() == "Current") {
+            GetCurrentVideoSource(&source);
+        } else {
+            std::string srcStr = parameters["videoSource"].String();
+            if (videoSrcReverseMap.count(srcStr)) {
+                source = static_cast<tvVideoSrcType_t>(videoSrcReverseMap.at(srcStr));
+            } else {
+                LOGERR("Invalid videoSource: %s", srcStr.c_str());
+                return false;
+            }
+        }
+
+        // Parse videoFormat
+        if (!parameters.HasLabel("videoFormat") || parameters["videoFormat"].String() == "Current") {
+            GetCurrentVideoFormat(&format);
+            if (format == VIDEO_FORMAT_NONE) format = VIDEO_FORMAT_SDR;
+        } else {
+            std::string fmtStr = parameters["videoFormat"].String();
+            if (videoFormatReverseMap.count(fmtStr)) {
+                format = static_cast<tvVideoFormatType_t>(videoFormatReverseMap.at(fmtStr));
+            } else {
+                LOGERR("Invalid videoFormat: %s", fmtStr.c_str());
+                return false;
+            }
+        }
+
+        // Directly use TR-181 to fetch active picture mode
+        std::string tr181_param_name = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) +
+                                       "." + convertSourceIndexToStringV2(source) +
+                                       ".Format." + convertVideoFormatToStringV2(format) +
+                                       ".PictureModeString";
+
+        LOGINFO("TR181 Param Name = %s", tr181_param_name.c_str());
+
+        TR181_ParamData_t param = {0};
+        tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), &param);
+        if (err != tr181Success) {
+            LOGERR("getLocalParam failed: %d", err);
+            return false;
+        }
+
+        outMode = param.value;
+        LOGINFO("Exit: PictureMode = %s", outMode.c_str());
+        return true;
+    }
+
     uint32_t AVOutputTV::getPictureMode(const JsonObject& parameters, JsonObject& response)
     {
         LOGINFO("Entry\n");
-        capDetails_t inputInfo;
-        paramIndex_t indexInfo;
-        std::string tr181_param_name;
-        TR181_ParamData_t param = {0};
-        tr181ErrorCode_t err = tr181Success;
+        std::string pictureModeStr;
+        if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED)
+        {
+            capDetails_t inputInfo;
+            paramIndex_t indexInfo;
+            TR181_ParamData_t param = {0};
-        if (parsingGetInputArgument(parameters, "PictureMode",inputInfo) != 0) {
-            LOGINFO("%s: Failed to parse argument\n", __FUNCTION__);
-            returnResponse(false);
+            if (parsingGetInputArgument(parameters, "PictureMode", inputInfo) != 0) {
+                LOGERR("%s: Failed to parse input argument", __FUNCTION__);
+                returnResponse(false);
+            }
+
+            if (getParamIndex("PictureMode", inputInfo, indexInfo) == -1) {
+                LOGERR("%s: getParamIndex failed", __FUNCTION__);
+                returnResponse(false);
+            }
+
+            std::string tr181_param_name = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) +
+                                           "." + convertSourceIndexToString(indexInfo.sourceIndex) +
+                                           ".Format." 
+ convertVideoFormatToString(indexInfo.formatIndex) +
+                                           ".PictureModeString";
+
+            tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), &param);
+            if (err != tr181Success) {
+                returnResponse(false);
+            }
+
+            pictureModeStr = param.value;
+        }
+        else
+        {
+            if (!getPictureModeV2(parameters, pictureModeStr)) {
+                returnResponse(false);
+            }
+        }
+        response["pictureMode"] = pictureModeStr;
+        LOGINFO("Exit: getPictureMode() : %s", pictureModeStr.c_str());
+        returnResponse(true);
+    }
+
+    bool AVOutputTV::setPictureModeV2(const JsonObject& parameters)
+    {
+        LOGINFO("Entry %s", __FUNCTION__);
+
+        if (!parameters.HasLabel("pictureMode")) {
+            LOGERR("Missing 'pictureMode' in parameters.");
+            return false;
         }
-        if (getParamIndex("PictureMode",inputInfo,indexInfo) == -1) {
-            LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__);
-            returnResponse(false);
+        std::string mode = parameters["pictureMode"].String();
+
+        // Validate against m_pictureModes
+        int modeIndex = -1;
+        for (size_t i = 0; i < m_numPictureModes; ++i) {
+            auto it = pqModeMap.find(m_pictureModes[i]);
+            if (it != pqModeMap.end()) {
+                if (it->second == mode) {
+                    modeIndex = static_cast<int>(i);
+                    LOGINFO("Matched pictureMode '%s' at index %d", mode.c_str(), modeIndex);
+                    break;
+                }
+            } else {
+                LOGERR("pqModeMap does not contain m_pictureModes[%zu] = %d", i, m_pictureModes[i]);
+            }
+        }
+
+        if (modeIndex == -1) {
+            LOGERR("Invalid pictureMode: %s", mode.c_str());
+            return false;
+        }
+
+        // Extract videoSource
+        std::vector<std::string> sources;
+        if (parameters.HasLabel("videoSource")) {
+            const JsonArray& sourceParam = parameters["videoSource"].Array();
+            for (uint32_t i = 0; i < sourceParam.Length(); ++i) {
+                std::string source = sourceParam[i].Value();
+                if (!source.empty()) {
+                    sources.push_back(source);
+                }
+            }
+        } else {
+            sources.push_back("Global");
+            LOGINFO("videoSource not provided, defaulting to 'Global'");
+        }
+
+        // Extract videoFormat
+        std::vector<std::string> formats;
+        if (parameters.HasLabel("videoFormat")) {
+            const JsonArray& formatParam = parameters["videoFormat"].Array();
+            for (uint32_t i = 0; i < formatParam.Length(); ++i) {
+                std::string format = formatParam[i].Value();
+                if (!format.empty()) {
+                    formats.push_back(format);
+                }
+            }
+        } else {
+            formats.push_back("Global");
+            LOGINFO("videoFormat not provided, defaulting to 'Global'");
+        }
+
+        // Expand 'Global' sources
+        if (std::find(sources.begin(), sources.end(), "Global") != sources.end()) {
+            std::unordered_set<std::string> sourceSet;
+            for (size_t j = 0; j < m_pictureModeCaps->num_contexts; ++j) {
+                if (m_pictureModeCaps->contexts[j].pq_mode == m_pictureModes[modeIndex]) {
+                    std::string srcStr = convertSourceIndexToStringV2(m_pictureModeCaps->contexts[j].videoSrcType);
+                    sourceSet.insert(srcStr);
+                }
+            }
+            sources.insert(sources.end(), sourceSet.begin(), sourceSet.end());
        }
-        tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM);
-        tr181_param_name += "." + convertSourceIndexToString(indexInfo.sourceIndex) + "." 
+ "Format."+convertVideoFormatToString(indexInfo.formatIndex)+"."+"PictureModeString"; - err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); + // Expand 'Global' formats + if (std::find(formats.begin(), formats.end(), "Global") != formats.end()) { + std::unordered_set formatSet; + for (size_t j = 0; j < m_pictureModeCaps->num_contexts; ++j) { + if (m_pictureModeCaps->contexts[j].pq_mode == m_pictureModes[modeIndex]) { + std::string fmtStr = convertVideoFormatToStringV2(m_pictureModeCaps->contexts[j].videoFormatType); + formatSet.insert(fmtStr); + } + } + formats.insert(formats.end(), formatSet.begin(), formatSet.end()); + } + + // Get current context + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + GetCurrentVideoSource(¤tSrc); + GetCurrentVideoFormat(¤tFmt); + if (currentFmt == VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + LOGINFO("Current video source: %s, format: %s", + convertSourceIndexToStringV2(currentSrc).c_str(), + convertVideoFormatToStringV2(currentFmt).c_str()); + + bool contextHandled = false; + + // Iterate through contexts and apply mode + for (size_t i = 0; i < m_pictureModeCaps->num_contexts; ++i) { + const tvConfigContext_t& ctx = m_pictureModeCaps->contexts[i]; + + if (ctx.pq_mode != m_pictureModes[modeIndex]) + continue; + + if (!isValidFormat(formats, ctx.videoFormatType)) + continue; + + if (!isValidSource(sources, ctx.videoSrcType)) + continue; + + std::string srcStr = convertSourceIndexToStringV2(ctx.videoSrcType); + std::string fmtStr = convertVideoFormatToStringV2(ctx.videoFormatType); + + if (ctx.videoSrcType == currentSrc && ctx.videoFormatType == currentFmt) { + if (SetTVPictureMode(mode.c_str()) != tvERROR_NONE) { + LOGERR("SetTVPictureMode failed for mode: %s", mode.c_str()); + continue; + } + } +//TODO:: Revisit this logic. Have to revert if HAL call fails. + std::string tr181Param = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + "." + + srcStr + ".Format." + fmtStr + ".PictureModeString"; + + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, tr181Param.c_str(), mode.c_str()); + if (err != tr181Success) { + LOGERR("setLocalParam failed: %s => %s", tr181Param.c_str(), getTR181ErrorString(err)); + continue; + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, mode.c_str()); + int pqmodeindex = (int)getPictureModeIndex(mode); + SaveSourcePictureMode(ctx.videoSrcType, ctx.videoFormatType, pqmodeindex); + } - if ( tr181Success != err ) { - returnResponse(false); + contextHandled = true; } - else { - std::string s; - s+=param.value; - response["pictureMode"] = s; - LOGINFO("Exit : getPictureMode() : %s\n",s.c_str()); - returnResponse(true); + + if (!contextHandled) { + LOGERR("No valid context found to apply pictureMode: %s", mode.c_str()); + return false; } + + LOGINFO("Exit %s: PictureMode '%s' applied successfully.", __FUNCTION__, mode.c_str()); + return true; } + uint32_t AVOutputTV::setPictureMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); - capDetails_t inputInfo; - char prevmode[PIC_MODE_NAME_MAX]={0}; - std::string value; - GetTVPictureMode(prevmode); + if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + char prevmode[PIC_MODE_NAME_MAX]={0}; + std::string value; + GetTVPictureMode(prevmode); + + tvError_t ret = tvERROR_NONE; + value = parameters.HasLabel("pictureMode") ? 
parameters["pictureMode"].String() : ""; + returnIfParamNotFound(parameters,"pictureMode"); + + // As only source need to validate, so pqmode and formate passing as currrent + if (parsingSetInputArgument(parameters, "PictureMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - tvError_t ret = tvERROR_NONE; - value = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].String() : ""; - returnIfParamNotFound(parameters,"pictureMode"); + if (validateInputParameter("PictureMode",value) != 0) { + LOGERR("%s: Range validation failed for PictureMode\n", __FUNCTION__); + returnResponse(false); + } + if( !isCapablityCheckPassed( "PictureMode" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); + returnResponse(false); + } - // As only source need to validate, so pqmode and formate passing as currrent - if (parsingSetInputArgument(parameters, "PictureMode",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if( isSetRequired("Current",inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with SetTVPictureMode\n"); + ret = SetTVPictureMode(value.c_str()); + } + if(ret != tvERROR_NONE) { + returnResponse(false); + } + else { + valueVectors_t values; + inputInfo.pqmode = "Current"; - if (validateInputParameter("PictureMode",value) != 0) { - LOGERR("%s: Range validation failed for PictureMode\n", __FUNCTION__); - returnResponse(false); - } - if( !isCapablityCheckPassed( "PictureMode" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); - returnResponse(false); - } + getSaveConfig("PictureMode" ,inputInfo, values); + + for (int sourceType : values.sourceValues) { + tvVideoSrcType_t source = (tvVideoSrcType_t)sourceType; + for (int formatType : values.formatValues) { + tvVideoFormatType_t format = (tvVideoFormatType_t)formatType; + std::string tr181_param_name = ""; + tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + // framing Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.AVOutput.Source.source_index[x].Format.format_index[x].PictureModeString.value + tr181_param_name += "."+convertSourceIndexToString(source)+"."+"Format."+ + convertVideoFormatToString(format)+"."+"PictureModeString"; + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, tr181_param_name.c_str(), value.c_str()); + if ( err != tr181Success ) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, value.c_str()); + int pqmodeindex = (int)getPictureModeIndex(value); + SaveSourcePictureMode(source, format, pqmodeindex); + } + } + } - if( isSetRequired("Current",inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with SetTVPictureMode\n"); - ret = SetTVPictureMode(value.c_str()); - } - if(ret != tvERROR_NONE) { - returnResponse(false); + //Filmmaker mode telemetry + if(!strncmp(value.c_str(),"filmmaker",strlen(value.c_str())) && strncmp(prevmode,"filmmaker",strlen(prevmode))) { + LOGINFO("%s mode has been enabled",value.c_str()); + } + else if(!strncmp(prevmode,"filmmaker",strlen(prevmode)) && strncmp(value.c_str(),"filmmaker",strlen(value.c_str()))) { + LOGINFO("%s mode has been disabled",prevmode); + } + + LOGINFO("Broadcasting the low latency change event \n"); + + if(m_isDalsEnabled) 
{ + //GameModebroadcast + if(!strncmp(value.c_str(),"game",strlen(value.c_str())) && strncmp(prevmode,"game",strlen(prevmode))) { + broadcastLowLatencyModeChangeEvent(1); + } + else if(!strncmp(prevmode,"game",strlen(prevmode)) && strncmp(value.c_str(),"game",strlen(value.c_str()))) { + broadcastLowLatencyModeChangeEvent(0); + } + } + + LOGINFO("Exit : Value : %s \n",value.c_str()); + returnResponse(true); + } } else { - valueVectors_t values; - inputInfo.pqmode = "Current"; + bool success = false; + try { + success = setPictureModeV2(parameters); + } catch (const std::exception& e) { + LOGERR("Exception in setPictureModeV2: %s", e.what()); + } catch (...) { + LOGERR("Unknown exception in setPictureModeV2"); + } + returnResponse(success); + } + } + bool AVOutputTV::resetPictureModeV2(const JsonObject& parameters) + { + LOGINFO("Entry %s\n", __FUNCTION__); + + auto extractList = [](const JsonObject& params, const std::string& key) -> std::vector { + std::vector result; + if (params.HasLabel(key.c_str())) { + const JsonArray& array = params[key.c_str()].Array(); + for (uint32_t i = 0; i < array.Length(); ++i) { + result.push_back(array[i].Value()); + } + } else { + result.push_back("Global"); + } + return result; + }; - getSaveConfig("PictureMode" ,inputInfo, values); + std::vector sources = extractList(parameters, "videoSource"); + std::vector formats = extractList(parameters, "videoFormat"); - for (int sourceType : values.sourceValues) { - tvVideoSrcType_t source = (tvVideoSrcType_t)sourceType; - for (int formatType : values.formatValues) { - tvVideoFormatType_t format = (tvVideoFormatType_t)formatType; - std::string tr181_param_name = ""; - tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - // framing Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.AVOutput.Source.source_index[x].Format.format_index[x].PictureModeString.value - tr181_param_name += "."+convertSourceIndexToString(source)+"."+"Format."+ - convertVideoFormatToString(format)+"."+"PictureModeString"; - tr181ErrorCode_t err = setLocalParam(rfc_caller_id, tr181_param_name.c_str(), value.c_str()); - if ( err != tr181Success ) { - LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, getTR181ErrorString(err)); - returnResponse(false); - } - else { - LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM, value.c_str()); - int pqmodeindex = (int)getPictureModeIndex(value); - SaveSourcePictureMode(source, format, pqmodeindex); - } - } + auto expandGlobal = [](std::vector& vec, const std::unordered_set& fullSet) { + if (std::find(vec.begin(), vec.end(), "Global") != vec.end()) { + vec.erase(std::remove(vec.begin(), vec.end(), "Global"), vec.end()); + vec.insert(vec.end(), fullSet.begin(), fullSet.end()); + } + std::unordered_set unique(vec.begin(), vec.end()); + vec.assign(unique.begin(), unique.end()); + }; + + // Expand "Global" values + std::unordered_set allSources, allFormats; + for (size_t j = 0; j < m_pictureModeCaps->num_contexts; ++j) { + allSources.insert(convertSourceIndexToStringV2(m_pictureModeCaps->contexts[j].videoSrcType)); + allFormats.insert(convertVideoFormatToStringV2(m_pictureModeCaps->contexts[j].videoFormatType)); + } + expandGlobal(sources, allSources); + expandGlobal(formats, allFormats); + + // Get current source & format + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + GetCurrentVideoSource(¤tSrc); + GetCurrentVideoFormat(¤tFmt); + if (currentFmt == 
VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + bool contextHandled = false; + + for (size_t i = 0; i < m_pictureModeCaps->num_contexts; ++i) { + const tvConfigContext_t& ctx = m_pictureModeCaps->contexts[i]; + + if (!isValidSource(sources, ctx.videoSrcType) || !isValidFormat(formats, ctx.videoFormatType)) + continue; + + std::string tr181Param = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + "." + + convertSourceIndexToStringV2(ctx.videoSrcType) + ".Format." + + convertVideoFormatToStringV2(ctx.videoFormatType) + ".PictureModeString"; + + // Clear override + tr181ErrorCode_t err = clearLocalParam(rfc_caller_id, tr181Param.c_str()); + if (err != tr181Success) { + LOGERR("clearLocalParam failed for %s: %s", tr181Param.c_str(), getTR181ErrorString(err)); + continue; } - //Filmmaker mode telemetry - if(!strncmp(value.c_str(),"filmmaker",strlen(value.c_str())) && strncmp(prevmode,"filmmaker",strlen(prevmode))) { - LOGINFO("%s mode has been enabled",value.c_str()); - } - else if(!strncmp(prevmode,"filmmaker",strlen(prevmode)) && strncmp(value.c_str(),"filmmaker",strlen(value.c_str()))) { - LOGINFO("%s mode has been disabled",prevmode); - } + // Read saved TR-181 value + TR181_ParamData_t param = {0}; + err = getLocalParam(rfc_caller_id, tr181Param.c_str(), ¶m); + if (err != tr181Success || strlen(param.value) == 0) { + LOGWARN("getLocalParam failed or empty for %s", tr181Param.c_str()); + continue; + } - LOGINFO("Broadcasting the low latency change event \n"); + // Apply to hardware if current context matches + if (ctx.videoSrcType == currentSrc && ctx.videoFormatType == currentFmt) { - if(m_isDalsEnabled) { - //GameModebroadcast - if(!strncmp(value.c_str(),"game",strlen(value.c_str())) && strncmp(prevmode,"game",strlen(prevmode))) { - broadcastLowLatencyModeChangeEvent(1); - } - else if(!strncmp(prevmode,"game",strlen(prevmode)) && strncmp(value.c_str(),"game",strlen(value.c_str()))) { - broadcastLowLatencyModeChangeEvent(0); - } + tvError_t ret = SetTVPictureMode(param.value); + if (ret != tvERROR_NONE) { + LOGERR("SetTVPictureMode failed for %s", param.value); + continue; + } } - LOGINFO("Exit : Value : %s \n",value.c_str()); - returnResponse(true); + // Save to internal config + int pqmodeIndex = static_cast(getPictureModeIndex(param.value)); + SaveSourcePictureMode(ctx.videoSrcType, ctx.videoFormatType, pqmodeIndex); + contextHandled = true; + } + + if (!contextHandled) { + LOGERR("No valid pictureMode context matched to reset.\n"); + return false; } + + LOGINFO("resetPictureModeV2: Exit - PictureMode reset successfully.\n"); + return true; } uint32_t AVOutputTV::resetPictureMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); - tr181ErrorCode_t err = tr181Success; - TR181_ParamData_t param = {0}; + if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + tr181ErrorCode_t err = tr181Success; + TR181_ParamData_t param = {0}; - valueVectors_t values; - capDetails_t inputInfo; + valueVectors_t values; + capDetails_t inputInfo; - // As only source need to validate, so pqmode and formate passing as currrent - if (parsingSetInputArgument(parameters, "PictureMode",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + // As only source need to validate, so pqmode and formate passing as currrent + if (parsingSetInputArgument(parameters, "PictureMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( 
!isCapablityCheckPassed( "PictureMode",inputInfo )) { - LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); - returnResponse(false); - } - inputInfo.pqmode = "Current"; - getSaveConfig("PictureMode", inputInfo, values); + if( !isCapablityCheckPassed( "PictureMode",inputInfo )) { + LOGERR("%s: CapablityCheck failed for PictureMode\n", __FUNCTION__); + returnResponse(false); + } + inputInfo.pqmode = "Current"; + getSaveConfig("PictureMode", inputInfo, values); - for (int source : values.sourceValues) { - tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; - for (int format : values.formatValues) { - tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; - std::string tr181_param_name = ""; - tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ - convertVideoFormatToString(formatType)+"."+"PictureModeString"; + for (int source : values.sourceValues) { + tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; + for (int format : values.formatValues) { + tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; + std::string tr181_param_name = ""; + tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ + convertVideoFormatToString(formatType)+"."+"PictureModeString"; - err = clearLocalParam(rfc_caller_id, tr181_param_name.c_str()); - if ( err != tr181Success ) { - LOGWARN("clearLocalParam for %s Failed : %s\n", tr181_param_name.c_str(), getTR181ErrorString(err)); - returnResponse(false); - } - else { - err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); - if ( tr181Success == err ) { - //get curren source and if matches save for that alone - tvVideoSrcType_t current_source = VIDEO_SOURCE_IP; - GetCurrentVideoSource(¤t_source); - - tvVideoFormatType_t current_format = VIDEO_FORMAT_NONE; - GetCurrentVideoFormat(¤t_format); - if( current_format == VIDEO_FORMAT_NONE) { - current_format = VIDEO_FORMAT_SDR; - } + err = clearLocalParam(rfc_caller_id, tr181_param_name.c_str()); + if ( err != tr181Success ) { + LOGWARN("clearLocalParam for %s Failed : %s\n", tr181_param_name.c_str(), getTR181ErrorString(err)); + returnResponse(false); + } + else { + err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); + if ( tr181Success == err ) { + //get curren source and if matches save for that alone + tvVideoSrcType_t current_source = VIDEO_SOURCE_IP; + GetCurrentVideoSource(¤t_source); + + tvVideoFormatType_t current_format = VIDEO_FORMAT_NONE; + GetCurrentVideoFormat(¤t_format); + if( current_format == VIDEO_FORMAT_NONE) { + current_format = VIDEO_FORMAT_SDR; + } - if (current_source == sourceType && current_format == formatType) { + if (current_source == sourceType && current_format == formatType) { - tvError_t ret = SetTVPictureMode(param.value); - if(ret != tvERROR_NONE) { - LOGWARN("Picture Mode set failed: %s\n",getErrorString(ret).c_str()); - returnResponse(false); - } - else { - LOGINFO("Exit : Picture Mode reset successfully, value: %s\n", param.value); + tvError_t ret = SetTVPictureMode(param.value); + if(ret != tvERROR_NONE) { + LOGWARN("Picture Mode set failed: %s\n",getErrorString(ret).c_str()); + returnResponse(false); + } + else { + LOGINFO("Exit : Picture Mode reset successfully, value: %s\n", param.value); + } } + int pqmodeindex = (int)getPictureModeIndex(param.value); + SaveSourcePictureMode(sourceType, formatType, pqmodeindex); + 
} + else { + LOGWARN("getLocalParam for %s failed\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + returnResponse(false); } - int pqmodeindex = (int)getPictureModeIndex(param.value); - SaveSourcePictureMode(sourceType, formatType, pqmodeindex); - } - else { - LOGWARN("getLocalParam for %s failed\n", AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - returnResponse(false); } } } + returnResponse(true); + } + else + { + bool success = resetPictureModeV2(parameters); + returnResponse(success); } - returnResponse(true) } uint32_t AVOutputTV::signalFilmMakerMode(const JsonObject& parameters, JsonObject& response) @@ -2779,87 +4764,148 @@ namespace Plugin { uint32_t AVOutputTV::setLowLatencyState(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_lowLatencyStateStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + capDetails_t inputInfo; + int lowLatencyIndex = 0,prevLowLatencyIndex = 0; + tvError_t ret = tvERROR_NONE; - std::string value; - capDetails_t inputInfo; - int lowLatencyIndex = 0,prevLowLatencyIndex = 0; - tvError_t ret = tvERROR_NONE; + ret = GetLowLatencyState(&prevLowLatencyIndex); + if(ret != tvERROR_NONE) { + LOGERR("Get previous low latency state failed\n"); + returnResponse(false); + } - ret = GetLowLatencyState(&prevLowLatencyIndex); - if(ret != tvERROR_NONE) { - LOGERR("Get previous low latency state failed\n"); - returnResponse(false); - } + value = parameters.HasLabel("LowLatencyState") ? parameters["LowLatencyState"].String() : ""; + returnIfParamNotFound(parameters,"LowLatencyState"); + lowLatencyIndex = std::stoi(value); - value = parameters.HasLabel("LowLatencyState") ? parameters["LowLatencyState"].String() : ""; - returnIfParamNotFound(parameters,"LowLatencyState"); - lowLatencyIndex = std::stoi(value); + if (validateIntegerInputParameter("LowLatencyState",lowLatencyIndex) != 0) { + LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); + returnResponse(false); + } - if (validateIntegerInputParameter("LowLatencyState",lowLatencyIndex) != 0) { - LOGERR("Failed in Brightness range validation:%s", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters, "LowLatencyState",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters, "LowLatencyState",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "LowLatencyState" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for LowLatencyState\n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "LowLatencyState" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for LowLatencyState\n", __FUNCTION__); - returnResponse(false); - } + int retval= updateAVoutputTVParam("set","LowLatencyState",inputInfo,PQ_PARAM_LOWLATENCY_STATE,lowLatencyIndex); + if(retval != 0 ) { + LOGERR("Failed to SaveLowLatency to ssm_data\n"); + returnResponse(false); + } else { - int retval= updateAVoutputTVParam("set","LowLatencyState",inputInfo,PQ_PARAM_LOWLATENCY_STATE,lowLatencyIndex); - if(retval != 0 ) { - LOGERR("Failed to SaveLowLatency to ssm_data\n"); - returnResponse(false); - } else { + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with setLowLatencyState\n"); + ret = SetLowLatencyState( lowLatencyIndex ); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - 
LOGINFO("Proceed with setLowLatencyState\n"); - ret = SetLowLatencyState( lowLatencyIndex ); + if(ret != tvERROR_NONE) { + LOGERR("Failed to set low latency. Fallback to previous state %d\n", prevLowLatencyIndex); + retval=updateAVoutputTVParam("set","LowLatencyState",inputInfo,PQ_PARAM_LOWLATENCY_STATE,prevLowLatencyIndex); + if(retval != 0 ){ + LOGERR("Fallback to previous low latency state %d failed.\n", prevLowLatencyIndex); + } + returnResponse(false); + } + + LOGINFO("Exit : setLowLatency successful to value: %d\n", lowLatencyIndex); + returnResponse(true); } + } + else + { + std::string value; + int lowLatencyIndex = 0,prevLowLatencyIndex = 0; + tvError_t ret = tvERROR_NONE; + ret = GetLowLatencyState(&prevLowLatencyIndex); if(ret != tvERROR_NONE) { - LOGERR("Failed to set low latency. Fallback to previous state %d\n", prevLowLatencyIndex); - retval=updateAVoutputTVParam("set","LowLatencyState",inputInfo,PQ_PARAM_LOWLATENCY_STATE,prevLowLatencyIndex); - if(retval != 0 ){ - LOGERR("Fallback to previous low latency state %d failed.\n", prevLowLatencyIndex); - } + LOGERR("Get previous low latency state failed\n"); returnResponse(false); } - LOGINFO("Exit : setLowLatency successful to value: %d\n", lowLatencyIndex); - returnResponse(true); + value = parameters.HasLabel("LowLatencyState") ? parameters["LowLatencyState"].String() : ""; + returnIfParamNotFound(parameters,"LowLatencyState"); + lowLatencyIndex = std::stoi(value); + if (lowLatencyIndex < 0 || lowLatencyIndex > m_maxlowLatencyState) { + LOGERR("Input value %d is out of range (0 - %d) for LowLatencyState", lowLatencyIndex, m_maxlowLatencyState); + returnResponse(false); + } + + int retval= updateAVoutputTVParamV2("set","LowLatencyState",parameters,PQ_PARAM_LOWLATENCY_STATE,lowLatencyIndex); + if(retval != 0 ) { + LOGERR("Failed to SaveLowLatency to ssm_data\n"); + returnResponse(false); + } + else + { + if(isSetRequiredForParam(parameters, "LowLatencyState")) + { + LOGINFO("Proceed with setLowLatencyState\n"); + ret = SetLowLatencyState( lowLatencyIndex ); + } + if(ret != tvERROR_NONE) { + LOGERR("Failed to set low latency. 
Fallback to previous state %d\n", prevLowLatencyIndex); + retval=updateAVoutputTVParamV2("set","LowLatencyState",parameters,PQ_PARAM_LOWLATENCY_STATE, prevLowLatencyIndex); + if(retval != 0 ){ + LOGERR("Fallback to previous low latency state %d failed.\n", prevLowLatencyIndex); + } + returnResponse(false); + } + + LOGINFO("Exit : setLowLatency successful to value: %d\n", lowLatencyIndex); + returnResponse(true); + } } } uint32_t AVOutputTV::getLowLatencyState(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_lowLatencyStateStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int lowlatencystate = 0; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int lowlatencystate = 0; - - if (parsingGetInputArgument(parameters, "LowLatencyState",inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } - if (getParamIndex("LowLatencyState",inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } + if (parsingGetInputArgument(parameters, "LowLatencyState",inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } + if (getParamIndex("LowLatencyState",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - int err = getLocalparam("LowLatencyState", indexInfo ,lowlatencystate, PQ_PARAM_LOWLATENCY_STATE); - if( err == 0 ) { - response["lowLatencyState"] = std::to_string(lowlatencystate); - LOGINFO("Exit : LowLatencyState Value: %d \n", lowlatencystate); - returnResponse(true); + int err = getLocalparam("LowLatencyState", indexInfo ,lowlatencystate, PQ_PARAM_LOWLATENCY_STATE); + if( err == 0 ) { + response["lowLatencyState"] = std::to_string(lowlatencystate); + LOGINFO("Exit : LowLatencyState Value: %d \n", lowlatencystate); + returnResponse(true); + } + else { + returnResponse(false); + } } - else { - returnResponse(false); + else + { + int lowlatencystate = 0; + if (getPQParamFromContext(parameters, "LowLatencyState", PQ_PARAM_LOWLATENCY_STATE, lowlatencystate)) { + response["lowLatencyState"] = std::to_string(lowlatencystate); + LOGINFO("Exit : LowLatencyState Value: %d", lowlatencystate); + returnResponse(true); + } else { + LOGERR("Failed to get LowLatencyState"); + returnResponse(false); + } } } @@ -2963,245 +5009,380 @@ namespace Plugin { uint32_t AVOutputTV::getCMS(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + paramIndex_t indexInfo; + int level = 0; + tvPQParameterIndex_t tvPQEnum; - capDetails_t inputInfo; - paramIndex_t indexInfo; - int level = 0; - tvPQParameterIndex_t tvPQEnum; + inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + inputInfo.component = parameters.HasLabel("component") ? parameters["component"].String() : ""; - inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; - inputInfo.component = parameters.HasLabel("component") ? 
parameters["component"].String() : ""; - - if( inputInfo.color.empty() || inputInfo.component.empty() ) { - LOGERR("%s : Color/Component param not found!!!\n",__FUNCTION__); - returnResponse(false); - } + if( inputInfo.color.empty() || inputInfo.component.empty() ) { + LOGERR("%s : Color/Component param not found!!!\n",__FUNCTION__); + returnResponse(false); + } - if (isPlatformSupport("CMS") != 0) { - returnResponse(false); - } + if (isPlatformSupport("CMS") != 0) { + returnResponse(false); + } - if (parsingGetInputArgument(parameters, "CMS", inputInfo) != 0) { - LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); - returnResponse(false); - } + if (parsingGetInputArgument(parameters, "CMS", inputInfo) != 0) { + LOGINFO("%s: Failed to parse argument\n", __FUNCTION__); + returnResponse(false); + } - if (getParamIndex("CMS",inputInfo,indexInfo) == -1) { - LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); - returnResponse(false); - } + if (getParamIndex("CMS",inputInfo,indexInfo) == -1) { + LOGERR("%s: getParamIndex failed to get \n", __FUNCTION__); + returnResponse(false); + } - if ( convertCMSParamToPQEnum(inputInfo.component,inputInfo.color,tvPQEnum) != 0 ) { - LOGINFO("%s: Component/Color Param Not Found \n",__FUNCTION__); - returnResponse(false); - } + if ( convertCMSParamToPQEnum(inputInfo.component,inputInfo.color,tvPQEnum) != 0 ) { + LOGINFO("%s: Component/Color Param Not Found \n",__FUNCTION__); + returnResponse(false); + } - int err = getLocalparam("CMS",indexInfo,level,tvPQEnum); - if( err == 0 ) { - response["level"] = level; - LOGINFO("Exit : params Value: %d \n", level); - returnResponse(true); + int err = getLocalparam("CMS",indexInfo,level,tvPQEnum); + if( err == 0 ) { + response["level"] = level; + LOGINFO("Exit : params Value: %d \n", level); + returnResponse(true); + } + else { + returnResponse(false); + } } - else { - returnResponse(false); + else + { + // Extract color and component from input parameters + std::string color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + std::string component = parameters.HasLabel("component") ? 
parameters["component"].String() : ""; + + if (color.empty() || component.empty()) { + LOGERR("%s: Missing color/component parameter", __FUNCTION__); + returnResponse(false); + } + + tvPQParameterIndex_t pqEnum; + if (convertCMSParamToPQEnum(component, color, pqEnum) != 0) { + LOGERR("%s: Invalid color/component combination", __FUNCTION__); + returnResponse(false); + } + + // Get valid context from parameters using your existing context helper + tvConfigContext_t validContext = getValidContextFromGetParameters(parameters, "CMS"); + + if ((validContext.videoSrcType == VIDEO_SOURCE_ALL && + validContext.videoFormatType == VIDEO_FORMAT_NONE && + validContext.pq_mode == PQ_MODE_INVALID)) + { + LOGERR("No valid context found for CMS get"); + returnResponse(false); + } + + // Prepare paramIndex from context + paramIndex_t indexInfo = { + .sourceIndex = static_cast(validContext.videoSrcType), + .pqmodeIndex = static_cast(validContext.pq_mode), + .formatIndex = static_cast(validContext.videoFormatType) + }; + + int level = 0; + int err = getLocalparam("CMS", indexInfo, level, pqEnum); + if (err == 0) { + response["level"] = level; + LOGINFO("Exit: getCMS success, value: %d", level); + returnResponse(true); + } else { + LOGERR("Failed to get CMS param from local storage"); + returnResponse(false); + } } } uint32_t AVOutputTV::setCMS(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int level = 0,retVal = 0; + tvPQParameterIndex_t tvPQEnum; + tvDataComponentColor_t colorEnum=tvDataColor_NONE; + std::string color,component; + tvError_t ret = tvERROR_NONE; + std::string value; + + inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; + inputInfo.component = parameters.HasLabel("component") ? parameters["component"].String() : ""; + if( inputInfo.color.empty() || inputInfo.component.empty() ) { + LOGERR("%s : Color/Component param not found!!!\n",__FUNCTION__); + returnResponse(false); + } - capDetails_t inputInfo; - int level = 0,retVal = 0; - tvPQParameterIndex_t tvPQEnum; - tvDataComponentColor_t colorEnum=tvDataColor_NONE; - std::string color,component; - tvError_t ret = tvERROR_NONE; - std::string value; - - inputInfo.color = parameters.HasLabel("color") ? parameters["color"].String() : ""; - inputInfo.component = parameters.HasLabel("component") ? parameters["component"].String() : ""; + if (isPlatformSupport("CMS") != 0) { + returnResponse(false); + } - if( inputInfo.color.empty() || inputInfo.component.empty() ) { - LOGERR("%s : Color/Component param not found!!!\n",__FUNCTION__); - returnResponse(false); - } + value = parameters.HasLabel("level") ? parameters["level"].String() : ""; + returnIfParamNotFound(parameters,"level"); + level = std::stoi(value); - if (isPlatformSupport("CMS") != 0) { - returnResponse(false); - } + if (validateCMSParameter(inputInfo.component,level) != 0) { + LOGERR("%s: CMS Failed in range validation", __FUNCTION__); + returnResponse(false); + } - value = parameters.HasLabel("level") ? 
parameters["level"].String() : ""; - returnIfParamNotFound(parameters,"level"); - level = std::stoi(value); + if (parsingSetInputArgument(parameters,"CMS",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if (validateCMSParameter(inputInfo.component,level) != 0) { - LOGERR("%s: CMS Failed in range validation", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "CMS",inputInfo )) { + LOGERR("%s: CapablityCheck failed for CMS\n", __FUNCTION__); + returnResponse(false); + } - if (parsingSetInputArgument(parameters,"CMS",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if ( convertCMSParamToPQEnum(inputInfo.component,inputInfo.color,tvPQEnum) != 0 ) { + LOGERR("%s: %s/%s Param Not Found \n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str()); + returnResponse(false); + } - if( !isCapablityCheckPassed( "CMS",inputInfo )) { - LOGERR("%s: CapablityCheck failed for CMS\n", __FUNCTION__); - returnResponse(false); - } + retVal = getCMSColorEnumFromString(inputInfo.color,colorEnum); + if( retVal == -1) { + LOGERR("%s: Invalid Color : %s\n",__FUNCTION__,inputInfo.color.c_str()); + returnResponse(false); + } - if ( convertCMSParamToPQEnum(inputInfo.component,inputInfo.color,tvPQEnum) != 0 ) { - LOGERR("%s: %s/%s Param Not Found \n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str()); - returnResponse(false); - } + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + tvError_t ret = SetCMSState(true); + if(ret != tvERROR_NONE) { + LOGWARN("CMS enable failed\n"); + returnResponse(false); + } - retVal = getCMSColorEnumFromString(inputInfo.color,colorEnum); - if( retVal == -1) { - LOGERR("%s: Invalid Color : %s\n",__FUNCTION__,inputInfo.color.c_str()); - returnResponse(false); - } + if(inputInfo.component.compare("Saturation") == 0) + ret = SetCurrentComponentSaturation(colorEnum, level); + else if(inputInfo.component.compare("Hue") == 0 ) + ret = SetCurrentComponentHue(colorEnum,level); + else if( inputInfo.component.compare("Luma") == 0 ) + ret = SetCurrentComponentLuma(colorEnum,level); + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - tvError_t ret = SetCMSState(true); if(ret != tvERROR_NONE) { - LOGWARN("CMS enable failed\n"); + LOGERR("Failed to set CMS\n"); returnResponse(false); } - - if(inputInfo.component.compare("Saturation") == 0) - ret = SetCurrentComponentSaturation(colorEnum, level); - else if(inputInfo.component.compare("Hue") == 0 ) - ret = SetCurrentComponentHue(colorEnum,level); - else if( inputInfo.component.compare("Luma") == 0 ) - ret = SetCurrentComponentLuma(colorEnum,level); - - } + else { + std::string cmsParam; + cmsParam = inputInfo.color+"."+inputInfo.component; - if(ret != tvERROR_NONE) { - LOGERR("Failed to set CMS\n"); - returnResponse(false); + retVal= updateAVoutputTVParam("set","CMS",inputInfo,tvPQEnum,level); + if(retVal != 0 ) { + LOGERR("%s : Failed to Save CMS %s/%s(%s) to ssm_data\n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str(),cmsParam.c_str()); + returnResponse(false); + } + LOGINFO("Exit : setCMS %s/%s successful to value: %d\n", inputInfo.component.c_str(),inputInfo.color.c_str(),level); + returnResponse(true); + } } - else { - std::string cmsParam; - cmsParam = inputInfo.color+"."+inputInfo.component; - - 
retVal= updateAVoutputTVParam("set","CMS",inputInfo,tvPQEnum,level); - if(retVal != 0 ) { - LOGERR("%s : Failed to Save CMS %s/%s(%s) to ssm_data\n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str(),cmsParam.c_str()); + else + { + bool status = setCMSParam(parameters); + if (status) { + LOGINFO("setCMS success"); + returnResponse(true); + } else { + LOGERR("setCMS failed"); returnResponse(false); } - LOGINFO("Exit : setCMS %s/%s successful to value: %d\n", inputInfo.component.c_str(),inputInfo.color.c_str(),level); - returnResponse(true); } } uint32_t AVOutputTV::resetCMS(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + int retVal = 0; + std::string color,component; + tvError_t ret = tvERROR_NONE; + JsonArray sourceArray; + JsonArray pqmodeArray; + JsonArray formatArray; + JsonArray colorArray; + JsonArray componentArray; + + if (isPlatformSupport("CMS") != 0) { + returnResponse(false); + } - capDetails_t inputInfo; - int retVal = 0; - std::string color,component; - tvError_t ret = tvERROR_NONE; - JsonArray sourceArray; - JsonArray pqmodeArray; - JsonArray formatArray; - JsonArray colorArray; - JsonArray componentArray; - - if (isPlatformSupport("CMS") != 0) { - returnResponse(false); - } - - pqmodeArray = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].Array() : JsonArray(); - for (int i = 0; i < pqmodeArray.Length(); ++i) { - inputInfo.pqmode += pqmodeArray[i].String(); - if (i != (pqmodeArray.Length() - 1) ) { - inputInfo.pqmode += ","; + pqmodeArray = parameters.HasLabel("pictureMode") ? parameters["pictureMode"].Array() : JsonArray(); + for (int i = 0; i < pqmodeArray.Length(); ++i) { + inputInfo.pqmode += pqmodeArray[i].String(); + if (i != (pqmodeArray.Length() - 1) ) { + inputInfo.pqmode += ","; + } } - } - sourceArray = parameters.HasLabel("videoSource") ? parameters["videoSource"].Array() : JsonArray(); - for (int i = 0; i < sourceArray.Length(); ++i) { - inputInfo.source += sourceArray[i].String(); - if (i != (sourceArray.Length() - 1) ) { - inputInfo.source += ","; - } - } + sourceArray = parameters.HasLabel("videoSource") ? parameters["videoSource"].Array() : JsonArray(); + for (int i = 0; i < sourceArray.Length(); ++i) { + inputInfo.source += sourceArray[i].String(); + if (i != (sourceArray.Length() - 1) ) { + inputInfo.source += ","; + } + } - formatArray = parameters.HasLabel("videoFormat") ? parameters["videoFormat"].Array() : JsonArray(); - for (int i = 0; i < formatArray.Length(); ++i) { - inputInfo.format += formatArray[i].String(); - if (i != (formatArray.Length() - 1) ) { - inputInfo.format += ","; + formatArray = parameters.HasLabel("videoFormat") ? parameters["videoFormat"].Array() : JsonArray(); + for (int i = 0; i < formatArray.Length(); ++i) { + inputInfo.format += formatArray[i].String(); + if (i != (formatArray.Length() - 1) ) { + inputInfo.format += ","; + } } - } - colorArray = parameters.HasLabel("color") ? parameters["color"].Array() : JsonArray(); - for (int i = 0; i < colorArray.Length(); ++i) { - inputInfo.color += colorArray[i].String(); - if (i != (colorArray.Length() - 1) ) { - inputInfo.color += ","; + colorArray = parameters.HasLabel("color") ? parameters["color"].Array() : JsonArray(); + for (int i = 0; i < colorArray.Length(); ++i) { + inputInfo.color += colorArray[i].String(); + if (i != (colorArray.Length() - 1) ) { + inputInfo.color += ","; + } } - } - componentArray = parameters.HasLabel("component") ? 
parameters["component"].Array() : JsonArray(); - for (int i = 0; i < componentArray.Length(); ++i) { - inputInfo.component += componentArray[i].String(); - if (i != (componentArray.Length() - 1) ) { - inputInfo.component += ","; + componentArray = parameters.HasLabel("component") ? parameters["component"].Array() : JsonArray(); + for (int i = 0; i < componentArray.Length(); ++i) { + inputInfo.component += componentArray[i].String(); + if (i != (componentArray.Length() - 1) ) { + inputInfo.component += ","; + } + } + if (inputInfo.source.empty()) { + inputInfo.source = "Global"; + } + if (inputInfo.pqmode.empty()) { + inputInfo.pqmode = "Global"; + } + if (inputInfo.format.empty()) { + inputInfo.format = "Global"; + } + if (inputInfo.color.empty()) { + inputInfo.color = "Global"; + } + if (inputInfo.component.empty()) { + inputInfo.component = "Global"; } - } - if (inputInfo.source.empty()) { - inputInfo.source = "Global"; - } - if (inputInfo.pqmode.empty()) { - inputInfo.pqmode = "Global"; - } - if (inputInfo.format.empty()) { - inputInfo.format = "Global"; - } - if (inputInfo.color.empty()) { - inputInfo.color = "Global"; - } - if (inputInfo.component.empty()) { - inputInfo.component = "Global"; - } - if (convertToValidInputParameter("CMS", inputInfo) != 0) { - LOGERR("%s: Failed to convert the input paramters. \n", __FUNCTION__); - returnResponse(false); - } + if (convertToValidInputParameter("CMS", inputInfo) != 0) { + LOGERR("%s: Failed to convert the input paramters. \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "CMS" , inputInfo )) { - LOGERR("%s: CapablityCheck failed for CMS\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "CMS" , inputInfo )) { + LOGERR("%s: CapablityCheck failed for CMS\n", __FUNCTION__); + returnResponse(false); + } + + if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { + LOGINFO("Proceed with %s\n",__FUNCTION__); + tvError_t ret = SetCMSState(false); + if(ret != tvERROR_NONE) { + LOGWARN("CMS disable failed\n"); + returnResponse(false); + } + } - if( isSetRequired(inputInfo.pqmode,inputInfo.source,inputInfo.format) ) { - LOGINFO("Proceed with %s\n",__FUNCTION__); - tvError_t ret = SetCMSState(false); if(ret != tvERROR_NONE) { - LOGWARN("CMS disable failed\n"); + LOGERR("%s : Failed to setCMSState\n",__FUNCTION__); returnResponse(false); - } - } - - if(ret != tvERROR_NONE) { - LOGERR("%s : Failed to setCMSState\n",__FUNCTION__); - returnResponse(false); + } + else { + int cms = 0; + retVal= updateAVoutputTVParam("reset","CMS",inputInfo,PQ_PARAM_CMS_SATURATION_RED,cms); + if(retVal != 0 ) { + LOGERR("%s : Failed to Save CMS %s/%s to ssm_data\n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str() ); + returnResponse(false); + } + returnResponse(true); + } } - else { + else + { + if (isSetRequiredForParam(parameters, "CMS")) { + LOGINFO("Proceed with SetCMSState \n"); + tvError_t ret = SetCMSState(false); + if(ret != tvERROR_NONE) { + LOGWARN("CMS disable failed\n"); + returnResponse(false); + } + } int cms = 0; - retVal= updateAVoutputTVParam("reset","CMS",inputInfo,PQ_PARAM_CMS_SATURATION_RED,cms); + int retVal= updateAVoutputTVParamV2("reset","CMS",parameters,PQ_PARAM_CMS,cms); if(retVal != 0 ) { - LOGERR("%s : Failed to Save CMS %s/%s to ssm_data\n",__FUNCTION__,inputInfo.component.c_str(),inputInfo.color.c_str() ); + LOGERR("%s : Failed to Save CMS to ssm_data\n",__FUNCTION__); returnResponse(false); } returnResponse(true); } } + uint32_t 
AVOutputTV::getCMSCapsV2(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry: getCMSCapsV2"); + + int max_hue = 0, max_saturation = 0, max_luma = 0; + tvDataComponentColor_t* colorArray = nullptr; + tvComponentType_t* componentArray = nullptr; + size_t num_color = 0, num_component = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t ret = GetCMSCaps(&max_hue, &max_saturation, &max_luma, + &colorArray, &componentArray, + &num_color, &num_component, &context_caps); + + if (ret != tvERROR_NONE) { + LOGERR("GetCMSCaps failed with error: %d", ret); + returnResponse(false); + } + response["platformSupport"] = true; + + // Range Info + JsonObject rangeHue, rangeSaturation, rangeLuma; + rangeHue["from"] = 0; + rangeHue["to"] = max_hue; + rangeSaturation["from"] = 0; + rangeSaturation["to"] = max_saturation; + rangeLuma["from"] = 0; + rangeLuma["to"] = max_luma; + + response["rangeHue"] = rangeHue; + response["rangeSaturation"] = rangeSaturation; + response["rangeLuma"] = rangeLuma; + + // Color Info + JsonArray colorJson; + for (size_t i = 0; i < num_color; ++i) { + colorJson.Add(getCMSColorStringFromEnum(colorArray[i])); + } + response["color"] = colorJson; + + // Component Info + JsonArray componentJson; + for (size_t i = 0; i < num_component; ++i) { + componentJson.Add(getCMSComponentStringFromEnum(componentArray[i])); + } + response["component"] = componentJson; + response["context"] = parseContextCaps(context_caps); +#if HAL_NOT_READY + // Clean up dynamic memory + delete[] colorArray; + delete[] componentArray; +#endif + LOGINFO("Exit: getCMSCapsV2"); + returnResponse(true); + } + uint32_t AVOutputTV::getCMSCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry"); @@ -3320,7 +5501,7 @@ namespace Plugin { capDetails_t inputInfo; tvError_t ret = tvERROR_NONE; std::string value; - int retval = 0; + int retval = 0; value = parameters.HasLabel("HDRMode") ? 
parameters["HDRMode"].String() : ""; returnIfParamNotFound(parameters,"HDRMode"); @@ -3656,6 +5837,62 @@ namespace Plugin { } } + uint32_t AVOutputTV::get2PointWBCapsV2(const JsonObject& parameters, JsonObject& response) + { + LOGINFO("Entry: get2PointWBCapsV2"); + + int min_gain = 0, min_offset = 0, max_gain = 0, max_offset = 0; + tvWBColor_t* colorArray = nullptr; + tvWBControl_t* controlArray = nullptr; + size_t num_color = 0, num_control = 0; + tvContextCaps_t* context_caps = nullptr; + + tvError_t ret = GetCustom2PointWhiteBalanceCaps(&min_gain, &min_offset, &max_gain, &max_offset, + &colorArray, &controlArray, + &num_color, &num_control, &context_caps); + + if (ret != tvERROR_NONE) { + LOGERR("GetCustom2PointWhiteBalanceCaps failed with error: %d", ret); + returnResponse(false); + } + + response["platformSupport"] = true; + + // Range Info + JsonObject rangeGain, rangeOffset; + rangeGain["from"] = min_gain; + rangeGain["to"] = max_gain; + rangeOffset["from"] = min_offset; + rangeOffset["to"] = max_offset; + + response["rangeGain"] = rangeGain; + response["rangeOffset"] = rangeOffset; + + // Control Info + JsonArray controlJson; + for (size_t i = 0; i < num_control; ++i) { + controlJson.Add(getWBControlStringFromEnum(controlArray[i])); + } + response["control"] = controlJson; + + // Color Info + JsonArray colorJson; + for (size_t i = 0; i < num_color; ++i) { + colorJson.Add(getWBColorStringFromEnum(colorArray[i])); + } + response["color"] = colorJson; + response["context"] = parseContextCaps(context_caps); + +#if HAL_NOT_READY + delete[] colorArray; + delete[] controlArray; +#endif + + LOGINFO("Exit: get2PointWBCapsV2"); + returnResponse(true); + } + + uint32_t AVOutputTV::get2PointWBCaps(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); @@ -3782,83 +6019,113 @@ namespace Plugin { uint32_t AVOutputTV::setAutoBacklightMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); - std::string value; - tvBacklightMode_t mode = tvBacklightMode_AMBIENT; - capDetails_t inputInfo; - + if(m_backlightModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + std::string value; + tvBacklightMode_t mode = tvBacklightMode_AMBIENT; + capDetails_t inputInfo; - value = parameters.HasLabel("mode") ? parameters["mode"].String() : ""; - returnIfParamNotFound(parameters,"mode"); + value = parameters.HasLabel("mode") ? 
parameters["mode"].String() : ""; + returnIfParamNotFound(parameters,"mode"); - if (validateInputParameter("AutoBacklightMode",value) != 0) { - LOGERR("%s: Range validation failed for AutoBacklightMode\n", __FUNCTION__); - returnResponse(false); - } + if (validateInputParameter("AutoBacklightMode",value) != 0) { + LOGERR("%s: Range validation failed for AutoBacklightMode\n", __FUNCTION__); + returnResponse(false); + } - if (isPlatformSupport("AutoBacklightMode") != 0) { - returnResponse(false); - } + if (isPlatformSupport("AutoBacklightMode") != 0) { + returnResponse(false); + } - if (parsingSetInputArgument(parameters,"AutoBacklightMode",inputInfo) != 0) { - LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); - returnResponse(false); - } + if (parsingSetInputArgument(parameters,"AutoBacklightMode",inputInfo) != 0) { + LOGERR("%s: Failed to parse the input arguments \n", __FUNCTION__); + returnResponse(false); + } - if( !isCapablityCheckPassed( "AutoBacklightMode",inputInfo )) { - LOGERR("%s: CapablityCheck failed for AutoBacklightMode\n", __FUNCTION__); - returnResponse(false); - } + if( !isCapablityCheckPassed( "AutoBacklightMode",inputInfo )) { + LOGERR("%s: CapablityCheck failed for AutoBacklightMode\n", __FUNCTION__); + returnResponse(false); + } - if(!value.compare("Manual")) { - mode = tvBacklightMode_MANUAL; - } - else if (!value.compare("Ambient")) { - mode = tvBacklightMode_AMBIENT; - } - else { - returnResponse(false); - } - - tvError_t ret = SetCurrentBacklightMode (mode); + if(!value.compare("Manual")) { + mode = tvBacklightMode_MANUAL; + } + else if (!value.compare("Ambient")) { + mode = tvBacklightMode_AMBIENT; + } + else { + returnResponse(false); + } - if(ret != tvERROR_NONE) { - returnResponse(false); - } - else { - //Save AutoBacklightMode to localstore + tvError_t ret = SetCurrentBacklightMode (mode); - tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, value.c_str()); - if ( err != tr181Success ) { - LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); + if(ret != tvERROR_NONE) { returnResponse(false); } else { - LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, value.c_str()); + //Save AutoBacklightMode to localstore + + tr181ErrorCode_t err = setLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, value.c_str()); + if ( err != tr181Success ) { + LOGERR("setLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); + returnResponse(false); + } + else { + LOGINFO("setLocalParam for %s Successful, Value: %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, value.c_str()); + } + LOGINFO("Exit : SetAutoBacklightMode() value : %s\n",value.c_str()); + returnResponse(true); } - LOGINFO("Exit : SetAutoBacklightMode() value : %s\n",value.c_str()); - returnResponse(true); + } + else + { + bool success = false; + success = setEnumPQParam( + parameters, + "mode", + "BacklightMode", + backlightModeReverseMap, + PQ_PARAM_BACKLIGHT_MODE, + [](int val) { + return SetCurrentBacklightMode(static_cast(val)); + }); + + returnResponse(success); } } uint32_t AVOutputTV::getAutoBacklightMode(const JsonObject& parameters, JsonObject& response) { + if(m_backlightModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + TR181_ParamData_t param; - TR181_ParamData_t param; - - if (isPlatformSupport("AutoBacklightMode") != 0) { - returnResponse(false); - } + if 
(isPlatformSupport("AutoBacklightMode") != 0) { + returnResponse(false); + } - tr181ErrorCode_t err = getLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, ¶m); - if (err!= tr181Success) { - returnResponse(false); + tr181ErrorCode_t err = getLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, ¶m); + if (err!= tr181Success) { + returnResponse(false); + } + else { + std::string s; + s+=param.value; + response["mode"] = s; + LOGINFO("Exit getAutoBacklightMode(): %s\n",s.c_str()); + returnResponse(true); + } } - else { - std::string s; - s+=param.value; - response["mode"] = s; - LOGINFO("Exit getAutoBacklightMode(): %s\n",s.c_str()); - returnResponse(true); + else + { + std::string mode; + if (getEnumPQParamString(parameters, "BacklightMode", + PQ_PARAM_BACKLIGHT_MODE, backlightModeMap, mode)) { + response["mode"] = mode; + returnResponse(true); + } else { + returnResponse(false); + } } } @@ -3866,63 +6133,77 @@ namespace Plugin { uint32_t AVOutputTV::resetAutoBacklightMode(const JsonObject& parameters, JsonObject& response) { LOGINFO("Entry\n"); + if(m_backlightModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + tvError_t ret = tvERROR_NONE; - tvError_t ret = tvERROR_NONE; - - if (isPlatformSupport("AutoBacklightMode") != 0) { - returnResponse(false); - } - - tr181ErrorCode_t err = clearLocalParam(rfc_caller_id,AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM); - if ( err != tr181Success ) { - LOGWARN("clearLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); - ret = tvERROR_GENERAL; - } - else { - LOGINFO("clearLocalParam for %s Successful\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM); - - TR181_ParamData_t param; - memset(¶m, 0, sizeof(param)); + if (isPlatformSupport("AutoBacklightMode") != 0) { + returnResponse(false); + } - tr181ErrorCode_t err = getLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM,¶m); + tr181ErrorCode_t err = clearLocalParam(rfc_caller_id,AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM); if ( err != tr181Success ) { - LOGWARN("getLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); + LOGWARN("clearLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); ret = tvERROR_GENERAL; } else { - tvBacklightMode_t blMode = tvBacklightMode_NONE; + LOGINFO("clearLocalParam for %s Successful\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM); - if(!std::string(param.value).compare("none")) { - blMode = tvBacklightMode_NONE; - } - else if (!std::string(param.value).compare("Manual")){ - blMode = tvBacklightMode_MANUAL; - } - else if (!std::string(param.value).compare("Ambient")){ - blMode = tvBacklightMode_AMBIENT; - } - else if (!std::string(param.value).compare("Eco")){ - blMode = tvBacklightMode_ECO; - } - else { - blMode = tvBacklightMode_NONE; - } - ret = SetCurrentBacklightMode(blMode); - if(ret != tvERROR_NONE) { - LOGWARN("Autobacklight Mode set failed: %s\n",getErrorString(ret).c_str()); + TR181_ParamData_t param; + memset(¶m, 0, sizeof(param)); + + tr181ErrorCode_t err = getLocalParam(rfc_caller_id, AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM,¶m); + if ( err != tr181Success ) { + LOGWARN("getLocalParam for %s Failed : %s\n", AVOUTPUT_AUTO_BACKLIGHT_MODE_RFC_PARAM, getTR181ErrorString(err)); + ret = tvERROR_GENERAL; } else { - LOGINFO("Exit : Autobacklight Mode set successfully, value: %s\n", param.value); + tvBacklightMode_t blMode = tvBacklightMode_NONE; + + if(!std::string(param.value).compare("none")) { + blMode = 
tvBacklightMode_NONE; + } + else if (!std::string(param.value).compare("Manual")){ + blMode = tvBacklightMode_MANUAL; + } + else if (!std::string(param.value).compare("Ambient")){ + blMode = tvBacklightMode_AMBIENT; + } + else if (!std::string(param.value).compare("Eco")){ + blMode = tvBacklightMode_ECO; + } + else { + blMode = tvBacklightMode_NONE; + } + ret = SetCurrentBacklightMode(blMode); + if(ret != tvERROR_NONE) { + LOGWARN("Autobacklight Mode set failed: %s\n",getErrorString(ret).c_str()); + } + else { + LOGINFO("Exit : Autobacklight Mode set successfully, value: %s\n", param.value); + } } - } - } - if(ret != tvERROR_NONE) - { - returnResponse(false); + } + if(ret != tvERROR_NONE) + { + returnResponse(false); + } + else + { + returnResponse(true); + } } else { - returnResponse(true); + bool success = resetEnumPQParamToDefault( + parameters, + "BacklightMode", + PQ_PARAM_BACKLIGHT_MODE, + backlightModeMap, + [](int value, const std::unordered_map&) -> tvError_t { + return SetCurrentBacklightMode(static_cast(value)); + }); + returnResponse(success); } } @@ -3937,7 +6218,7 @@ namespace Plugin { returnResponse(false); } else { - response["currentVideoSource"] = convertSourceIndexToString(currentSource); + response["currentVideoSource"] = convertSourceIndexToStringV2(currentSource); LOGINFO("Exit: getVideoSource :%d success \n", currentSource); returnResponse(true); } diff --git a/AVOutput/AVOutputTV.h b/AVOutput/AVOutputTV.h index 612aebf8..84c89526 100644 --- a/AVOutput/AVOutputTV.h +++ b/AVOutput/AVOutputTV.h @@ -204,6 +204,13 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(getHDRMode) DECLARE_JSON_RPC_METHOD(get2PointWB) DECLARE_JSON_RPC_METHOD(getAutoBacklightMode) + DECLARE_JSON_RPC_METHOD(getAISuperResolution) + DECLARE_JSON_RPC_METHOD(getPrecisionDetail) + DECLARE_JSON_RPC_METHOD(getLocalContrastEnhancement) + DECLARE_JSON_RPC_METHOD(getMPEGNoiseReduction) + DECLARE_JSON_RPC_METHOD(getDigitalNoiseReduction) + DECLARE_JSON_RPC_METHOD(getMEMC) + /*Get Capability API's*/ @@ -227,6 +234,29 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(get2PointWBCaps) DECLARE_JSON_RPC_METHOD(getHDRModeCaps) DECLARE_JSON_RPC_METHOD(getAutoBacklightModeCaps) + DECLARE_JSON_RPC_METHOD(getBacklightCapsV2) + DECLARE_JSON_RPC_METHOD(getBrightnessCapsV2) + DECLARE_JSON_RPC_METHOD(getContrastCapsV2) + DECLARE_JSON_RPC_METHOD(getSharpnessCapsV2) + DECLARE_JSON_RPC_METHOD(getSaturationCapsV2) + DECLARE_JSON_RPC_METHOD(getHueCapsV2) + DECLARE_JSON_RPC_METHOD(getPrecisionDetailCaps) + DECLARE_JSON_RPC_METHOD(getLowLatencyStateCapsV2) + DECLARE_JSON_RPC_METHOD(getColorTemperatureCapsV2) + DECLARE_JSON_RPC_METHOD(getSDRGammaCaps) + DECLARE_JSON_RPC_METHOD(getBacklightDimmingModeCapsV2) + DECLARE_JSON_RPC_METHOD(getZoomModeCapsV2) + DECLARE_JSON_RPC_METHOD(getCMSCapsV2) + DECLARE_JSON_RPC_METHOD(get2PointWBCapsV2) + DECLARE_JSON_RPC_METHOD(getDolbyVisionCalibrationCaps) + DECLARE_JSON_RPC_METHOD(getPictureModeCapsV2) + DECLARE_JSON_RPC_METHOD(getAutoBacklightModeCapsV2) + DECLARE_JSON_RPC_METHOD(getLocalContrastEnhancementCaps) + DECLARE_JSON_RPC_METHOD(getMPEGNoiseReductionCaps) + DECLARE_JSON_RPC_METHOD(getDigitalNoiseReductionCaps) + DECLARE_JSON_RPC_METHOD(getAISuperResolutionCaps) + DECLARE_JSON_RPC_METHOD(getMEMCCaps) + DECLARE_JSON_RPC_METHOD(getMultiPointWBCaps) /*Set API's*/ DECLARE_JSON_RPC_METHOD(setBacklight) @@ -247,6 +277,12 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(set2PointWB ) DECLARE_JSON_RPC_METHOD(signalFilmMakerMode) 
DECLARE_JSON_RPC_METHOD(setAutoBacklightMode) + DECLARE_JSON_RPC_METHOD(setAISuperResolution) + DECLARE_JSON_RPC_METHOD(setPrecisionDetail) + DECLARE_JSON_RPC_METHOD(setLocalContrastEnhancement) + DECLARE_JSON_RPC_METHOD(setMPEGNoiseReduction) + DECLARE_JSON_RPC_METHOD(setDigitalNoiseReduction) + DECLARE_JSON_RPC_METHOD(setMEMC) /*Reset API's*/ DECLARE_JSON_RPC_METHOD(resetBacklight) @@ -265,6 +301,14 @@ class AVOutputTV : public AVOutputBase { DECLARE_JSON_RPC_METHOD(resetCMS) DECLARE_JSON_RPC_METHOD(reset2PointWB) DECLARE_JSON_RPC_METHOD(resetAutoBacklightMode) + DECLARE_JSON_RPC_METHOD(resetAISuperResolution) + DECLARE_JSON_RPC_METHOD(resetPrecisionDetail) + DECLARE_JSON_RPC_METHOD(resetLocalContrastEnhancement) + DECLARE_JSON_RPC_METHOD(resetMPEGNoiseReduction) + DECLARE_JSON_RPC_METHOD(resetDigitalNoiseReduction) + DECLARE_JSON_RPC_METHOD(resetMEMC) + + private: @@ -340,8 +384,12 @@ class AVOutputTV : public AVOutputBase { tvError_t getParamsCaps(std::string param, capVectors_t &vecInfo); int GetPanelID(char *panelid); int ReadCapablitiesFromConf(std::string param, capDetails_t& info); + void getDimmingModeStringFromEnum(int value, std::string &toStore); void getColorTempStringFromEnum(int value, std::string &toStore); + void getDisplayModeStringFromEnum(int value, std::string &toStore); + void getBacklightModeStringFromEnum(int value, std::string &toStore); + int getCurrentPictureMode(char *picMode); int getDolbyParamToSync(int sourceIndex, int formatIndex, int& value); tvDolbyMode_t GetDolbyVisionEnumFromModeString(const char* modeString); @@ -372,6 +420,145 @@ class AVOutputTV : public AVOutputBase { void broadcastLowLatencyModeChangeEvent(bool lowLatencyMode); tvError_t setAspectRatioZoomSettings(tvDisplayMode_t mode); tvError_t setDefaultAspectRatio(std::string pqmode="none",std::string format="none",std::string source="none"); + template + static int getEnumFromString(const std::map& reverseMap, const std::string& key, T defaultVal) { + auto it = reverseMap.find(key); + return (it != reverseMap.end()) ? 
it->second : defaultVal; + } + + static const std::map pqModeMap; + static const std::map videoFormatMap; + static const std::map videoSrcMap; + static const std::unordered_map backlightModeMap; + + static std::unordered_map pqModeReverseMap; + static std::unordered_map videoFormatReverseMap; + static std::unordered_map videoSrcReverseMap; + static bool reverseMapsInitialized; + static void initializeReverseMaps(); + static const std::unordered_map backlightModeReverseMap; + + tvError_t ReadJsonFile(JsonObject& root); + tvError_t ExtractContextCaps(const JsonObject& data, tvContextCaps_t** context_caps); + tvError_t ExtractRangeInfo(const JsonObject& data, int* max_value); + std::vector ParseContextCaps(const JsonObject& context); + tvContextCaps_t* AllocateContextCaps(const std::vector& contexts); + tvError_t GetCaps(const std::string& key, int* max_value, tvContextCaps_t** context_caps); + + tvError_t GetDVCalibrationCaps(tvDVCalibrationSettings_t **min_values, tvDVCalibrationSettings_t **max_values, tvContextCaps_t **context_caps); + tvError_t GetBacklightModeCaps(tvBacklightMode_t** backlight_mode, size_t* num_backlight_mode, tvContextCaps_t** context_caps); + tvError_t GetLocalContrastEnhancementCaps(int* maxLocalContrastEnhancement, tvContextCaps_t** context_caps); + tvError_t GetMPEGNoiseReductionCaps(int* maxMPEGNoiseReduction, tvContextCaps_t** context_caps); + tvError_t GetDigitalNoiseReductionCaps(int* maxDigitalNoiseReduction, tvContextCaps_t** context_caps); + tvError_t GetMultiPointWBCaps(int* num_hal_matrix_points, + int* rgb_min, + int* rgb_max, + int* num_ui_matrix_points, + double** ui_matrix_positions, + tvContextCaps_t** context_caps); + tvError_t GetCMSCaps(int* max_hue, + int* max_saturation, + int* max_luma, + tvDataComponentColor_t** color, + tvComponentType_t** component, + size_t* num_color, + size_t* num_component, + tvContextCaps_t** context_caps); + tvError_t GetCustom2PointWhiteBalanceCaps( int *min_gain, + int *min_offset, int *max_gain, + int *max_offset, tvWBColor_t **color, + tvWBControl_t **control, size_t* num_color, + size_t* num_control, tvContextCaps_t ** context_caps); +#define HAL_NOT_READY 0 +#if HAL_NOT_READY +#define CAPABLITY_FILE_NAMEV2 "/opt/panel/pq_capabilities.json" + tvError_t GetBacklightCaps(int *max_backlight, tvContextCaps_t **context_caps); + tvError_t GetBrightnessCaps(int *max_brightness, tvContextCaps_t **context_caps); + tvError_t GetContrastCaps(int* max_contrast, tvContextCaps_t** context_caps); + tvError_t GetSharpnessCaps(int *max_sharpness, tvContextCaps_t **context_caps); + tvError_t GetSaturationCaps(int* max_saturation, tvContextCaps_t** context_caps); + tvError_t GetHueCaps(int* max_hue, tvContextCaps_t** context_caps); + tvError_t GetPrecisionDetailCaps(int* max_precision, tvContextCaps_t** context_caps); + tvError_t GetLowLatencyStateCaps(int* max_latency, tvContextCaps_t ** context_caps); + tvError_t GetColorTemperatureCaps(tvColorTemp_t** color_temp, size_t* num_color_temp, tvContextCaps_t** context_caps); + tvError_t GetSdrGammaCaps(tvSdrGamma_t** sdr_gamma, size_t* num_sdr_gamma, tvContextCaps_t** context_caps); + tvError_t GetTVDimmingModeCaps(tvDimmingMode_t** dimming_mode, size_t* num_dimming_mode, tvContextCaps_t** context_caps); + tvError_t GetAspectRatioCaps(tvDisplayMode_t** aspect_ratio, size_t* num_aspect_ratio, tvContextCaps_t** context_caps); + tvError_t GetTVPictureModeCaps(tvPQModeIndex_t** mode, size_t* num_pic_modes, tvContextCaps_t** context_caps); + tvError_t GetAISuperResolutionCaps(int* 
maxAISuperResolution, tvContextCaps_t** context_caps); + tvError_t GetMEMCCaps(int* maxMEMC, tvContextCaps_t** context_caps); +#else +#define CAPABLITY_FILE_NAMEV2 "/etc/pq_capabilities.json" +#endif + uint32_t getPQCapabilityWithContext( + const std::function& getCapsFunc, + const JsonObject& parameters, + JsonObject& response); + JsonObject parseContextCaps(tvContextCaps_t* context_caps); + // Helper functions to extract modes/sources/formats from parameters + std::vector extractPQModes(const JsonObject& parameters); + std::vector extractVideoSources(const JsonObject& parameters); + std::vector extractVideoFormats(const JsonObject& parameters); + static bool isGlobalParam(const JsonArray& arr); + JsonArray getJsonArrayIfArray(const JsonObject& obj, const std::string& key); + int updateAVoutputTVParamV2(std::string action, std::string tr181ParamName, + const JsonObject& parameters, tvPQParameterIndex_t pqParamIndex, int level); + std::vector getValidContextsFromParameters(const JsonObject& parameters,const std::string& tr181ParamName ); + typedef tvError_t (*tvSetFunction)(int); + bool resetPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + tvSetFunction halSetter); + typedef tvError_t (*tvSetFunctionV2)(tvVideoSrcType_t, tvPQModeIndex_t,tvVideoFormatType_t,int); + bool resetPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + tvSetFunctionV2 halSetter); + bool resetEnumPQParamToDefault(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqIndex, + const std::unordered_map& valueMap, + std::function&)> halSetter); + tvConfigContext_t getValidContextFromGetParameters(const JsonObject& parameters, const std::string& paramName); + bool getPQParamFromContext(const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t paramType, + int& outValue); + bool getEnumPQParamString( + const JsonObject& parameters, + const std::string& paramName, + tvPQParameterIndex_t pqType, + const std::unordered_map& enumToStrMap, + std::string& outStr); + bool setIntPQParam(const JsonObject& parameters, const std::string& paramName, + tvPQParameterIndex_t pqType, tvSetFunction halSetter, int maxCap); + bool setEnumPQParam(const JsonObject& parameters, + const std::string& inputKey, + const std::string& paramName, + const std::unordered_map& valueMap, + tvPQParameterIndex_t paramType, + std::function halSetter); + uint32_t setContextPQParam(const JsonObject& parameters, JsonObject& response, + const std::string& inputParamName, + const std::string& tr181ParamName, + int maxAllowedValue, + tvPQParameterIndex_t pqParamType, + std::function halSetter); + bool setPictureModeV2(const JsonObject& parameters); + bool getPictureModeV2(const JsonObject& parameters, std::string& outMode); + std::string getCurrentPictureModeAsString(); + std::string getCurrentVideoFormatAsString(); + std::string getCurrentVideoSourceAsString(); + bool isSetRequiredForParam(const JsonObject& parameters, const std::string& paramName); + tvContextCaps_t* getCapsForParam(const std::string& paramName); + bool isValidSource(const std::vector& sourceArray, tvVideoSrcType_t sourceIndex); + bool isValidFormat(const std::vector& formatArray, tvVideoFormatType_t formatIndex); + tvError_t updateAVoutputTVParamToHALV2(std::string forParam, paramIndex_t indexInfo, int value, bool setNotDelete); + bool resetPictureModeV2(const JsonObject& parameters); + int 
syncAvoutputTVPQModeParamsToHALV2(std::string pqmode, std::string source, std::string format); + std::string getCMSNameFromEnum(tvDataComponentColor_t colorEnum); + void syncCMSParamsV2(); + public: int m_currentHdmiInResoluton; @@ -380,6 +567,125 @@ class AVOutputTV : public AVOutputBase { char rfc_caller_id[RFC_BUFF_MAX]; bool appUsesGlobalBackLightFactor; int pic_mode_index[PIC_MODES_SUPPORTED_MAX]; + + + int m_maxBacklight = 0; + tvContextCaps_t* m_backlightCaps = nullptr; + tvError_t m_backlightStatus = tvERROR_NONE; + + int m_maxBrightness = 0; + tvContextCaps_t* m_brightnessCaps = nullptr; + tvError_t m_brightnessStatus = tvERROR_NONE; + + int m_maxContrast = 0; + tvContextCaps_t* m_contrastCaps = nullptr; + tvError_t m_contrastStatus = tvERROR_NONE; + + int m_maxSharpness = 0; + tvContextCaps_t* m_sharpnessCaps = nullptr; + tvError_t m_sharpnessStatus = tvERROR_NONE; + + int m_maxSaturation = 0; + tvContextCaps_t* m_saturationCaps = nullptr; + tvError_t m_saturationStatus = tvERROR_NONE; + + int m_maxHue = 0; + tvContextCaps_t* m_hueCaps = nullptr; + tvError_t m_hueStatus = tvERROR_NONE; + + int m_maxlowLatencyState = 0; + tvContextCaps_t* m_lowLatencyStateCaps = nullptr; + tvError_t m_lowLatencyStateStatus = tvERROR_NONE; + + int m_maxPrecisionDetail = 0; + tvContextCaps_t* m_precisionDetailCaps = nullptr; + tvError_t m_precisionDetailStatus = tvERROR_NONE; + + int m_maxLocalContrastEnhancement = 0; + tvContextCaps_t* m_localContrastEnhancementCaps = nullptr; + tvError_t m_localContrastEnhancementStatus = tvERROR_NONE; + + int m_maxMPEGNoiseReduction = 0; + tvContextCaps_t* m_MPEGNoiseReductionCaps = nullptr; + tvError_t m_MPEGNoiseReductionStatus = tvERROR_NONE; + + int m_maxDigitalNoiseReduction = 0; + tvContextCaps_t* m_digitalNoiseReductionCaps = nullptr; + tvError_t m_digitalNoiseReductionStatus = tvERROR_NONE; + + int m_maxAISuperResolution = 0; + tvContextCaps_t* m_AISuperResolutionCaps = nullptr; + tvError_t m_AISuperResolutionStatus = tvERROR_NONE; + + int m_maxMEMC = 0; + tvContextCaps_t* m_MEMCCaps = nullptr; + tvError_t m_MEMCStatus = tvERROR_NONE; + + tvColorTemp_t* m_colortemp = nullptr; + size_t m_numColortemp = 0; + tvContextCaps_t* m_colortempCaps = nullptr; + tvError_t m_colorTempStatus = tvERROR_NONE; + + tvDisplayMode_t* m_aspectRatio = nullptr; + size_t m_numAspectRatio = 0; + tvContextCaps_t* m_aspectRatioCaps = nullptr; + tvError_t m_aspectRatioStatus = tvERROR_NONE; + + tvDimmingMode_t* m_dimmingModes = nullptr; + size_t m_numdimmingModes = 0; + tvContextCaps_t* m_dimmingModeCaps = nullptr; + tvError_t m_dimmingModeStatus = tvERROR_NONE; + + tvPQModeIndex_t* m_pictureModes = nullptr; + size_t m_numPictureModes = 0; + tvContextCaps_t* m_pictureModeCaps = nullptr; + tvError_t m_pictureModeStatus = tvERROR_NONE; + + tvBacklightMode_t* m_backlightModes = nullptr; + size_t m_numBacklightModes = 0; + tvContextCaps_t* m_backlightModeCaps = nullptr; + tvError_t m_backlightModeStatus = tvERROR_NONE; + + tvSdrGamma_t* m_sdrGammaModes = nullptr; + size_t m_numsdrGammaModes = 0; + tvContextCaps_t* m_sdrGammaModeCaps = nullptr; + tvError_t m_sdrGammaModeStatus = tvERROR_NONE; + + int m_numHalMatrixPoints = 0; + int m_rgbMin = 0; + int m_rgbMax = 0; + int m_numUiMatrixPoints = 0; + double* m_uiMatrixPositions = nullptr; + tvContextCaps_t* m_multiPointWBCaps = nullptr; + tvError_t m_multiPointWBStatus = tvERROR_NONE; + + tvDVCalibrationSettings_t* m_minValues; + tvDVCalibrationSettings_t* m_maxValues; + tvContextCaps_t* m_DVCalibrationCaps = nullptr; + tvError_t 
m_DVCalibrationStatus = tvERROR_NONE; + + int m_maxCmsHue = 0; + int m_maxCmsSaturation = 0; + int m_maxCmsLuma = 0; + size_t m_numColor = 0; + size_t m_numComponent = 0; + tvDataComponentColor_t* m_cmsColorArr; + tvComponentType_t* m_cmsComponentArr; + std::vector m_cmsColorList; + std::vector m_cmsComponentList; + std::unordered_map m_cmsIndexMap; + tvContextCaps_t* m_cmsCaps = nullptr; + tvError_t m_cmsStatus = tvERROR_NONE; + + bool setCMSParam(const JsonObject& parameters); + + std::string convertPictureIndexToStringV2(int pqmode); + std::string convertVideoFormatToStringV2(int format); + std::string convertSourceIndexToStringV2(int source); + + uint32_t generateStorageIdentifierV2(std::string &key, std::string forParam, paramIndex_t info); + void generateStorageIdentifierCMSV2(std::string &key, std::string forParam, paramIndex_t info); + void generateStorageIdentifierWBV2(std::string &key, std::string forParam, paramIndex_t info); AVOutputTV(); ~AVOutputTV(); @@ -391,7 +697,6 @@ class AVOutputTV : public AVOutputBase { void NotifyFilmMakerModeChange(tvContentType_t mode); void NotifyVideoResolutionChange(tvResolutionParam_t resolution); void NotifyVideoFrameRateChange(tvVideoFrameRate_t frameRate); - //override API static void dsHdmiVideoModeEventHandler(const char *owner, IARM_EventId_t eventId, void *data, size_t len); static void dsHdmiStatusEventHandler(const char *owner, IARM_EventId_t eventId, void *data, size_t len); diff --git a/AVOutput/AVOutputTVHelper.cpp b/AVOutput/AVOutputTVHelper.cpp index 527569a9..22bbdd31 100644 --- a/AVOutput/AVOutputTVHelper.cpp +++ b/AVOutput/AVOutputTVHelper.cpp @@ -296,13 +296,13 @@ namespace Plugin { { tvDimmingMode_t index = tvDimmingMode_MAX; - if(mode.compare("Local") == 0 ) { + if(mode.compare("local") == 0 ) { index=tvDimmingMode_Local; } - else if(mode.compare("Fixed") == 0 ) { + else if(mode.compare("fixed") == 0 ) { index=tvDimmingMode_Fixed; } - else if(mode.compare("Global") == 0 ) { + else if(mode.compare("global") == 0 ) { index=tvDimmingMode_Global; } else { @@ -737,7 +737,7 @@ namespace Plugin { GetCurrentVideoSource(¤t_source); tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "."+convertSourceIndexToString(current_source)+"."+"Format."+convertVideoFormatToString(current_format)+"."+"PictureModeString"; + tr181_param_name += "."+convertSourceIndexToStringV2(current_source)+"."+"Format."+convertVideoFormatToStringV2(current_format)+"."+"PictureModeString"; tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); if ( tr181Success == err ) { ret = SetTVPictureMode(param.value); @@ -900,6 +900,59 @@ namespace Plugin { return ret; } + tvError_t AVOutputTV::updateAVoutputTVParamToHALV2(std::string forParam, paramIndex_t indexInfo, int value, bool setNotDelete) + { + tvError_t ret = tvERROR_NONE; + std::string key; + + // Generate storage key based on parameter type + if (forParam == "CMS") + generateStorageIdentifierCMS(key, forParam, indexInfo); + else if (forParam == "WhiteBalance") + generateStorageIdentifierWB(key, forParam, indexInfo); + else + generateStorageIdentifierV2(key, forParam, indexInfo); + + if (key.empty()) { + LOGERR("%s generateStorageIdentifier failed\n", __FUNCTION__); + return tvERROR_GENERAL; + } + + tr181ErrorCode_t err = tr181Success; + + if (setNotDelete) { + std::string toStore = std::to_string(value); + + // Map parameters to their string transformation logic (if applicable) + std::map> fnMap = { + {"ColorTemp", [this](int v, 
std::string& s) { getColorTempStringFromEnum(v, s); }}, + {"DimmingMode", [this](int v, std::string& s) { getDimmingModeStringFromEnum(v, s); }}, + {"AspectRatio", [this](int v, std::string& s) { getDisplayModeStringFromEnum(v, s); }}, + {"BacklightMode", [this](int v, std::string& s) { getBacklightModeStringFromEnum(v, s); }} + }; + + // If there's a custom string conversion for this parameter, apply it + auto it = fnMap.find(forParam); + if (it != fnMap.end()) { + it->second(value, toStore); + } + // Set the value using TR-181 + err = setLocalParam(rfc_caller_id, key.c_str(), toStore.c_str()); + } + else + { + // Delete the value using TR-181 + err = clearLocalParam(rfc_caller_id, key.c_str()); + } + + if (err != tr181Success) { + LOGERR("%s: %s for %s Failed : %s\n",__FUNCTION__, setNotDelete ? "Set" : "Delete", key.c_str(), getTR181ErrorString(err)); + ret = tvERROR_GENERAL; + } + + return ret; + } + tvError_t AVOutputTV::updateAVoutputTVParamToHAL(std::string forParam, paramIndex_t indexInfo, int value,bool setNotDelete) { tvError_t ret = tvERROR_NONE; @@ -913,7 +966,7 @@ namespace Plugin { generateStorageIdentifier(key,forParam,indexInfo); if(key.empty()) { - LOGERR("generateStorageIdentifierDirty failed\n"); + LOGERR("%s generateStorageIdentifierDirty failed\n", __FUNCTION__); ret = tvERROR_GENERAL; } else { @@ -932,7 +985,8 @@ namespace Plugin { err = setLocalParam(rfc_caller_id, key.c_str(),toStore.c_str()); } - else { + else + { err = clearLocalParam(rfc_caller_id, key.c_str()); } @@ -1136,139 +1190,344 @@ namespace Plugin { } return ret; } + void AVOutputTV::syncCMSParamsV2() { + JsonObject parameters; + + // Set default values to "none" to indicate all contexts (global sync) + parameters["pictureMode"] = "none"; + parameters["videoSource"] = "none"; + parameters["videoFormat"] = "none"; + + // Use "Global" to trigger syncing for all CMS components and colors + parameters["color"] = "Global"; + parameters["component"] = "Global"; + + // Dummy PQ index; unused for CMS sync but required by function signature + tvPQParameterIndex_t dummyPQIndex = PQ_PARAM_CMS_SATURATION_RED; + + int result = updateAVoutputTVParamV2("sync", "CMS", parameters, dummyPQIndex, 0); + if (result == 0) { + LOGINFO("%s: CMS sync completed successfully", __FUNCTION__); + } else { + LOGERR("%s: CMS sync encountered errors", __FUNCTION__); + } + } - tvError_t AVOutputTV::syncAvoutputTVParamsToHAL(std::string pqmode,std::string source,std::string format) + tvError_t AVOutputTV::syncAvoutputTVParamsToHAL(std::string pqmode, std::string source, std::string format) { - int level={0}; + int level = {0}; capDetails_t info; info.pqmode = pqmode; info.source = source; info.format = format; - LOGINFO("Entry %s : pqmode : %s source : %s format : %s\n",__FUNCTION__,pqmode.c_str(),source.c_str(),format.c_str()); - - if( !updateAVoutputTVParam("sync","Brightness",info,PQ_PARAM_BRIGHTNESS,level)) { - LOGINFO("Brightness Successfully sync to Drive Cache\n"); - } - else { - LOGERR("Brightness Sync to cache Failed !!!\n"); + JsonObject paramJson; + paramJson["pictureMode"] = info.pqmode; + paramJson["videoSource"] = info.source; + paramJson["videoFormat"] = info.format; + LOGINFO("Entry %s : pqmode : %s source : %s format : %s\n", __FUNCTION__, pqmode.c_str(), source.c_str(), format.c_str()); + + // Brightness + m_brightnessStatus = GetBrightnessCaps(&m_maxBrightness, &m_brightnessCaps); + LOGINFO("GetBrightnessCaps returned status: %d, max: %d", m_brightnessStatus, m_maxBrightness); + if (m_brightnessStatus == 
tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Brightness", info, PQ_PARAM_BRIGHTNESS, level); + } else { + updateAVoutputTVParamV2("sync", "Brightness", paramJson, PQ_PARAM_BRIGHTNESS,level); } - if( !updateAVoutputTVParam("sync","Contrast",info,PQ_PARAM_CONTRAST,level)) { - LOGINFO("Contrast Successfully Synced to Drive Cache\n"); + // Contrast + m_contrastStatus = GetContrastCaps(&m_maxContrast, &m_contrastCaps); + LOGINFO("GetContrastCaps returned status: %d, max: %d", m_contrastStatus, m_maxContrast); + if (m_contrastStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Contrast", info, PQ_PARAM_CONTRAST, level); + } else { + updateAVoutputTVParamV2("sync", "Contrast", paramJson, PQ_PARAM_CONTRAST,level); } - else { - LOGERR("Contrast Sync to cache Failed !!!\n"); + + // Sharpness + m_sharpnessStatus = GetSharpnessCaps(&m_maxSharpness, &m_sharpnessCaps); + LOGINFO("GetSharpnessCaps returned status: %d, max: %d", m_sharpnessStatus, m_maxSharpness); + if (m_sharpnessStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Sharpness", info, PQ_PARAM_SHARPNESS, level); + } else { + updateAVoutputTVParamV2("sync", "Sharpness", paramJson, PQ_PARAM_SHARPNESS, level); } - if( !updateAVoutputTVParam("sync","Sharpness",info,PQ_PARAM_SHARPNESS,level)) { - LOGINFO("Sharpness Successfully Synced to Drive Cache\n"); + // Saturation + m_saturationStatus = GetSaturationCaps(&m_maxSaturation, &m_saturationCaps); + LOGINFO("GetSaturationCaps returned status: %d, max: %d", m_saturationStatus, m_maxSaturation); + if (m_saturationStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Saturation", info, PQ_PARAM_SATURATION, level); + } else { + updateAVoutputTVParamV2("sync", "Saturation", paramJson, PQ_PARAM_SATURATION,level); } - else { - LOGERR("Sharpness Sync to cache Failed !!!\n"); + + // Hue + m_hueStatus = GetHueCaps(&m_maxHue, &m_hueCaps); + LOGINFO("GetHueCaps returned status: %d, max: %d", m_hueStatus, m_maxHue); + if (m_hueStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Hue", info, PQ_PARAM_HUE, level); + } else { + updateAVoutputTVParamV2("sync", "Hue", paramJson, PQ_PARAM_HUE, level); } - if( !updateAVoutputTVParam("sync","Saturation",info,PQ_PARAM_SATURATION,level)) { - LOGINFO("Saturation Successfully Synced to Drive Cache\n"); + // ColorTemperature + m_colorTempStatus = GetColorTemperatureCaps(&m_colortemp, &m_numColortemp, &m_colortempCaps); + LOGINFO("GetColorTemperatureCaps returned status: %d, numColortemp: %d", m_colorTempStatus, m_numColortemp); + if (m_colorTempStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "ColorTemp", info, PQ_PARAM_COLOR_TEMPERATURE, level); + } else { + updateAVoutputTVParamV2("sync", "ColorTemp", paramJson, PQ_PARAM_COLOR_TEMPERATURE,level); } - else { - LOGERR("Saturation Sync to cache Failed !!!\n"); + + // HDRMode + updateAVoutputTVParam("sync", "HDRMode", info, PQ_PARAM_DOLBY_MODE, level); + + // DimmingMode + m_dimmingModeStatus = GetTVDimmingModeCaps(&m_dimmingModes, &m_numdimmingModes, &m_dimmingModeCaps); + LOGINFO("GetTVDimmingModeCaps returned status: %d, numdimmingModes: %d", m_dimmingModeStatus, m_numdimmingModes); + if (m_dimmingModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "DimmingMode", info, PQ_PARAM_DIMMINGMODE, level); + } else { +#if !HAL_NOT_READY + updateAVoutputTVParamV2("sync", "DimmingMode", paramJson, PQ_PARAM_DIMMINGMODE,level); +#endif } - if( 
!updateAVoutputTVParam("sync","Hue",info,PQ_PARAM_HUE,level)) { - LOGINFO("Hue Successfully Synced to Drive Cache\n"); + // Backlight + LOGINFO("Calling GetBacklightCaps..."); + m_backlightStatus = GetBacklightCaps(&m_maxBacklight, &m_backlightCaps); + LOGINFO("GetBacklightCaps returned status: %d, maxBacklight: %d", m_backlightStatus, m_maxBacklight); +#if DEBUG + if (m_backlightCaps) + { + LOGINFO("Backlight caps pointer is valid. Num contexts: %zu", m_backlightCaps->num_contexts); + for (size_t i = 0; i < m_backlightCaps->num_contexts; ++i) { + const auto& context = m_backlightCaps->contexts[i]; + std::string pqModeStr = AVOutputTV::pqModeMap.count(context.pq_mode) ? + AVOutputTV::pqModeMap.at(context.pq_mode) : "Unknown"; + std::string formatStr = AVOutputTV::videoFormatMap.count(context.videoFormatType) ? + AVOutputTV::videoFormatMap.at(context.videoFormatType) : "Unknown"; + std::string srcStr = AVOutputTV::videoSrcMap.count(context.videoSrcType) ? + AVOutputTV::videoSrcMap.at(context.videoSrcType) : "Unknown"; + LOGINFO("Context[%zu]: PQMode = %s (%d), Format = %s (%d), Source = %s (%d)", + i, pqModeStr.c_str(), context.pq_mode, + formatStr.c_str(), context.videoFormatType, + srcStr.c_str(), context.videoSrcType); + } + } else { + LOGWARN("Backlight caps pointer is null."); } - else { - LOGERR("Hue Sync to cache Failed !!!\n"); +#endif + if (m_backlightStatus == tvERROR_OPERATION_NOT_SUPPORTED) { + updateAVoutputTVParam("sync", "Backlight", info, PQ_PARAM_BACKLIGHT, level); + } else { + updateAVoutputTVParamV2("sync", "Backlight", paramJson, PQ_PARAM_BACKLIGHT, level); } - if( !updateAVoutputTVParam("sync","ColorTemp",info,PQ_PARAM_COLOR_TEMPERATURE,level)) { - LOGINFO("ColorTemp Successfully Synced to Drive Cache\n"); + //Ambient Bakclight Mode + m_backlightModeStatus = GetBacklightModeCaps(&m_backlightModes, &m_numBacklightModes, &m_backlightModeCaps); + if (m_backlightModeStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "BacklightMode", paramJson, PQ_PARAM_BACKLIGHT_MODE, level); } - else { - LOGERR("ColorTemp Sync to cache Failed !!!\n"); + + //AspectRatio + m_aspectRatioStatus = GetAspectRatioCaps(&m_aspectRatio, &m_numAspectRatio, &m_aspectRatioCaps); + //LowLatencyState + m_lowLatencyStateStatus = GetLowLatencyStateCaps(&m_maxlowLatencyState, &m_lowLatencyStateCaps); + // PrecisionDetail + m_precisionDetailStatus = GetPrecisionDetailCaps(&m_maxPrecisionDetail, &m_precisionDetailCaps); + //PictureMode + m_pictureModeStatus = GetTVPictureModeCaps(&m_pictureModes, &m_numPictureModes, &m_pictureModeCaps); + + // LocalContrastEnhancement + m_localContrastEnhancementStatus = GetLocalContrastEnhancementCaps(&m_maxLocalContrastEnhancement, &m_localContrastEnhancementCaps); + if (m_localContrastEnhancementStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "LocalContrastEnhancement", paramJson, PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT, level); } - if( !updateAVoutputTVParam("sync","HDRMode",info,PQ_PARAM_DOLBY_MODE,level)) { - LOGINFO("HDRmode Successfully Synced to Drive Cache\n"); + + // MPEGNoiseReduction + m_MPEGNoiseReductionStatus = GetMPEGNoiseReductionCaps(&m_maxMPEGNoiseReduction, &m_MPEGNoiseReductionCaps); + if (m_MPEGNoiseReductionStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "MPEGNoiseReduction", paramJson, PQ_PARAM_MPEG_NOISE_REDUCTION, level); } - else { - LOGERR("HDRmode Sync to cache Failed !!!\n"); + + // DigitalNoiseReduction + m_digitalNoiseReductionStatus = GetDigitalNoiseReductionCaps(&m_maxDigitalNoiseReduction, 
&m_digitalNoiseReductionCaps); + if (m_digitalNoiseReductionStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "DigitalNoiseReduction", paramJson, PQ_PARAM_DIGITAL_NOISE_REDUCTION, level); } - if( !updateAVoutputTVParam("sync","DimmingMode",info,PQ_PARAM_DIMMINGMODE,level)) { - LOGINFO("dimmingmode Successfully Synced to Drive Cache\n"); + // AISuperResolution + m_AISuperResolutionStatus = GetAISuperResolutionCaps(&m_maxAISuperResolution, &m_AISuperResolutionCaps); + if (m_AISuperResolutionStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "AISuperResolution", paramJson, PQ_PARAM_AI_SUPER_RESOLUTION,level); } - else { - LOGERR("dimmingmode Sync to cache Failed !!!\n"); + + // MEMC + m_MEMCStatus = GetMEMCCaps(&m_maxMEMC, &m_MEMCCaps); + if (m_MEMCStatus == tvERROR_NONE) { + updateAVoutputTVParamV2("sync", "MEMC", paramJson, PQ_PARAM_MEMC, level); } - if( !updateAVoutputTVParam("sync","Backlight",info,PQ_PARAM_BACKLIGHT,level) ) { - LOGINFO("Backlight Successfully Synced to Drive Cache\n"); + m_cmsStatus = GetCMSCaps(&m_maxCmsHue, &m_maxCmsSaturation, &m_maxCmsLuma, + &m_cmsColorArr, &m_cmsComponentArr, + &m_numColor, &m_numComponent, &m_cmsCaps); + if (m_cmsStatus == tvERROR_NONE) { + for (size_t i = 0; i < m_numColor; i++) { + std::string colorStr = getCMSColorStringFromEnum(m_cmsColorArr[i]); + m_cmsColorList.push_back(colorStr); + } + for (size_t i = 0; i < m_numComponent; i++) { + std::string componentStr = getCMSComponentStringFromEnum(m_cmsComponentArr[i]); + m_cmsComponentList.push_back(componentStr); + } + syncCMSParamsV2(); } - else { - LOGERR("Backlight Sync to cache Failed !!!\n"); + if(m_cmsStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + syncCMSParams(); } - syncCMSParams(); //sync CMS - syncWBParams(); - info.format = "DV";//Sync only for Dolby - - if( !updateAVoutputTVParam("sync","DolbyVisionMode",info,PQ_PARAM_DOLBY_MODE,level)) { - LOGINFO("dvmode Successfully Synced to Drive Cache\n"); - } - else { - LOGERR("dvmode Sync to cache Failed !!!\n"); - } + // Dolby Vision Mode + info.format = "DV"; // Sync only for Dolby + updateAVoutputTVParam("sync", "DolbyVisionMode", info, PQ_PARAM_DOLBY_MODE, level); - LOGINFO("Exit %s : pqmode : %s source : %s format : %s\n",__FUNCTION__,pqmode.c_str(),source.c_str(),format.c_str()); + LOGINFO("Exit %s : pqmode : %s source : %s format : %s\n", __FUNCTION__, pqmode.c_str(), source.c_str(), format.c_str()); return tvERROR_NONE; } - - int AVOutputTV::syncAvoutputTVPQModeParamsToHAL(std::string pqmode, std::string source, std::string format) + int AVOutputTV::syncAvoutputTVPQModeParamsToHALV2(std::string pqmode, std::string source, std::string format) { - capDetails_t inputInfo; - valueVectors_t valueVectors; tr181ErrorCode_t err = tr181Success; TR181_ParamData_t param = {0}; - int ret = 0; + bool contextSynced = false; + + // Treat "none" as "Global" + if (source == "none") + source = "Global"; + if (format == "none") + format = "Global"; + + // Handle "Current" source/format substitution + if (source == "Current" || format == "Current") { + tvVideoSrcType_t currentSrc = VIDEO_SOURCE_IP; + tvVideoFormatType_t currentFmt = VIDEO_FORMAT_SDR; + GetCurrentVideoSource(¤tSrc); + GetCurrentVideoFormat(¤tFmt); + if (currentFmt == VIDEO_FORMAT_NONE) + currentFmt = VIDEO_FORMAT_SDR; + + if (source == "Current") + source = convertSourceIndexToStringV2(currentSrc); + if (format == "Current") + format = convertVideoFormatToStringV2(currentFmt); + } + if (m_pictureModeStatus == tvERROR_NONE) + { + for (size_t i = 0; i < 
m_pictureModeCaps->num_contexts; ++i) { + const tvConfigContext_t& ctx = m_pictureModeCaps->contexts[i]; - inputInfo.pqmode = pqmode; - inputInfo.source = source; - inputInfo.format = format; + std::string sourceStr = convertSourceIndexToStringV2(ctx.videoSrcType); + std::string formatStr = convertVideoFormatToStringV2(ctx.videoFormatType); + + // Filter by provided source/format + if (source != "Global" && source != sourceStr) + continue; + if (format != "Global" && format != formatStr) + continue; + + std::string tr181Param = std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM) + + "." + sourceStr + ".Format." + formatStr + ".PictureModeString"; + + err = getLocalParam(rfc_caller_id, tr181Param.c_str(), ¶m); + if (err != tr181Success) { + LOGWARN("Failed to getLocalParam for %s\n", tr181Param.c_str()); + continue; + } + + std::string modeStr = param.value; + int modeIndex = -1; + for (size_t i = 0; i < m_numPictureModes; ++i) { + if (pqModeMap.at(m_pictureModes[i]) == modeStr) { + modeIndex = static_cast(i); + break; + } + } +#if DEBUG + LOGINFO("Got mode string from TR181: %s -> index=%d", modeStr.c_str(), modeIndex); +#endif + tvError_t tv_err = SaveSourcePictureMode(ctx.videoSrcType, ctx.videoFormatType, modeIndex); + if (tv_err != tvERROR_NONE) { + LOGWARN("Failed SaveSourcePictureMode for %s / %s\n", sourceStr.c_str(), formatStr.c_str()); + continue; + } + + contextSynced = true; + } - ret = getSaveConfig("PictureMode", inputInfo, valueVectors); - - if (ret == 0 ) { - for (int source : valueVectors.sourceValues ) { - tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; - for (int format : valueVectors.formatValues ) { - tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; - std::string tr181_param_name = ""; - tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ - convertVideoFormatToString(formatType)+"."+"PictureModeString"; - - err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); - if ( tr181Success == err ) { - std::string local = param.value; - int pqmodeindex = (int)getPictureModeIndex(local); - - tvError_t tv_err = SaveSourcePictureMode(sourceType, formatType, pqmodeindex); - if (tv_err != tvERROR_NONE) { - LOGWARN("failed to SaveSourcePictureMode \n"); + if (!contextSynced) { + LOGWARN("No matching context synced for pqmode=%s source=%s format=%s\n", + pqmode.c_str(), source.c_str(), format.c_str()); + return -1; + } + return 0; + } + return -1; + } + + int AVOutputTV::syncAvoutputTVPQModeParamsToHAL(std::string pqmode, std::string source, std::string format) + { + if (m_pictureModeStatus == tvERROR_OPERATION_NOT_SUPPORTED) + { + capDetails_t inputInfo; + valueVectors_t valueVectors; + tr181ErrorCode_t err = tr181Success; + TR181_ParamData_t param = {0}; + int ret = 0; + + inputInfo.pqmode = pqmode; + inputInfo.source = source; + inputInfo.format = format; + + ret = getSaveConfig("PictureMode", inputInfo, valueVectors); + + if (ret == 0 ) { + for (int source : valueVectors.sourceValues ) { + tvVideoSrcType_t sourceType = (tvVideoSrcType_t)source; + for (int format : valueVectors.formatValues ) { + tvVideoFormatType_t formatType = (tvVideoFormatType_t)format; + std::string tr181_param_name = ""; + tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); + tr181_param_name += "."+convertSourceIndexToString(sourceType)+"."+"Format."+ + convertVideoFormatToString(formatType)+"."+"PictureModeString"; + + err = 
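/*
 * Example of the per-context TR-181 lookup performed just below, assuming the
 * capability context resolves to source HDMI1 / format SDR (values are
 * illustrative, the macro is kept symbolic):
 *
 *   AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM + ".HDMI1.Format.SDR.PictureModeString"
 *
 * The stored mode string (e.g. "Standard") is then mapped back to its index via
 * pqModeMap before SaveSourcePictureMode() is called for that context.
 */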
getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); + if ( tr181Success == err ) { + std::string local = param.value; + int pqmodeindex = (int)getPictureModeIndex(local); + + tvError_t tv_err = SaveSourcePictureMode(sourceType, formatType, pqmodeindex); + if (tv_err != tvERROR_NONE) { + LOGWARN("failed to SaveSourcePictureMode \n"); + return -1; + } + } + else { + LOGWARN("Failed to get the getLocalParam \n"); return -1; } } - else { - LOGWARN("Failed to get the getLocalParam \n"); - return -1; - } } } + return ret; + } + else + { + return syncAvoutputTVPQModeParamsToHALV2(pqmode,source,format); } - return ret; } uint32_t AVOutputTV::generateStorageIdentifier(std::string &key, std::string forParam, paramIndex_t info) @@ -1439,7 +1698,7 @@ namespace Plugin { } else if( forParam.compare("WhiteBalance") == 0 ) { generateStorageIdentifierWB(key,forParam,indexInfo); } else { - generateStorageIdentifier(key,forParam,indexInfo); + generateStorageIdentifierV2(key,forParam,indexInfo); } if(key.empty()) { @@ -1480,6 +1739,21 @@ namespace Plugin { } return 0; } + else if ( forParam.compare("BacklightMode") == 0 ) { + if (strncmp(param.value, "Manual", strlen(param.value)) == 0) { + value = tvBacklightMode_MANUAL; + } + else if (strncmp(param.value, "Ambient", strlen(param.value)) == 0) { + value = tvBacklightMode_AMBIENT; + } + else if (strncmp(param.value, "Eco", strlen(param.value)) == 0) { + value = tvBacklightMode_ECO; + } + else { + value = tvBacklightMode_MANUAL; // Default fallback + } + return 0; + } else if ( forParam.compare("DolbyVisionMode") == 0) { if (strncmp(param.value, "Dark", strlen(param.value)) == 0) { value = tvDolbyMode_Dark; @@ -1611,13 +1885,13 @@ namespace Plugin { void AVOutputTV::getDimmingModeStringFromEnum(int value, std::string &toStore) { - const char *color_temp_string[] = { + const char *dimmingmode_string[] = { [tvDimmingMode_Fixed] = "Fixed", [tvDimmingMode_Local] = "Local", [tvDimmingMode_Global] = "Global", }; toStore.clear(); - toStore+=color_temp_string[value]; + toStore+=dimmingmode_string[value]; } void AVOutputTV::getColorTempStringFromEnum(int value, std::string &toStore) @@ -1631,6 +1905,44 @@ namespace Plugin { toStore.clear(); toStore+=color_temp_string[value]; } + void AVOutputTV::getDisplayModeStringFromEnum(int value, std::string &toStore) + { + static const char* display_mode_string[] = { + [tvDisplayMode_4x3] = "TV 4X3 PILLARBOX", + [tvDisplayMode_16x9] = "TV 16X9 STRETCH", + [tvDisplayMode_FULL] = "TV FULL", + [tvDisplayMode_NORMAL] = "TV NORMAL", + [tvDisplayMode_AUTO] = "TV AUTO", + [tvDisplayMode_DIRECT] = "TV DIRECT", + [tvDisplayMode_ZOOM] = "TV ZOOM" + }; + + toStore.clear(); + if (value >= 0 && value < tvDisplayMode_MAX && display_mode_string[value]) { + toStore += display_mode_string[value]; + } else { + toStore += "TV AUTO"; + } + } + + void AVOutputTV::getBacklightModeStringFromEnum(int value, std::string& toStore) + { + toStore.clear(); + switch (static_cast(value)) { + case tvBacklightMode_MANUAL: + toStore = "Manual"; + break; + case tvBacklightMode_AMBIENT: + toStore = "Ambient"; + break; + case tvBacklightMode_ECO: + toStore = "Eco"; + break; + default: + toStore = "Unknown"; + break; + } + } int AVOutputTV::getCurrentPictureMode(char *picMode) { @@ -1652,15 +1964,14 @@ namespace Plugin { } tr181_param_name += std::string(AVOUTPUT_SOURCE_PICTUREMODE_STRING_RFC_PARAM); - tr181_param_name += "." + convertSourceIndexToString(currentSource) + "." 
+ "Format."+convertVideoFormatToString(current_format)+"."+"PictureModeString"; + tr181_param_name += "." + convertSourceIndexToStringV2(currentSource) + "." + "Format."+convertVideoFormatToStringV2(current_format)+"."+"PictureModeString"; memset(¶m, 0, sizeof(param)); tr181ErrorCode_t err = getLocalParam(rfc_caller_id, tr181_param_name.c_str(), ¶m); if ( err == tr181Success ) { strncpy(picMode, param.value, strlen(param.value)+1); - picMode[strlen(param.value)] = '\0'; - LOGINFO("getLocalParam success, mode = %s\n", picMode); + //LOGINFO("getLocalParam success, mode = %s\n", picMode); return 1; } else { @@ -1804,7 +2115,7 @@ namespace Plugin { { tvError_t ret = tvERROR_GENERAL; #if !defined (HDMIIN_4K_ZOOM) - LOGERR("%s:mode selected is: %d", __FUNCTION__, m_videoZoomMode); + LOGINFO("%s:mode selected is: %d", __FUNCTION__, m_videoZoomMode); if (AVOutputTV::instance->m_isDisabledHdmiIn4KZoom) { if (!(AVOutputTV::instance->m_currentHdmiInResolutonm_currentHdmiInResoluton))) { @@ -2207,7 +2518,7 @@ namespace Plugin { case tvColorTemp_WARM: return "Warm"; case tvColorTemp_COLD: return "Cold"; case tvColorTemp_USER : return "UserDefined"; - default : return "Max"; + default : return "Standard"; } } @@ -2265,92 +2576,1694 @@ namespace Plugin { } return 0; } +//JSON Based V2 Helpers + const std::map AVOutputTV::pqModeMap = { + {PQ_MODE_SPORTS, "Sports"}, + {PQ_MODE_THEATER, "Theater"}, + {PQ_MODE_GAME, "Game"}, + {PQ_MODE_IQ, "IQ"}, + {PQ_MODE_DARK, "Dark"}, + {PQ_MODE_BRIGHT, "Bright"}, + {PQ_MODE_AIPQ, "AI PQ"}, + {PQ_MODE_STANDARD, "Standard"}, + {PQ_MODE_VIVID, "Vivid"}, + {PQ_MODE_ENERGY_SAVING, "EnergySaving"}, + {PQ_MODE_CUSTOM, "Custom"} + }; + + const std::map AVOutputTV::videoFormatMap = { + {VIDEO_FORMAT_NONE, "None"}, + {VIDEO_FORMAT_SDR, "SDR"}, + {VIDEO_FORMAT_HDR10, "HDR10"}, + {VIDEO_FORMAT_HDR10PLUS, "HDR10Plus"}, + {VIDEO_FORMAT_DV, "DV"}, + {VIDEO_FORMAT_HLG, "HLG"} + }; + + const std::map AVOutputTV::videoSrcMap = { + {VIDEO_SOURCE_COMPOSITE1, "Composite1"}, + {VIDEO_SOURCE_HDMI1, "HDMI1"}, + {VIDEO_SOURCE_HDMI2, "HDMI2"}, + {VIDEO_SOURCE_HDMI3, "HDMI3"}, + {VIDEO_SOURCE_HDMI4, "HDMI4"}, + {VIDEO_SOURCE_IP, "IP"}, + {VIDEO_SOURCE_TUNER, "Tuner"} + }; + const std::unordered_map AVOutputTV::backlightModeMap = { + {tvBacklightMode_MANUAL, "Manual"}, + {tvBacklightMode_AMBIENT, "Ambient"}, + {tvBacklightMode_ECO, "Eco"} + }; + + std::unordered_map AVOutputTV::pqModeReverseMap; + std::unordered_map AVOutputTV::videoFormatReverseMap; + std::unordered_map AVOutputTV::videoSrcReverseMap; + bool AVOutputTV::reverseMapsInitialized = false; + + void AVOutputTV::initializeReverseMaps() { + if (reverseMapsInitialized) return; + + for (const auto& entry : pqModeMap) { + pqModeReverseMap[entry.second] = static_cast(entry.first); + } + for (const auto& entry : videoFormatMap) { + videoFormatReverseMap[entry.second] = static_cast(entry.first); + } + for (const auto& entry : videoSrcMap) { + videoSrcReverseMap[entry.second] = static_cast(entry.first); + } + reverseMapsInitialized = true; + } - int AVOutputTV::ReadCapablitiesFromConf(std::string param, capDetails_t& info) - { - int ret = 0; + const std::unordered_map AVOutputTV::backlightModeReverseMap = []{ + std::unordered_map m; + for (const auto& pair : AVOutputTV::backlightModeMap) m[pair.second] = pair.first; + return m; + }(); - /*Consider User WhiteBalance as CustomWhiteBalance - To avoid clash with Factory WhiteBalance Calibration capablities*/ + std::string AVOutputTV::convertSourceIndexToStringV2(int source) { + auto it = 
videoSrcMap.find(source); + return (it != videoSrcMap.end()) ? it->second : ""; + } - if ( param == "WhiteBalance") { - param = "CustomWhiteBalance"; - } else if ( param == "AutoBacklightMode") { - param = "BacklightControl"; + std::string AVOutputTV::convertVideoFormatToStringV2(int format) { + auto it = videoFormatMap.find(format); + return (it != videoFormatMap.end()) ? it->second : ""; + } + + std::string AVOutputTV::convertPictureIndexToStringV2(int pqmode) { + auto it = pqModeMap.find(pqmode); + return (it != pqModeMap.end()) ? it->second : ""; + } + uint32_t AVOutputTV::generateStorageIdentifierV2(std::string &key, std::string forParam, paramIndex_t info) + { + key += AVOUTPUT_GENERIC_STRING_RFC_PARAM; + key += STRING_SOURCE + convertSourceIndexToStringV2(info.sourceIndex) + "." + + STRING_PICMODE + convertPictureIndexToStringV2(info.pqmodeIndex) + "." + + STRING_FORMAT + convertVideoFormatToStringV2(info.formatIndex) + "." + + forParam; + return tvERROR_NONE; + } + + bool AVOutputTV::isValidSource(const std::vector& sourceArray, tvVideoSrcType_t sourceIndex) + { + // If "Current" is passed, match the current source + if (std::find(sourceArray.begin(), sourceArray.end(), "Current") != sourceArray.end()) { + tvVideoSrcType_t currentSource = VIDEO_SOURCE_IP; + GetCurrentVideoSource(¤tSource); + return (sourceIndex == currentSource); } - try { - CIniFile inFile(CAPABLITY_FILE_NAME); - std::string configString; + // Match against specific source strings + const std::string srcStr = convertSourceIndexToStringV2(sourceIndex); + return std::find(sourceArray.begin(), sourceArray.end(), srcStr) != sourceArray.end(); + } - if(param == "CMS") - { - configString = param + ".color"; - info.color = inFile.Get(configString); + bool AVOutputTV::isValidFormat(const std::vector& formatArray, tvVideoFormatType_t formatIndex) + { + // If "Current" is passed, match the current format + if (std::find(formatArray.begin(), formatArray.end(), "Current") != formatArray.end()) { + tvVideoFormatType_t currentFormat = VIDEO_FORMAT_NONE; + GetCurrentVideoFormat(¤tFormat); + return (formatIndex == currentFormat); + } - configString = param + ".component"; - info.component = inFile.Get(configString); + // Match against specific format strings + const std::string fmtStr = convertVideoFormatToStringV2(formatIndex); + return std::find(formatArray.begin(), formatArray.end(), fmtStr) != formatArray.end(); + } + + tvConfigContext_t AVOutputTV::getValidContextFromGetParameters(const JsonObject& parameters, const std::string& paramName) + { + tvConfigContext_t validContext = {PQ_MODE_INVALID, VIDEO_FORMAT_NONE, VIDEO_SOURCE_ALL}; + // Picture Mode + std::string pictureModeStr; + //"Current", empty string, or missing key as a cue to fetch system values + if (!parameters.HasLabel("pictureMode") || + (pictureModeStr = parameters["pictureMode"].String()).empty() || + pictureModeStr == "Current") + { + char picMode[PIC_MODE_NAME_MAX] = {0}; + getCurrentPictureMode(picMode); + std::string pictureModeStr(picMode); + LOGINFO("Current Picture Mode: %s", picMode); + validContext.pq_mode = static_cast( + pqModeReverseMap.count(pictureModeStr) ? pqModeReverseMap.at(pictureModeStr) : PQ_MODE_INVALID + ); + } + else + { + validContext.pq_mode = static_cast( + pqModeReverseMap.count(pictureModeStr) ? 
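/*
 * Example of the storage key generateStorageIdentifierV2() (above) composes for
 * a Brightness entry, assuming the context source=HDMI1, pictureMode=Standard,
 * format=SDR (macro values kept symbolic):
 *
 *   AVOUTPUT_GENERIC_STRING_RFC_PARAM
 *     + STRING_SOURCE  + "HDMI1"    + "."
 *     + STRING_PICMODE + "Standard" + "."
 *     + STRING_FORMAT  + "SDR"      + "."
 *     + "Brightness"
 */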
pqModeReverseMap.at(pictureModeStr) : PQ_MODE_INVALID + ); + } + // Video Format + std::string videoFormatStr; + if (!parameters.HasLabel("videoFormat") || + (videoFormatStr = parameters["videoFormat"].String()).empty() || + videoFormatStr == "Current") + { + GetCurrentVideoFormat(&validContext.videoFormatType); + } + else + { + validContext.videoFormatType = static_cast( + videoFormatReverseMap.count(videoFormatStr) ? videoFormatReverseMap.at(videoFormatStr) : VIDEO_FORMAT_NONE + ); + } + // Video Source + std::string videoSourceStr; + if (!parameters.HasLabel("videoSource") || + (videoSourceStr = parameters["videoSource"].String()).empty() || + videoSourceStr == "Current") + { + GetCurrentVideoSource(&validContext.videoSrcType); + } + else + { + validContext.videoSrcType = static_cast( + videoSrcReverseMap.count(videoSourceStr) ? videoSrcReverseMap.at(videoSourceStr) : VIDEO_SOURCE_ALL + ); + } + tvContextCaps_t* caps = getCapsForParam(paramName); + LOGINFO("Looking for context: PQMode=%d, Format=%d, Source=%d", + validContext.pq_mode, validContext.videoFormatType, validContext.videoSrcType); + // Match context if caps exist + if (caps && caps->num_contexts > 0) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + const tvConfigContext_t& available = caps->contexts[i]; +#if DEBUG + LOGINFO("Context[%zu]: PQMode=%d, Format=%d, Source=%d", i, + available.pq_mode, available.videoFormatType, available.videoSrcType); +#endif + if (available.videoSrcType == validContext.videoSrcType && + available.videoFormatType == validContext.videoFormatType && + available.pq_mode == validContext.pq_mode) { + return available; // valid context found + } } + } + LOGWARN("No valid context found for %s with provided parameters", paramName.c_str()); + validContext = {PQ_MODE_INVALID, VIDEO_FORMAT_NONE, VIDEO_SOURCE_ALL}; + return validContext; + } - if(param == "CustomWhiteBalance") - { - configString = param + ".color"; - info.color = inFile.Get(configString); + bool AVOutputTV::isGlobalParam(const JsonArray& arr) { + return (arr.Length() == 0) || + (arr.Length() == 1 && ( + arr[0].String() == "Global" || arr[0].String() == "none")); + } - configString = param + ".control"; - info.control = inFile.Get(configString); + std::vector AVOutputTV::extractPQModes(const JsonObject& parameters) { + initializeReverseMaps(); - } + std::vector pqModes; + if (!parameters.HasLabel("pictureMode")) { + return pqModes; + } - if ((param == "DolbyVisionMode") || (param == "Backlight") || (param == "CMS") || (param == "CustomWhiteBalance") || (param == "HDRMode") || (param == "BacklightControl")) { - configString = param + ".platformsupport"; - info.isPlatformSupport = inFile.Get(configString); - printf(" platformsupport : %s\n",info.isPlatformSupport.c_str() ); + JsonArray pqmodeArray = parameters["pictureMode"].Array(); + pqModes.reserve(pqmodeArray.Length()); // Pre-allocate + + for (uint32_t i = 0; i < pqmodeArray.Length(); ++i) { + std::string modeStr = pqmodeArray[i].String(); + + if (modeStr == "Current") { + char picMode[PIC_MODE_NAME_MAX] = {0}; + if (getCurrentPictureMode(picMode)) { + auto it = pqModeReverseMap.find(std::string(picMode)); + if (it != pqModeReverseMap.end()) { + pqModes.push_back(it->second); + } + } + } else { + auto it = pqModeReverseMap.find(modeStr); + if (it != pqModeReverseMap.end()) { + pqModes.push_back(it->second); + } } + } + return pqModes; + } - if ( (param == "ColorTemperature") || (param == "DimmingMode") || - ( param == "BacklightControl") || (param == "DolbyVisionMode") || - (param == 
"HDR10Mode") || (param == "HLGMode") || (param == "AspectRatio") || - (param == "PictureMode") || (param == "VideoSource") || (param == "VideoFormat") || - (param == "VideoFrameRate") || (param == "HDRMode") ) { - configString = param + ".range"; - info.range = inFile.Get(configString); - printf(" String Range info : %s\n",info.range.c_str() ); - } else if ( (param == "CMS" )) { - configString.clear(); - configString = param + ".range_Saturation_from"; - info.range = inFile.Get(configString); - configString = param + ".range_Saturation_to"; - info.range += ","+inFile.Get(configString); + std::vector AVOutputTV::extractVideoSources(const JsonObject& parameters) { + initializeReverseMaps(); - configString = param + ".range_Hue_from"; - info.range += ","+inFile.Get(configString); - configString = param + ".range_Hue_to"; - info.range += ","+inFile.Get(configString); + std::vector sources; + if (!parameters.HasLabel("videoSource")) { + return sources; + } - configString = param + ".range_Luma_from"; - info.range += ","+inFile.Get(configString); - configString = param + ".range_Luma_to"; - info.range += ","+inFile.Get(configString); - } else if ( (param == "CustomWhiteBalance")) { - configString = param + ".range_Gain_from"; - info.range = inFile.Get(configString); - configString = param + ".range_Gain_to"; - info.range += ","+inFile.Get(configString); + JsonArray sourceArray = parameters["videoSource"].Array(); + sources.reserve(sourceArray.Length()); // Pre-allocate - configString = param + ".range_Offset_from"; - info.range += ","+inFile.Get(configString); - configString = param + ".range_Offset_to"; - info.range += ","+inFile.Get(configString); + for (uint32_t i = 0; i < sourceArray.Length(); ++i) { + std::string srcStr = sourceArray[i].String(); + + if (srcStr == "Current") { + tvVideoSrcType_t sourceIndex = VIDEO_SOURCE_IP; + if (GetCurrentVideoSource(&sourceIndex) == tvERROR_NONE) { + sources.push_back(sourceIndex); + } } else { - configString = param + ".range_from"; - info.range = inFile.Get(configString); - configString = param + ".range_to"; - info.range += ","+inFile.Get(configString); - printf(" Integer Range Info : %s\n",info.range.c_str() ); + auto it = videoSrcReverseMap.find(srcStr); + if (it != videoSrcReverseMap.end()) { + sources.push_back(it->second); + } } + } + return sources; + } - if ((param == "VideoSource") || (param == "PictureMode") || (param == "VideoFormat") ) { + std::vector AVOutputTV::extractVideoFormats(const JsonObject& parameters) { + initializeReverseMaps(); + + std::vector formats; + if (!parameters.HasLabel("videoFormat")) { + return formats; + } + + JsonArray formatArray = parameters["videoFormat"].Array(); + formats.reserve(formatArray.Length()); // Pre-allocate + + for (uint32_t i = 0; i < formatArray.Length(); ++i) { + std::string fmtStr = formatArray[i].String(); + + if (fmtStr == "Current") { + tvVideoFormatType_t formatIndex = VIDEO_FORMAT_NONE; + GetCurrentVideoFormat(&formatIndex); + if (formatIndex == VIDEO_FORMAT_NONE) { + formatIndex = VIDEO_FORMAT_SDR; + } + formats.push_back(formatIndex); + } else { + auto it = videoFormatReverseMap.find(fmtStr); + if (it != videoFormatReverseMap.end()) { + formats.push_back(it->second); + } + } + } + return formats; + } + + JsonArray AVOutputTV::getJsonArrayIfArray(const JsonObject& obj, const std::string& key) { + return (obj.HasLabel(key.c_str()) && obj[key.c_str()].Content() == JsonValue::type::ARRAY) + ? 
obj[key.c_str()].Array() + : JsonArray(); // returns empty array + } + + tvContextCaps_t* AVOutputTV::getCapsForParam(const std::string& paramName) + { + tvContextCaps_t* caps = nullptr; + if (paramName == "Backlight") caps = m_backlightCaps; + else if (paramName == "Brightness") caps = m_brightnessCaps; + else if (paramName == "Contrast") caps = m_contrastCaps; + else if (paramName == "Sharpness") caps = m_sharpnessCaps; + else if (paramName == "Saturation") caps = m_saturationCaps; + else if (paramName == "Hue") caps = m_hueCaps; + else if (paramName == "ColorTemp") caps = m_colortempCaps; + else if (paramName == "DimmingMode") caps = m_dimmingModeCaps; + else if (paramName == "PictureMode") caps = m_pictureModeCaps; + else if (paramName == "AspectRatio") caps = m_aspectRatioCaps; + else if (paramName == "LowLatencyState") caps = m_lowLatencyStateCaps; + else if (paramName == "PrecisionDetail") caps = m_precisionDetailCaps; + else if (paramName == "LocalContrastEnhancement") caps = m_localContrastEnhancementCaps; + else if (paramName == "MPEGNoiseReduction") caps = m_MPEGNoiseReductionCaps; + else if (paramName == "DigitalNoiseReduction") caps = m_digitalNoiseReductionCaps; + else if (paramName == "AISuperResolution") caps = m_AISuperResolutionCaps; + else if (paramName == "MEMC") caps = m_MEMCCaps; + else if (paramName == "BacklightMode") caps = m_backlightModeCaps; + else if (paramName == "CMS") caps = m_cmsCaps; + else { + LOGERR("Unknown ParamName: %s", paramName.c_str()); + return nullptr; + } + // Fallback to global pictureModeCaps if cap is empty + if (!caps || caps->num_contexts == 0) + caps = m_pictureModeCaps; + + return caps; + } + std::string AVOutputTV::getCurrentPictureModeAsString() { + char picMode[PIC_MODE_NAME_MAX] = {0}; + if (!getCurrentPictureMode(picMode)) { + LOGERR("Failed to get current picture mode"); + return ""; + } + return picMode; + } + + std::string AVOutputTV::getCurrentVideoSourceAsString() { + tvVideoSrcType_t sourceIndex = VIDEO_SOURCE_IP; + if (GetCurrentVideoSource(&sourceIndex) != tvERROR_NONE) { + LOGERR("GetCurrentVideoSource failed"); + return ""; + } + return convertSourceIndexToStringV2(sourceIndex); + } + + std::string AVOutputTV::getCurrentVideoFormatAsString() { + tvVideoFormatType_t formatIndex = VIDEO_FORMAT_NONE; + if (GetCurrentVideoFormat(&formatIndex) != tvERROR_NONE || formatIndex == VIDEO_FORMAT_NONE) { + formatIndex = VIDEO_FORMAT_SDR; + } + return convertVideoFormatToStringV2(formatIndex); + } + + bool AVOutputTV::isSetRequiredForParam(const JsonObject& parameters, const std::string& paramName) + { + // Get current state once + const std::string curPicMode = getCurrentPictureModeAsString(); + const std::string curSource = getCurrentVideoSourceAsString(); + const std::string curFormat = getCurrentVideoFormatAsString(); + + // Helper to resolve a parameter to a list of effective values + auto resolveParam = [&](const std::string& label, const std::string& currentValue) -> std::vector { + std::vector result; + + if (!parameters.HasLabel(label.c_str())){ + result.push_back(currentValue); + return result; + } + + const auto& array = parameters[label.c_str()].Array(); + if (array.Length() == 0){ + result.push_back(currentValue); + return result; + } + + for (uint16_t i = 0; i < array.Length(); ++i) { + const std::string val = array[i].String(); + if (val == "Current" || val == "Global" || val == "none") { + result.push_back(currentValue); + } else { + result.push_back(val); + } + } + return result; + }; + + // Resolve all + const auto 
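+        // Resolution example (illustrative): with the current video source "IP", a request of
+        // videoSource = ["Current", "HDMI1"] resolves to ["IP", "HDMI1"]; a missing label, an
+        // empty array, or the tokens "Global"/"none" all collapse to the current value.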
resolvedPicModes = resolveParam("pictureMode", curPicMode); + const auto resolvedFormats = resolveParam("videoFormat", curFormat); + const auto resolvedSources = resolveParam("videoSource", curSource); +#if DEBUG + // Helper function to log vector content + auto logResolvedValues = [&](const std::string& label, const std::vector& values) { + std::string joined; + for (const auto& val : values) { + if (!joined.empty()) joined += ", "; + joined += val; + } + LOGINFO("Resolved %s: [%s]", label.c_str(), joined.c_str()); + }; + + // Debug logs + logResolvedValues("pictureMode", resolvedPicModes); + logResolvedValues("videoSource", resolvedSources); + logResolvedValues("videoFormat", resolvedFormats); +#endif + + // Check if current combination exists in resolved sets + for (const auto& pm : resolvedPicModes) { + if (pm != curPicMode) continue; + + for (const auto& fmt : resolvedFormats) { + if (fmt != curFormat) continue; + + for (const auto& src : resolvedSources) { + if (src == curSource) { + tvContextCaps_t* caps = getCapsForParam(paramName); + if (!caps) { + LOGERR("No caps found for param: %s", paramName.c_str()); + return false; + } + for (size_t i = 0; i < caps->num_contexts; ++i) { + const tvConfigContext_t& ctx = caps->contexts[i]; + std::string capPicMode = convertPictureIndexToStringV2(ctx.pq_mode); + std::string capSource = convertSourceIndexToStringV2(ctx.videoSrcType); + std::string capFormat = convertVideoFormatToStringV2(ctx.videoFormatType); + if ((capPicMode == curPicMode) && + (capSource == curSource) && + (capFormat == curFormat)) + { + // Log the matched combination + LOGINFO("isSetRequiredForParam: matched combination - pictureMode: %s, videoFormat: %s, videoSource: %s", + pm.c_str(), fmt.c_str(), src.c_str()); + return true; + } + } + } + } + } + } + + return false; + } + std::string AVOutputTV::getCMSNameFromEnum(tvDataComponentColor_t colorEnum) + { + switch (colorEnum) { + case tvDataColor_RED: return "Red"; + case tvDataColor_GREEN: return "Green"; + case tvDataColor_BLUE: return "Blue"; + case tvDataColor_CYAN: return "Cyan"; + case tvDataColor_YELLOW: return "Yellow"; + case tvDataColor_MAGENTA: return "Magenta"; + default: return "Unknown"; + } + } + std::vector AVOutputTV::getValidContextsFromParameters(const JsonObject& parameters, const std::string& tr181ParamName) + { + std::vector validContexts; + tvContextCaps_t* caps = getCapsForParam(tr181ParamName); + + if (caps == nullptr || caps->contexts == nullptr) { + LOGWARN("Caps or contexts is null for parameter: %s", tr181ParamName.c_str()); + return validContexts; + } + + // Create a hash set of available contexts for O(1) lookup instead of O(n) linear search + std::unordered_set availableContextsSet; + for (size_t i = 0; i < caps->num_contexts; ++i) { + const auto& ctx = caps->contexts[i]; + std::string key = std::to_string(ctx.pq_mode) + "_" + + std::to_string(ctx.videoFormatType) + "_" + + std::to_string(ctx.videoSrcType); + availableContextsSet.insert(key); + } + + JsonArray pqmodeArray = getJsonArrayIfArray(parameters, "pictureMode"); + JsonArray sourceArray = getJsonArrayIfArray(parameters, "videoSource"); + JsonArray formatArray = getJsonArrayIfArray(parameters, "videoFormat"); + + std::vector pqModes = extractPQModes(parameters); + std::vector sources = extractVideoSources(parameters); + std::vector formats = extractVideoFormats(parameters); + + // Handle global parameters - collect unique values to avoid duplicates + std::unordered_set pqModeSet(pqModes.begin(), pqModes.end()); + std::unordered_set 
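+        // The unordered_sets de-duplicate repeated request entries; "Global" (or an empty
+        // array) is expanded below to every value present in the capability contexts rather
+        // than to every possible enum value.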
sourceSet(sources.begin(), sources.end()); + std::unordered_set formatSet(formats.begin(), formats.end()); + + if (isGlobalParam(pqmodeArray)) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + pqModeSet.insert(caps->contexts[i].pq_mode); + } + } + if (isGlobalParam(sourceArray)) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + sourceSet.insert(caps->contexts[i].videoSrcType); + } + } + if (isGlobalParam(formatArray)) { + for (size_t i = 0; i < caps->num_contexts; ++i) { + formatSet.insert(caps->contexts[i].videoFormatType); + } + } + + if (pqModeSet.empty() || sourceSet.empty() || formatSet.empty()) { + LOGWARN("One or more parameter sets are empty: PQModes[%zu], Sources[%zu], Formats[%zu]", + pqModeSet.size(), sourceSet.size(), formatSet.size()); + return validContexts; + } + + std::unordered_set seenContexts; + validContexts.reserve(pqModeSet.size() * sourceSet.size() * formatSet.size()); // Pre-allocate memory + + // Generate contexts and check validity in single pass + for (const auto& pq : pqModeSet) { + for (const auto& fmt : formatSet) { + for (const auto& src : sourceSet) { + std::string contextKey = std::to_string(pq) + "_" + + std::to_string(fmt) + "_" + + std::to_string(src); + + if (seenContexts.find(contextKey) != seenContexts.end()) { + continue; + } + + if (availableContextsSet.find(contextKey) != availableContextsSet.end()) { + tvConfigContext_t testCtx = { pq, fmt, src }; + validContexts.push_back(testCtx); + seenContexts.insert(contextKey); + } + } + } + } + + // Sort only if we have results to sort + if (!validContexts.empty()) { + std::sort(validContexts.begin(), validContexts.end(), + [](const tvConfigContext_t& a, const tvConfigContext_t& b) { + return std::tie(a.pq_mode, a.videoFormatType, a.videoSrcType) < + std::tie(b.pq_mode, b.videoFormatType, b.videoSrcType); + }); + } + + return validContexts; + } + + int AVOutputTV::updateAVoutputTVParamV2(std::string action, std::string tr181ParamName, + const JsonObject& parameters, + tvPQParameterIndex_t pqParamIndex,int level) + { +#if DEBUG + LOGINFO("Entry %s: Action: %s, Param: %s, Level: %d", __FUNCTION__, action.c_str(), tr181ParamName.c_str(), level); +#endif + int ret = 0; + const bool isSet = (action == "set"); + const bool isReset = (action == "reset"); + const bool isSync = (action == "sync"); + + std::vector validContexts = getValidContextsFromParameters(parameters, tr181ParamName); + LOGINFO("%s: Number of validContexts = %zu", __FUNCTION__, validContexts.size()); +#if DEBUG + for (const auto& ctx : validContexts) { + + std::string pqStr = pqModeMap.count(ctx.pq_mode) ? pqModeMap.at(ctx.pq_mode) : std::to_string(ctx.pq_mode); + std::string fmtStr = videoFormatMap.count(ctx.videoFormatType) ? videoFormatMap.at(ctx.videoFormatType) : std::to_string(ctx.videoFormatType); + std::string srcStr = videoSrcMap.count(ctx.videoSrcType) ? 
videoSrcMap.at(ctx.videoSrcType) : std::to_string(ctx.videoSrcType); + LOGINFO("Valid Context - PQMode: %s, Format: %s, Source: %s", pqStr.c_str(), fmtStr.c_str(), srcStr.c_str()); + } +#endif + if (validContexts.empty()) { + LOGWARN("%s: No valid contexts found for parameters", __FUNCTION__); + return (int)tvERROR_GENERAL; + } + if (tr181ParamName == "CMS") { + JsonArray colorArray = getJsonArrayIfArray(parameters, "color"); + JsonArray componentArray = getJsonArrayIfArray(parameters, "component"); + + std::vector colors, components; + + for (size_t i = 0; i < colorArray.Length(); ++i) + colors.emplace_back(colorArray[i].String()); + + for (size_t i = 0; i < componentArray.Length(); ++i) + components.emplace_back(componentArray[i].String()); + + if (colors.empty()) colors.push_back("Global"); + if (components.empty()) components.push_back("Global"); + + if (colors.size() == 1 && colors[0] == "Global") + colors = m_cmsColorList; + + if (components.size() == 1 && components[0] == "Global") + components = m_cmsComponentList; + + for (const auto& ctx : validContexts) { + for (const auto& colorStr : colors) { + for (const auto& componentStr : components) { +#if DEBUG + LOGINFO("%s: Processing Color: %s, Component: %s", __FUNCTION__, colorStr.c_str(), componentStr.c_str()); +#endif + tvPQParameterIndex_t pqIndex; + if (convertCMSParamToPQEnum(componentStr, colorStr, pqIndex) != 0) { + LOGERR("%s: convertCMSParamToPQEnum failed for color: %s, component: %s", + __FUNCTION__, colorStr.c_str(), componentStr.c_str()); + ret |= 1; + continue; + } + tvDataComponentColor_t colorValue = tvDataColor_NONE; + if ( getCMSColorEnumFromString(colorStr, colorValue ) == -1 ) { + LOGERR("%s : getCMSColorEnumFromString failed for color: %s", __FUNCTION__, colorStr.c_str()); + ret |= 2; + continue; + } + tvComponentType_t componentValue; + if ( getCMSComponentEnumFromString(componentStr, componentValue ) == -1 ) { + LOGERR("%s : getCMSComponentEnumFromString failed for component: %s", __FUNCTION__, componentStr.c_str()); + ret |= 4; + continue; + } + if (std::find(m_cmsColorList.begin(), m_cmsColorList.end(), colorStr) == m_cmsColorList.end()) { + LOGERR("%s: Color '%s' is not supported as per capabilities", __FUNCTION__, colorStr.c_str()); + ret |= 8; + continue; + } + if (std::find(m_cmsComponentList.begin(), m_cmsComponentList.end(), componentStr) == m_cmsComponentList.end()) { + LOGERR("%s: Component '%s' is not supported as per capabilities", __FUNCTION__, componentStr.c_str()); + ret |= 16; + continue; + } + paramIndex_t paramIndex; + paramIndex.sourceIndex = static_cast(ctx.videoSrcType); + paramIndex.pqmodeIndex = static_cast(ctx.pq_mode); + paramIndex.formatIndex = static_cast(ctx.videoFormatType); + paramIndex.componentIndex = static_cast(componentValue); + paramIndex.colorIndex = static_cast(colorValue); + paramIndex.colorTempIndex = 0; + paramIndex.controlIndex = 0; + + int value = 0; + if (isReset) { + ret |= updateAVoutputTVParamToHAL(tr181ParamName, paramIndex, 0, false); + level = 0; + } + + if (isSync || isReset) { + if (getLocalparam(tr181ParamName, paramIndex, value, pqIndex, isSync) == 0) { + level = value; + } else { + LOGWARN("%s: Skipping sync for color: %s, component: %s", + __FUNCTION__, colorStr.c_str(), componentStr.c_str()); + continue; + } + } + ret |= SaveCMS(static_cast(paramIndex.sourceIndex), + paramIndex.pqmodeIndex, + static_cast(paramIndex.formatIndex), + static_cast(paramIndex.componentIndex), + static_cast(paramIndex.colorIndex), + level); + + if (isSet) { + ret |= 
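+                        // CMS flow: "reset" first clears the stored value, "sync"/"reset" re-read the
+                        // effective level via getLocalparam, SaveCMS is called for every valid context
+                        // with that level, and "set" additionally records it via updateAVoutputTVParamToHAL.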
updateAVoutputTVParamToHAL(tr181ParamName, paramIndex, level, true); + } + } + } + } + LOGINFO("Exit: %s, Return Value: %d", __FUNCTION__, ret); + return (ret < 0) ? -1 : 0; + } + for (const auto& ctx : validContexts) + { + paramIndex_t paramIndex { + .sourceIndex = static_cast(ctx.videoSrcType), + .pqmodeIndex = static_cast(ctx.pq_mode), + .formatIndex = static_cast(ctx.videoFormatType) + }; + std::string pqStr = pqModeMap.count(ctx.pq_mode) ? pqModeMap.at(ctx.pq_mode) : std::to_string(ctx.pq_mode); + std::string fmtStr = videoFormatMap.count(ctx.videoFormatType) ? videoFormatMap.at(ctx.videoFormatType) : std::to_string(ctx.videoFormatType); + std::string srcStr = videoSrcMap.count(ctx.videoSrcType) ? videoSrcMap.at(ctx.videoSrcType) : std::to_string(ctx.videoSrcType); + + if (isSet) + { + ret |= updateAVoutputTVParamToHALV2(tr181ParamName, paramIndex, level, true); + } + else + { + if (isReset) + { + ret |= updateAVoutputTVParamToHALV2(tr181ParamName, paramIndex, level, false); + } + if(getLocalparam(tr181ParamName,paramIndex,level,pqParamIndex,isSync)) + { + continue; + } + } + switch (pqParamIndex) + { + case PQ_PARAM_BRIGHTNESS: + ret |= SaveBrightness((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_CONTRAST: + ret |= SaveContrast((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_SHARPNESS: + ret |= SaveSharpness((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_HUE: + ret |= SaveHue((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_SATURATION: + ret |= SaveSaturation((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_COLOR_TEMPERATURE: + ret |= SaveColorTemperature((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvColorTemp_t)level); + break; + case PQ_PARAM_BACKLIGHT: + ret |= SaveBacklight((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_DIMMINGMODE: + ret |= SaveTVDimmingMode((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDimmingMode_t)level); + break; + case PQ_PARAM_LOWLATENCY_STATE: + ret |= SaveLowLatencyState((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,level); + break; + case PQ_PARAM_DOLBY_MODE: + ret |= SaveTVDolbyVisionMode((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDolbyMode_t)level); + break; + case PQ_PARAM_ASPECT_RATIO: + ret |= SaveAspectRatio((tvVideoSrcType_t)paramIndex.sourceIndex, paramIndex.pqmodeIndex,(tvVideoFormatType_t)paramIndex.formatIndex,(tvDisplayMode_t)level); + break; + case PQ_PARAM_PRECISION_DETAIL: + #if HAL_NOT_READY + #else + ret |= SetPrecisionDetail((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + break; + + case PQ_PARAM_LOCAL_CONTRAST_ENHANCEMENT: + #if HAL_NOT_READY + #else + #if ENABLE_PQ_PARAM + ret |= 
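+                    // This setter is compiled only when HAL_NOT_READY is disabled and
+                    // ENABLE_PQ_PARAM is defined; otherwise the case is a no-op.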
SetLocalContrastEnhancement((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + #endif + break; + + case PQ_PARAM_MPEG_NOISE_REDUCTION: + #if HAL_NOT_READY + #else + #if ENABLE_PQ_PARAM + ret |= SetMPEGNoiseReduction((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + #endif + break; + + case PQ_PARAM_DIGITAL_NOISE_REDUCTION: + #if HAL_NOT_READY + #else + #if ENABLE_PQ_PARAM + ret |= SetDigitalNoiseReduction((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + #endif + break; + + case PQ_PARAM_AI_SUPER_RESOLUTION: + #if HAL_NOT_READY + #else + ret |= SetAISuperResolution((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + break; + + case PQ_PARAM_MEMC: + #if HAL_NOT_READY + #else + ret |= SetMEMC((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + level); + #endif + break; + #if HAL_NOT_READY + #else + case PQ_PARAM_BACKLIGHT_MODE: + ret |= SaveBacklightMode((tvVideoSrcType_t)paramIndex.sourceIndex, + (tvPQModeIndex_t)paramIndex.pqmodeIndex, + (tvVideoFormatType_t)paramIndex.formatIndex, + static_cast(level)); + #endif + break; + case PQ_PARAM_HDR10_MODE: + case PQ_PARAM_HLG_MODE: + case PQ_PARAM_LDIM: + case PQ_PARAM_LOCALDIMMING_LEVEL: + + case PQ_PARAM_WB_GAIN_RED: + case PQ_PARAM_WB_GAIN_GREEN: + case PQ_PARAM_WB_GAIN_BLUE: + case PQ_PARAM_WB_OFFSET_RED: + case PQ_PARAM_WB_OFFSET_GREEN: + case PQ_PARAM_WB_OFFSET_BLUE: + // TODO: Add implementation + break; + + default: + // Prevent compiler warning for unhandled enums + LOGWARN("Unhandled PQ parameter index: %d", pqParamIndex); + break; + } + } + LOGINFO("Exit: %s, Return Value: %d", __FUNCTION__, ret); + return ret; + } + +tvError_t AVOutputTV::ReadJsonFile(JsonObject& root) { + std::ifstream file(CAPABLITY_FILE_NAMEV2); + if (!file.is_open()) { + LOGWARN("AVOutputPlugins: %s: Unable to open file", __FUNCTION__); + return tvERROR_GENERAL; + } + + std::string jsonStr((std::istreambuf_iterator(file)), std::istreambuf_iterator()); + file.close(); + + if (!root.FromString(jsonStr)) { + LOGWARN("AVOutputPlugins: %s: JSON parsing failed", __FUNCTION__); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::ExtractRangeInfo(const JsonObject& data, int* max_value) { + if (!data.HasLabel("rangeInfo")) { + LOGWARN("AVOutputPlugins: %s: 'rangeInfo' not available", __FUNCTION__); + return tvERROR_NONE; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + if (rangeInfo.HasLabel("to")) { + if (!max_value) { + LOGWARN("AVOutputPlugins: %s: NULL input param max_value", __FUNCTION__); + return tvERROR_INVALID_PARAM; + } + *max_value = rangeInfo["to"].Number(); + return tvERROR_NONE; + } + + LOGWARN("AVOutputPlugins: %s: Invalid 'rangeInfo' format", __FUNCTION__); + return tvERROR_GENERAL; +} + +tvError_t AVOutputTV::ExtractContextCaps(const JsonObject& data, tvContextCaps_t** context_caps) { + if (!context_caps) { + LOGWARN("AVOutputPlugins: %s: NULL input param", __FUNCTION__); + return tvERROR_INVALID_PARAM; + } + + if (!data.HasLabel("context")) { + LOGWARN("AVOutputPlugins: %s: 'context' missing", __FUNCTION__); + return 
tvERROR_GENERAL; + } + + JsonObject context = data["context"].Object(); + if (!context.IsSet()) { + LOGWARN("AVOutputPlugins: %s: Context is not set", __FUNCTION__); + return tvERROR_GENERAL; + } + + std::vector contexts = ParseContextCaps(context); + *context_caps = AllocateContextCaps(contexts); + if (!*context_caps) { + LOGWARN("AVOutputPlugins: %s: Memory allocation failed", __FUNCTION__); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} +template +bool LookupEnum(const std::string& str, const std::map& map, EnumType& outEnum) { + for (const auto& entry : map) { + if (entry.second == str) { + outEnum = static_cast(entry.first); + return true; + } + } + return false; +} + +std::vector AVOutputTV::ParseContextCaps(const JsonObject& context) +{ + std::vector contexts; + std::set> seen; + + WPEFramework::Core::JSON::VariantContainer::Iterator modeIterator = context.Variants(); + while (modeIterator.Next()) { + std::string modeStr = modeIterator.Label(); + + tvPQModeIndex_t modeEnum; + if (!LookupEnum(modeStr, pqModeMap, modeEnum)) continue; + + const auto& modeValue = context[modeStr.c_str()]; + if (!modeValue.IsSet() || modeValue.Content() != WPEFramework::Core::JSON::Variant::type::OBJECT) continue; + + JsonObject formatMap = modeValue.Object(); + WPEFramework::Core::JSON::VariantContainer::Iterator formatIterator = formatMap.Variants(); + while (formatIterator.Next()) { + std::string formatStr = formatIterator.Label(); + + tvVideoFormatType_t fmtEnum; + if (!LookupEnum(formatStr, videoFormatMap, fmtEnum)) continue; + + const auto& formatValue = formatMap[formatStr.c_str()]; + if (!formatValue.IsSet() || formatValue.Content() != WPEFramework::Core::JSON::Variant::type::ARRAY) continue; + + JsonArray sources = formatValue.Array(); + for (uint32_t i = 0; i < sources.Length(); ++i) { + std::string srcStr = sources[i].String(); + + tvVideoSrcType_t srcEnum; + if (!LookupEnum(srcStr, videoSrcMap, srcEnum)) continue; + + auto triplet = std::make_tuple(modeEnum, fmtEnum, srcEnum); + if (seen.find(triplet) == seen.end()) { + contexts.push_back({modeEnum, fmtEnum, srcEnum}); + seen.insert(triplet); + } + } + } + } + + return contexts; +} + +tvContextCaps_t* AVOutputTV::AllocateContextCaps(const std::vector& contexts) { + tvContextCaps_t* context_caps = new (std::nothrow) tvContextCaps_t; + if (!context_caps) { + return nullptr; + } + + context_caps->num_contexts = contexts.size(); + context_caps->contexts = contexts.empty() ? 
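+    // Both the tvContextCaps_t struct and its contexts array are heap-allocated with
+    // new/new[] and ownership passes to the caller; an empty context list is returned
+    // as num_contexts == 0 with a null contexts pointer.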
nullptr : new (std::nothrow) tvConfigContext_t[contexts.size()]; + + if (!contexts.empty() && !context_caps->contexts) { + delete context_caps; + return nullptr; + } + + if (!contexts.empty()) { + std::copy(contexts.begin(), contexts.end(), context_caps->contexts); + } + return context_caps; +} + +tvError_t AVOutputTV::GetCaps(const std::string& key, int* max_value, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + if (ExtractRangeInfo(data, max_value) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetDVCalibrationCaps(tvDVCalibrationSettings_t **min_values, tvDVCalibrationSettings_t **max_values, tvContextCaps_t **context_caps) { + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + if (!root.HasLabel("DolbyVisionCalibration")) { + LOGWARN("AVOutputPlugins: %s: Missing 'DolbyVisionCalibration' label", __FUNCTION__); + return tvERROR_GENERAL; + } + + JsonObject data = root["DolbyVisionCalibration"].Object(); + *min_values = new tvDVCalibrationSettings_t(); + *max_values = new tvDVCalibrationSettings_t(); + + std::map keyMap = { + {"Tmax", &tvDVCalibrationSettings_t::Tmax}, + {"Tmin", &tvDVCalibrationSettings_t::Tmin}, + {"Tgamma", &tvDVCalibrationSettings_t::Tgamma}, + {"Rx", &tvDVCalibrationSettings_t::Rx}, + {"Ry", &tvDVCalibrationSettings_t::Ry}, + {"Gx", &tvDVCalibrationSettings_t::Gx}, + {"Gy", &tvDVCalibrationSettings_t::Gy}, + {"Bx", &tvDVCalibrationSettings_t::Bx}, + {"By", &tvDVCalibrationSettings_t::By}, + {"Wx", &tvDVCalibrationSettings_t::Wx}, + {"Wy", &tvDVCalibrationSettings_t::Wy} + }; + + for (auto it = keyMap.begin(); it != keyMap.end(); ++it) { + const std::string& key = it->first; + double tvDVCalibrationSettings_t::*member = it->second; + std::string minKey = "range" + key; + if (data.HasLabel(minKey.c_str())) { + JsonObject range = data[minKey.c_str()].Object(); + (*min_values)->*member = range["from"].Number(); + (*max_values)->*member = range["to"].Number(); + } + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetBacklightModeCaps(tvBacklightMode_t** backlight_mode, size_t* num_backlight_mode, tvContextCaps_t** context_caps) +{ + LOGINFO("Entry\n"); + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "BacklightMode"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); 
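+    // Illustrative capability entry this parser expects in CAPABLITY_FILE_NAMEV2 (shape
+    // inferred from the lookups in this function and ParseContextCaps; the actual option
+    // and context lists are platform specific):
+    // "BacklightMode": {
+    //     "platformSupport": true,
+    //     "rangeInfo": { "options": ["Manual", "Ambient", "Eco"] },
+    //     "context": { "Standard": { "SDR": ["HDMI1", "IP"] } }
+    // }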
+ + *num_backlight_mode = optionsArray.Length(); + *backlight_mode = static_cast(malloc(*num_backlight_mode * sizeof(tvBacklightMode_t))); + if (!(*backlight_mode)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_backlight_mode; ++i) { + std::string modeStr = optionsArray[i].String(); + auto it = backlightModeReverseMap.find(modeStr); + if (it != backlightModeReverseMap.end()) { + (*backlight_mode)[i] = static_cast (it->second); + } else { + (*backlight_mode)[i] = tvBacklightMode_INVALID; + } + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*backlight_mode); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +// LocalContrastEnhancement +tvError_t AVOutputTV::GetLocalContrastEnhancementCaps(int * maxLocalContrastEnhancement, tvContextCaps_t ** context_caps) { + return GetCaps("LocalContrastEnhancement", maxLocalContrastEnhancement, context_caps); +} + +// MPEGNoiseReduction +tvError_t AVOutputTV::GetMPEGNoiseReductionCaps(int * maxMPEGNoiseReduction, tvContextCaps_t ** context_caps) { + return GetCaps("MPEGNoiseReduction", maxMPEGNoiseReduction, context_caps); +} + +// DigitalNoiseReduction +tvError_t AVOutputTV::GetDigitalNoiseReductionCaps(int * maxDigitalNoiseReduction, tvContextCaps_t ** context_caps) { + return GetCaps("DigitalNoiseReduction", maxDigitalNoiseReduction, context_caps); +} + +tvError_t AVOutputTV::GetMultiPointWBCaps(int* num_hal_matrix_points, + int* rgb_min, + int* rgb_max, + int* num_ui_matrix_points, + double** ui_matrix_positions, + tvContextCaps_t** context_caps) +{ + if (!num_hal_matrix_points || !rgb_min || !rgb_max || + !num_ui_matrix_points || !ui_matrix_positions || !context_caps) + return tvERROR_INVALID_PARAM; + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) + return tvERROR_GENERAL; + + const std::string key = "MultiPointWB"; + if (!root.HasLabel(key.c_str())) + return tvERROR_OPERATION_NOT_SUPPORTED; + + JsonObject data = root[key.c_str()].Object(); + + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) + return tvERROR_OPERATION_NOT_SUPPORTED; + + // Extract matrix points + if (!data.HasLabel("points")) + return tvERROR_INVALID_PARAM; + *num_hal_matrix_points = data["points"].Number(); + + // Extract range info + if (!data.HasLabel("rangeInfo")) + return tvERROR_INVALID_PARAM; + + JsonObject range = data["rangeInfo"].Object(); + if (!range.HasLabel("from") || !range.HasLabel("to")) + return tvERROR_INVALID_PARAM; + + *rgb_min = range["from"].Number(); + *rgb_max = range["to"].Number(); + + // Allocate UI matrix points (same count for now) + *num_ui_matrix_points = *num_hal_matrix_points; + *ui_matrix_positions = new double[*num_ui_matrix_points]; + if (!(*ui_matrix_positions)) + return tvERROR_GENERAL; + + for (int i = 0; i < *num_ui_matrix_points; ++i) + (*ui_matrix_positions)[i] = static_cast(i) / (*num_ui_matrix_points - 1); + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) + return tvERROR_GENERAL; + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetCMSCaps(int* max_hue, + int* max_saturation, + int* max_luma, + tvDataComponentColor_t** color, + tvComponentType_t** component, + size_t* num_color, + size_t* num_component, + tvContextCaps_t** context_caps) +{ + if (!max_hue || !max_saturation || !max_luma || !color || !component || !num_color || !num_component || !context_caps) { + return tvERROR_INVALID_PARAM; + } + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + const char* key = "CMS"; + if 
(!root.HasLabel(key)) { + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject cms = root[key].Object(); + + if (!cms.HasLabel("platformSupport") || !cms["platformSupport"].Boolean()) { + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + // Extract ranges + *max_hue = cms.HasLabel("rangeHue") ? cms["rangeHue"].Object()["to"].Number() : 0; + *max_saturation = cms.HasLabel("rangeSaturation") ? cms["rangeSaturation"].Object()["to"].Number() : 0; + *max_luma = cms.HasLabel("rangeLuma") ? cms["rangeLuma"].Object()["to"].Number() : 0; + + // Extract colors + const JsonArray& colorArray = cms["color"].Array(); + *num_color = colorArray.Length(); + *color = new tvDataComponentColor_t[*num_color]; + for (size_t i = 0; i < *num_color; ++i) { + std::string colorStr = colorArray[i].String(); + if (getCMSColorEnumFromString(colorStr, (*color)[i]) != 0) { + delete[] *color; + *color = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Extract components + const JsonArray& compArray = cms["component"].Array(); + *num_component = compArray.Length(); + *component = new tvComponentType_t[*num_component]; + for (size_t i = 0; i < *num_component; ++i) { + std::string compStr = compArray[i].String(); + if (getCMSComponentEnumFromString(compStr, (*component)[i]) != 0) { + delete[] *color; + delete[] *component; + *color = nullptr; + *component = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Extract context capabilities + if (ExtractContextCaps(cms, context_caps) != tvERROR_NONE) { + delete[] *color; + delete[] *component; + *color = nullptr; + *component = nullptr; + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetCustom2PointWhiteBalanceCaps(int* min_gain, int* min_offset, + int* max_gain, int* max_offset, + tvWBColor_t** color, + tvWBControl_t** control, + size_t* num_color, size_t* num_control, + tvContextCaps_t** context_caps) +{ + if (!min_gain || !min_offset || !max_gain || !max_offset || + !color || !control || !num_color || !num_control || !context_caps) + { + LOGERR("Invalid input pointers"); + return tvERROR_INVALID_PARAM; + } + + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + LOGERR("Failed to read JSON capabilities"); + return tvERROR_GENERAL; + } + + const char* key = "Custom2PointWhiteBalance"; + if (!root.HasLabel(key)) { + LOGERR("Missing key: %s", key); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject section = root[key].Object(); + + if (!section.HasLabel("platformSupport") || !section["platformSupport"].Boolean()) { + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + // Parse rangeGain and rangeOffset + *min_gain = section["rangeGain"].Object()["from"].Number(); + *max_gain = section["rangeGain"].Object()["to"].Number(); + *min_offset = section["rangeOffset"].Object()["from"].Number(); + *max_offset = section["rangeOffset"].Object()["to"].Number(); + + // Parse control array + JsonArray controlArray = section["control"].Array(); + *num_control = controlArray.Length(); + *control = new tvWBControl_t[*num_control]; + for (size_t i = 0; i < *num_control; ++i) { + std::string ctrlStr = controlArray[i].String(); + if (getWBControlEnumFromString(ctrlStr, (*control)[i]) != 0) { + LOGERR("Invalid control: %s", ctrlStr.c_str()); + delete[] *control; + *control = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Parse color array + JsonArray colorArray = section["color"].Array(); + *num_color = colorArray.Length(); + *color = new tvWBColor_t[*num_color]; + for (size_t i = 0; i < *num_color; ++i) { + std::string 
colStr = colorArray[i].String(); + if (getWBColorEnumFromString(colStr, (*color)[i]) != 0) { + LOGERR("Invalid color: %s", colStr.c_str()); + delete[] *color; + delete[] *control; + *color = nullptr; + *control = nullptr; + return tvERROR_INVALID_PARAM; + } + } + + // Parse contextCaps + if (ExtractContextCaps(section, context_caps) != tvERROR_NONE) { + delete[] *color; + delete[] *control; + *color = nullptr; + *control = nullptr; + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +#if HAL_NOT_READY +tvError_t AVOutputTV::GetBacklightCaps(int* max_backlight, tvContextCaps_t** context_caps) { + return GetCaps("Backlight", max_backlight, context_caps); +} + +tvError_t AVOutputTV::GetBrightnessCaps(int* max_brightness, tvContextCaps_t** context_caps) { + return GetCaps("Brightness", max_brightness, context_caps); +} + +tvError_t AVOutputTV::GetContrastCaps(int* max_contrast, tvContextCaps_t** context_caps) { + return GetCaps("Contrast", max_contrast, context_caps); +} + +tvError_t AVOutputTV::GetSharpnessCaps(int* max_sharpness, tvContextCaps_t** context_caps) { + return GetCaps("Sharpness", max_sharpness, context_caps); +} + +tvError_t AVOutputTV::GetSaturationCaps(int* max_saturation, tvContextCaps_t** context_caps) { + return GetCaps("Saturation", max_saturation, context_caps); +} + +tvError_t AVOutputTV::GetHueCaps(int* max_hue, tvContextCaps_t** context_caps) { + return GetCaps("Hue", max_hue, context_caps); +} + +tvError_t AVOutputTV::GetLowLatencyStateCaps(int* max_latency, tvContextCaps_t ** context_caps){ + return GetCaps("LowLatencyState", max_latency, context_caps); +} + +// PrecisionDetail +tvError_t AVOutputTV::GetPrecisionDetailCaps(int * maxPrecision, tvContextCaps_t ** context_caps) { + return GetCaps("PrecisionDetail", maxPrecision, context_caps); +} + +// AISuperResolution +tvError_t AVOutputTV::GetAISuperResolutionCaps(int * maxAISuperResolution, tvContextCaps_t ** context_caps) { + return GetCaps("AISuperResolution", maxAISuperResolution, context_caps); +} + +// MEMC +tvError_t AVOutputTV::GetMEMCCaps(int * maxMEMC, tvContextCaps_t ** context_caps) { + return GetCaps("MEMC", maxMEMC, context_caps); +} + +tvError_t AVOutputTV::GetColorTemperatureCaps(tvColorTemp_t** color_temp, size_t* num_color_temp, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "ColorTemperature"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + + *num_color_temp = optionsArray.Length(); + *color_temp = static_cast(malloc(*num_color_temp * sizeof(tvColorTemp_t))); + if (!(*color_temp)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_color_temp; ++i) { + std::string tempStr = optionsArray[i].String(); + if (tempStr == "Standard") (*color_temp)[i] = tvColorTemp_STANDARD; + else if (tempStr == "Warm") (*color_temp)[i] = tvColorTemp_WARM; + else if (tempStr == "Cold") (*color_temp)[i] = tvColorTemp_COLD; + else if (tempStr == "UserDefined") (*color_temp)[i] = tvColorTemp_USER; + else if (tempStr == 
"Supercold") (*color_temp)[i] = tvColorTemp_SUPERCOLD; + else if (tempStr == "BoostStandard") (*color_temp)[i] = tvColorTemp_BOOST_STANDARD; + else if (tempStr == "BoostWarm") (*color_temp)[i] = tvColorTemp_BOOST_WARM; + else if (tempStr == "BoostCold") (*color_temp)[i] = tvColorTemp_BOOST_COLD; + else if (tempStr == "BoostUserDefined") (*color_temp)[i] = tvColorTemp_BOOST_USER; + else if (tempStr == "BoostSupercold") (*color_temp)[i] = tvColorTemp_BOOST_SUPERCOLD; + else (*color_temp)[i] = tvColorTemp_STANDARD; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*color_temp); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + + +tvError_t AVOutputTV::GetSdrGammaCaps(tvSdrGamma_t** sdr_gamma, size_t* num_sdr_gamma, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "SDRGamma"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + + *num_sdr_gamma = optionsArray.Length(); + *sdr_gamma = static_cast(malloc(*num_sdr_gamma * sizeof(tvSdrGamma_t))); + if (!(*sdr_gamma)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_sdr_gamma; ++i) { + std::string gammaStr = optionsArray[i].String(); + if (gammaStr == "1.8") (*sdr_gamma)[i] = tvSdrGamma_1_8; + else if (gammaStr == "1.9") (*sdr_gamma)[i] = tvSdrGamma_1_9; + else if (gammaStr == "2.0") (*sdr_gamma)[i] = tvSdrGamma_2_0; + else if (gammaStr == "2.1") (*sdr_gamma)[i] = tvSdrGamma_2_1; + else if (gammaStr == "2.2") (*sdr_gamma)[i] = tvSdrGamma_2_2; + else if (gammaStr == "2.3") (*sdr_gamma)[i] = tvSdrGamma_2_3; + else if (gammaStr == "2.4") (*sdr_gamma)[i] = tvSdrGamma_2_4; + else if (gammaStr == "BT.1886") (*sdr_gamma)[i] = tvSdrGamma_BT_1886; + else (*sdr_gamma)[i] = tvSdrGamma_INVALID; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*sdr_gamma); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} +tvError_t AVOutputTV::GetTVDimmingModeCaps(tvDimmingMode_t** dimming_mode, size_t* num_dimming_mode, tvContextCaps_t** context_caps){ + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + std::string key = "DimmingMode"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + *num_dimming_mode = optionsArray.Length(); + *dimming_mode = static_cast(malloc(*num_dimming_mode * sizeof(tvDimmingMode_t))); + if (!(*dimming_mode)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_dimming_mode; ++i) { + std::string modeStr = optionsArray[i].String(); + if (modeStr == "Fixed") 
(*dimming_mode)[i] = tvDimmingMode_Fixed; + else if (modeStr == "Local") (*dimming_mode)[i] = tvDimmingMode_Local; + else if (modeStr == "Global") (*dimming_mode)[i] = tvDimmingMode_Global; + else (*dimming_mode)[i] = tvDimmingMode_MAX; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*dimming_mode); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; + +} + +tvError_t AVOutputTV::GetAspectRatioCaps(tvDisplayMode_t** aspect_ratio, size_t* num_aspect_ratio, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "AspectRatio"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + + *num_aspect_ratio = optionsArray.Length(); + *aspect_ratio = static_cast(malloc(*num_aspect_ratio * sizeof(tvDisplayMode_t))); + if (!(*aspect_ratio)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_aspect_ratio; ++i) { + std::string aspectStr = optionsArray[i].String(); + if (aspectStr == "TV AUTO") (*aspect_ratio)[i] = tvDisplayMode_AUTO; + else if (aspectStr == "TV DIRECT") (*aspect_ratio)[i] = tvDisplayMode_DIRECT; + else if (aspectStr == "TV FULL") (*aspect_ratio)[i] = tvDisplayMode_FULL; + else if (aspectStr == "TV NORMAL") (*aspect_ratio)[i] = tvDisplayMode_NORMAL; + else if (aspectStr == "TV 16X9 STRETCH") (*aspect_ratio)[i] = tvDisplayMode_16x9; + else if (aspectStr == "TV 4X3 PILLARBOX") (*aspect_ratio)[i] = tvDisplayMode_4x3; + else if (aspectStr == "TV ZOOM") (*aspect_ratio)[i] = tvDisplayMode_ZOOM; + else (*aspect_ratio)[i] = tvDisplayMode_MAX; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*aspect_ratio); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +tvError_t AVOutputTV::GetTVPictureModeCaps(tvPQModeIndex_t** mode, size_t* num_pic_modes, tvContextCaps_t** context_caps) { + LOGINFO("Entry\n"); + JsonObject root; + if (ReadJsonFile(root) != tvERROR_NONE) { + return tvERROR_GENERAL; + } + + std::string key = "PictureMode"; + if (!root.HasLabel(key.c_str())) { + LOGWARN("AVOutputPlugins: %s: Missing '%s' label", __FUNCTION__, key.c_str()); + return tvERROR_GENERAL; + } + + JsonObject data = root[key.c_str()].Object(); + if (!data.HasLabel("platformSupport") || !data["platformSupport"].Boolean()) { + LOGWARN("AVOutputPlugins: %s: Platform support is false", __FUNCTION__); + return tvERROR_OPERATION_NOT_SUPPORTED; + } + + JsonObject rangeInfo = data["rangeInfo"].Object(); + JsonArray optionsArray = rangeInfo["options"].Array(); + + *num_pic_modes = optionsArray.Length(); + *mode = static_cast(malloc(*num_pic_modes * sizeof(tvPQModeIndex_t))); + if (!(*mode)) { + return tvERROR_GENERAL; + } + + for (size_t i = 0; i < *num_pic_modes; ++i) { + std::string modeStr = optionsArray[i].String(); + + if (modeStr == "Standard") (*mode)[i] = PQ_MODE_STANDARD; + else if (modeStr == "Vivid") (*mode)[i] = PQ_MODE_VIVID; + else if (modeStr == "EnergySaving" || modeStr == "Energy Saving") (*mode)[i] = PQ_MODE_ENERGY_SAVING; + else if (modeStr == "Theater") 
(*mode)[i] = PQ_MODE_THEATER; + else if (modeStr == "Game") (*mode)[i] = PQ_MODE_GAME; + else if (modeStr == "Sports") (*mode)[i] = PQ_MODE_SPORTS; + else if (modeStr == "AI PQ") (*mode)[i] = PQ_MODE_AIPQ; + else if (modeStr == "Dark") (*mode)[i] = PQ_MODE_DARK; + else if (modeStr == "Bright") (*mode)[i] = PQ_MODE_BRIGHT; + else if (modeStr == "IQ") (*mode)[i] = PQ_MODE_IQ; + else (*mode)[i] = PQ_MODE_INVALID; + } + + if (ExtractContextCaps(data, context_caps) != tvERROR_NONE) { + free(*mode); + return tvERROR_GENERAL; + } + + return tvERROR_NONE; +} + +#endif + + int AVOutputTV::ReadCapablitiesFromConf(std::string param, capDetails_t& info) + { + int ret = 0; + + /*Consider User WhiteBalance as CustomWhiteBalance + To avoid clash with Factory WhiteBalance Calibration capablities*/ + + if ( param == "WhiteBalance") { + param = "CustomWhiteBalance"; + } else if ( param == "AutoBacklightMode") { + param = "BacklightControl"; + } + + try { + CIniFile inFile(CAPABLITY_FILE_NAME); + std::string configString; + + if(param == "CMS") + { + configString = param + ".color"; + info.color = inFile.Get(configString); + + configString = param + ".component"; + info.component = inFile.Get(configString); + } + + if(param == "CustomWhiteBalance") + { + configString = param + ".color"; + info.color = inFile.Get(configString); + + configString = param + ".control"; + info.control = inFile.Get(configString); + + } + + if ((param == "DolbyVisionMode") || (param == "Backlight") || (param == "CMS") || (param == "CustomWhiteBalance") || (param == "HDRMode") || (param == "BacklightControl")) { + configString = param + ".platformsupport"; + info.isPlatformSupport = inFile.Get(configString); + printf(" platformsupport : %s\n",info.isPlatformSupport.c_str() ); + } + + if ( (param == "ColorTemperature") || (param == "DimmingMode") || + ( param == "BacklightControl") || (param == "DolbyVisionMode") || + (param == "HDR10Mode") || (param == "HLGMode") || (param == "AspectRatio") || + (param == "PictureMode") || (param == "VideoSource") || (param == "VideoFormat") || + (param == "VideoFrameRate") || (param == "HDRMode") ) { + configString = param + ".range"; + info.range = inFile.Get(configString); + printf(" String Range info : %s\n",info.range.c_str() ); + } else if ( (param == "CMS" )) { + configString.clear(); + configString = param + ".range_Saturation_from"; + info.range = inFile.Get(configString); + configString = param + ".range_Saturation_to"; + info.range += ","+inFile.Get(configString); + + configString = param + ".range_Hue_from"; + info.range += ","+inFile.Get(configString); + configString = param + ".range_Hue_to"; + info.range += ","+inFile.Get(configString); + + configString = param + ".range_Luma_from"; + info.range += ","+inFile.Get(configString); + configString = param + ".range_Luma_to"; + info.range += ","+inFile.Get(configString); + } else if ( (param == "CustomWhiteBalance")) { + configString = param + ".range_Gain_from"; + info.range = inFile.Get(configString); + configString = param + ".range_Gain_to"; + info.range += ","+inFile.Get(configString); + + configString = param + ".range_Offset_from"; + info.range += ","+inFile.Get(configString); + configString = param + ".range_Offset_to"; + info.range += ","+inFile.Get(configString); + } else { + configString = param + ".range_from"; + info.range = inFile.Get(configString); + configString = param + ".range_to"; + info.range += ","+inFile.Get(configString); + printf(" Integer Range Info : %s\n",info.range.c_str() ); + } + + if ((param == "VideoSource") 
|| (param == "PictureMode") || (param == "VideoFormat") ) { configString.clear(); configString = param + ".index"; info.index = inFile.Get(configString); @@ -2372,7 +4285,6 @@ namespace Plugin { } return ret; } - bool AVOutputTV::checkCMSColorAndComponentCapability(const std::string capValue, const std::string inputValue) { // Parse capValue into a set std::set capSet; diff --git a/AVOutput/CHANGELOG.md b/AVOutput/CHANGELOG.md index e5f68a0f..774b8127 100644 --- a/AVOutput/CHANGELOG.md +++ b/AVOutput/CHANGELOG.md @@ -13,6 +13,9 @@ All notable changes to this RDK Service will be documented in this file. * **Security** in case of vulnerabilities. * Changes in CHANGELOG should be updated when commits are added to the main or release branches. There should be one CHANGELOG entry per JIRA Ticket. This is not enforced on sprint branches since there could be multiple changes for the same JIRA ticket during development. +## [1.2.0] - 2025-06-25 +### Added +- Advance PQ Params ## [1.1.2] - 2025-07-01 ### Fixed