diff --git a/.clang-format b/.clang-format new file mode 100644 index 00000000..d349ce45 --- /dev/null +++ b/.clang-format @@ -0,0 +1,78 @@ +--- +Language: Cpp +# BasedOnStyle: Chromium +AlignAfterOpenBracket: Align +AlignConsecutiveMacros: true +AlignConsecutiveAssignments: false +AlignConsecutiveDeclarations: false +AlignEscapedNewlines: Right +AlignOperands: false +AlignTrailingComments: true +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortBlocksOnASingleLine: Never +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: None +AllowShortIfStatementsOnASingleLine: Never +AllowShortLoopsOnASingleLine: false +AlwaysBreakAfterDefinitionReturnType: None +AlwaysBreakAfterReturnType: None +AlwaysBreakBeforeMultilineStrings: false +BinPackArguments: true +BinPackParameters: true +BraceWrapping: + AfterCaseLabel: false + AfterClass: false + AfterControlStatement: false + AfterEnum: false + AfterFunction: false + AfterStruct: false + AfterUnion: false + AfterExternBlock: false + BeforeCatch: false + BeforeElse: false + IndentBraces: false + SplitEmptyFunction: false + AfterNamespace: false +BreakBeforeBinaryOperators: None +BreakBeforeBraces: WebKit +BreakBeforeTernaryOperators: true +BreakStringLiterals: true +ColumnLimit: 150 +CommentPragmas: '^ IWYU pragma:' +ContinuationIndentWidth: 4 +DerivePointerAlignment: false +IncludeBlocks: Preserve +IndentCaseLabels: true +IndentGotoLabels: true +IndentPPDirectives: None +IndentWidth: 4 +IndentWrappedFunctionNames: false +CompactNamespaces: false +KeepEmptyLinesAtTheStartOfBlocks: false +PenaltyBreakAssignment: 2 +PenaltyBreakBeforeFirstCallParameter: 19 +PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakString: 1000 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +PointerAlignment: Left +ReflowComments: true +SortIncludes: false +SpaceAfterCStyleCast: true +SpaceAfterLogicalNot: false +SpaceBeforeAssignmentOperators: true +SpaceBeforeParens: 
ControlStatements +SpaceInEmptyBlock: false +SpaceInEmptyParentheses: false +SpacesBeforeTrailingComments: 1 +SpacesInAngles: false +SpacesInConditionalStatement: false +SpacesInCStyleCastParentheses: false +SpacesInParentheses: false +SpacesInSquareBrackets: false +SpaceBeforeSquareBrackets: false +Standard: Auto +TabWidth: 4 +UseTab: Never +... diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cfe8d7b8..8369e4d3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,6 +10,19 @@ on: - master jobs: + clang-format-check: + runs-on: macos-11 + steps: + - name: Clone repository + uses: actions/checkout@v3 + - name: Install clang-format + run: | + brew install clang-format + clang-format --version + - name: Run clang format check + run: | + bash scripts/check-clang.sh + mac-os-build-clang: runs-on: macos-12 env: diff --git a/samples/include.h b/samples/include.h index 10dc7f42..6187d36b 100644 --- a/samples/include.h +++ b/samples/include.h @@ -1,8 +1,8 @@ #ifndef _KVS_SAMPLE_INCLUDE_H #define _KVS_SAMPLE_INCLUDE_H -#define STATUS_KVS_GSTREAMER_SAMPLE_BASE 0x00080000 -#define STATUS_KVS_GSTREAMER_SAMPLE_ERROR STATUS_KVS_GSTREAMER_SAMPLE_BASE + 0x00000001 +#define STATUS_KVS_GSTREAMER_SAMPLE_BASE 0x00080000 +#define STATUS_KVS_GSTREAMER_SAMPLE_ERROR STATUS_KVS_GSTREAMER_SAMPLE_BASE + 0x00000001 #define STATUS_KVS_GSTREAMER_SAMPLE_INTERRUPTED STATUS_KVS_GSTREAMER_SAMPLE_BASE + 0x00000002 #endif // _KVS_SAMPLE_INCLUDE_H diff --git a/samples/kvs_gstreamer_audio_video_sample.cpp b/samples/kvs_gstreamer_audio_video_sample.cpp index 87bca3cb..fa14b560 100644 --- a/samples/kvs_gstreamer_audio_video_sample.cpp +++ b/samples/kvs_gstreamer_audio_video_sample.cpp @@ -23,7 +23,7 @@ using namespace log4cplus; extern "C" { #endif -int gstreamer_init(int, char **); +int gstreamer_init(int, char**); #ifdef __cplusplus } @@ -34,67 +34,56 @@ LOGGER_TAG("com.amazonaws.kinesis.video.gstreamer"); #define VIDEO_DEVICE_ENV_VAR "AWS_KVS_VIDEO_DEVICE" 
#define AUDIO_DEVICE_ENV_VAR "AWS_KVS_AUDIO_DEVICE" -#define DEFAULT_RETENTION_PERIOD_HOURS 2 -#define DEFAULT_KMS_KEY_ID "" -#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME -#define DEFAULT_CONTENT_TYPE "video/h264,audio/aac" -#define DEFAULT_MAX_LATENCY_SECONDS 60 -#define DEFAULT_FRAGMENT_DURATION_MILLISECONDS 2000 -#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 -#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE -#define DEFAULT_FRAME_TIMECODES TRUE -#define DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE -#define DEFAULT_FRAGMENT_ACKS TRUE -#define DEFAULT_RESTART_ON_ERROR TRUE -#define DEFAULT_RECALCULATE_METRICS TRUE -#define DEFAULT_STREAM_FRAMERATE 100 -#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) -#define DEFAULT_BUFFER_DURATION_SECONDS 120 -#define DEFAULT_REPLAY_DURATION_SECONDS 40 -#define DEFAULT_CONNECTION_STALENESS_SECONDS 60 -#define DEFAULT_CODEC_ID "V_MPEG4/ISO/AVC" -#define DEFAULT_TRACKNAME "kinesis_video" -#define APP_SINK_BASE_NAME "appsink" -#define DEFAULT_BUFFER_SIZE (1 * 1024 * 1024) -#define DEFAULT_STORAGE_SIZE (128 * 1024 * 1024) -#define DEFAULT_CREDENTIAL_ROTATION_SECONDS 3600 -#define DEFAULT_CREDENTIAL_EXPIRATION_SECONDS 180 +#define DEFAULT_RETENTION_PERIOD_HOURS 2 +#define DEFAULT_KMS_KEY_ID "" +#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME +#define DEFAULT_CONTENT_TYPE "video/h264,audio/aac" +#define DEFAULT_MAX_LATENCY_SECONDS 60 +#define DEFAULT_FRAGMENT_DURATION_MILLISECONDS 2000 +#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 +#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE +#define DEFAULT_FRAME_TIMECODES TRUE +#define DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE +#define DEFAULT_FRAGMENT_ACKS TRUE +#define DEFAULT_RESTART_ON_ERROR TRUE +#define DEFAULT_RECALCULATE_METRICS TRUE +#define DEFAULT_STREAM_FRAMERATE 100 +#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) +#define DEFAULT_BUFFER_DURATION_SECONDS 120 +#define DEFAULT_REPLAY_DURATION_SECONDS 40 +#define DEFAULT_CONNECTION_STALENESS_SECONDS 60 +#define DEFAULT_CODEC_ID 
"V_MPEG4/ISO/AVC" +#define DEFAULT_TRACKNAME "kinesis_video" +#define APP_SINK_BASE_NAME "appsink" +#define DEFAULT_BUFFER_SIZE (1 * 1024 * 1024) +#define DEFAULT_STORAGE_SIZE (128 * 1024 * 1024) +#define DEFAULT_CREDENTIAL_ROTATION_SECONDS 3600 +#define DEFAULT_CREDENTIAL_EXPIRATION_SECONDS 180 #define DEFAULT_AUDIO_VIDEO_DRIFT_TIMEOUT_SECOND 5 -#define DEFAULT_VIDEO_TRACKID 1 +#define DEFAULT_VIDEO_TRACKID 1 #define DEFAULT_AUDIO_TRACK_NAME "audio" -#define DEFAULT_AUDIO_CODEC_ID "A_AAC" -#define DEFAULT_AUDIO_TRACKID 2 +#define DEFAULT_AUDIO_CODEC_ID "A_AAC" +#define DEFAULT_AUDIO_TRACKID 2 typedef struct _FileInfo { - _FileInfo(): - path(""), - last_fragment_ts(0) {} + _FileInfo() : path(""), last_fragment_ts(0) + { + } string path; uint64_t last_fragment_ts; } FileInfo; typedef struct _CustomData { - _CustomData(): - first_video_frame(true), - eos_triggered(false), - pipeline_blocked(false), - stream_status(STATUS_SUCCESS), - base_pts(0), - max_frame_pts(0), - total_track_count(1), - key_frame_pts(0), - current_file_idx(0), - last_unpersisted_file_idx(0), - kinesis_video_producer(nullptr), - kinesis_video_stream(nullptr), - main_loop(NULL), - first_pts(GST_CLOCK_TIME_NONE), - use_absolute_fragment_times(true) { + _CustomData() + : first_video_frame(true), eos_triggered(false), pipeline_blocked(false), stream_status(STATUS_SUCCESS), base_pts(0), max_frame_pts(0), + total_track_count(1), key_frame_pts(0), current_file_idx(0), last_unpersisted_file_idx(0), kinesis_video_producer(nullptr), + kinesis_video_stream(nullptr), main_loop(NULL), first_pts(GST_CLOCK_TIME_NONE), use_absolute_fragment_times(true) + { producer_start_time = chrono::duration_cast(systemCurrentTime().time_since_epoch()).count(); } - GMainLoop *main_loop; + GMainLoop* main_loop; unique_ptr kinesis_video_producer; shared_ptr kinesis_video_stream; @@ -105,7 +94,7 @@ typedef struct _CustomData { mutex file_list_mtx; condition_variable audio_video_sync_cv; - char *stream_name; + char* stream_name; 
// indicate if either audio or video media pipeline is currently blocked. If so, the other pipeline line will wake up // the blocked one when the time is right. @@ -166,171 +155,180 @@ typedef struct _CustomData { // Pts of first frame uint64_t first_pts; - GstElement *pipeline; + GstElement* pipeline; } CustomData; -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { - class SampleClientCallbackProvider : public ClientCallbackProvider { - public: +class SampleClientCallbackProvider : public ClientCallbackProvider { + public: + UINT64 getCallbackCustomData() override + { + return reinterpret_cast(this); + } - UINT64 getCallbackCustomData() override { - return reinterpret_cast (this); - } + StorageOverflowPressureFunc getStorageOverflowPressureCallback() override + { + return storageOverflowPressure; + } - StorageOverflowPressureFunc getStorageOverflowPressureCallback() override { - return storageOverflowPressure; - } + static STATUS storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes); +}; - static STATUS storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes); - }; +class SampleStreamCallbackProvider : public StreamCallbackProvider { + UINT64 custom_data_; - class SampleStreamCallbackProvider : public StreamCallbackProvider { - UINT64 custom_data_; - public: - SampleStreamCallbackProvider(UINT64 custom_data) : custom_data_(custom_data) {} - - UINT64 getCallbackCustomData() override { - return custom_data_; - } + public: + SampleStreamCallbackProvider(UINT64 custom_data) : custom_data_(custom_data) + { + } - StreamConnectionStaleFunc getStreamConnectionStaleCallback() override { - return streamConnectionStaleHandler; - }; + UINT64 getCallbackCustomData() override + { + return custom_data_; + } - StreamErrorReportFunc getStreamErrorReportCallback() override { - return streamErrorReportHandler; - }; + StreamConnectionStaleFunc 
getStreamConnectionStaleCallback() override + { + return streamConnectionStaleHandler; + }; - DroppedFrameReportFunc getDroppedFrameReportCallback() override { - return droppedFrameReportHandler; - }; + StreamErrorReportFunc getStreamErrorReportCallback() override + { + return streamErrorReportHandler; + }; - FragmentAckReceivedFunc getFragmentAckReceivedCallback() override { - return fragmentAckReceivedHandler; - }; + DroppedFrameReportFunc getDroppedFrameReportCallback() override + { + return droppedFrameReportHandler; + }; - private: - static STATUS - streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 last_buffering_ack); + FragmentAckReceivedFunc getFragmentAckReceivedCallback() override + { + return fragmentAckReceivedHandler; + }; - static STATUS - streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, - STATUS status_code); + private: + static STATUS streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_buffering_ack); - static STATUS - droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode); + static STATUS streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, + STATUS status_code); - static STATUS - fragmentAckReceivedHandler( UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck); - }; + static STATUS droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode); - class SampleCredentialProvider : public StaticCredentialProvider { - // Test rotation period is 40 minute for the grace period. 
- const std::chrono::duration ROTATION_PERIOD = std::chrono::seconds(DEFAULT_CREDENTIAL_ROTATION_SECONDS); - public: - SampleCredentialProvider(const Credentials &credentials) : - StaticCredentialProvider(credentials) {} - - void updateCredentials(Credentials &credentials) override { - // Copy the stored creds forward - credentials = credentials_; - - // Update only the expiration - auto now_time = std::chrono::duration_cast( - std::chrono::system_clock::now().time_since_epoch()); - auto expiration_seconds = now_time + ROTATION_PERIOD; - credentials.setExpiration(std::chrono::seconds(expiration_seconds.count())); - LOG_INFO("New credentials expiration is " << credentials.getExpiration().count()); - } - }; + static STATUS fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck); +}; - class SampleDeviceInfoProvider : public DefaultDeviceInfoProvider { - public: - device_info_t getDeviceInfo() override { - auto device_info = DefaultDeviceInfoProvider::getDeviceInfo(); - // Set the storage size to 128MB - device_info.storageInfo.storageSize = DEFAULT_STORAGE_SIZE; - return device_info; - } - }; +class SampleCredentialProvider : public StaticCredentialProvider { + // Test rotation period is 40 minute for the grace period. + const std::chrono::duration ROTATION_PERIOD = std::chrono::seconds(DEFAULT_CREDENTIAL_ROTATION_SECONDS); - STATUS - SampleClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) { - UNUSED_PARAM(custom_handle); - LOG_WARN("Reporting storage overflow. Bytes remaining " << remaining_bytes); - return STATUS_SUCCESS; + public: + SampleCredentialProvider(const Credentials& credentials) : StaticCredentialProvider(credentials) + { } - STATUS SampleStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 last_buffering_ack) { - LOG_WARN("Reporting stream stale. 
Last ACK received " << last_buffering_ack); - return STATUS_SUCCESS; - } + void updateCredentials(Credentials& credentials) override + { + // Copy the stored creds forward + credentials = credentials_; - STATUS - SampleStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, STATUS status_code) { - LOG_ERROR("Reporting stream error. Errored timecode: " << errored_timecode << " Status: " - << status_code); - CustomData *data = reinterpret_cast(custom_data); - bool terminate_pipeline = false; - - // Terminate pipeline if error is not retriable or if error is retriable but we are streaming file. - // When streaming file, we choose to terminate the pipeline on error because the easiest way to recover - // is to stream the file from the beginning again. - // In realtime streaming, retriable error can be handled underneath. Otherwise terminate pipeline - // and store error status if error is fatal. 
- if ((IS_RETRIABLE_ERROR(status_code) && data->uploading_file) || - (!IS_RETRIABLE_ERROR(status_code) && !IS_RECOVERABLE_ERROR(status_code))) { - data->stream_status = status_code; - terminate_pipeline = true; - } + // Update only the expiration + auto now_time = std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()); + auto expiration_seconds = now_time + ROTATION_PERIOD; + credentials.setExpiration(std::chrono::seconds(expiration_seconds.count())); + LOG_INFO("New credentials expiration is " << credentials.getExpiration().count()); + } +}; + +class SampleDeviceInfoProvider : public DefaultDeviceInfoProvider { + public: + device_info_t getDeviceInfo() override + { + auto device_info = DefaultDeviceInfoProvider::getDeviceInfo(); + // Set the storage size to 128MB + device_info.storageInfo.storageSize = DEFAULT_STORAGE_SIZE; + return device_info; + } +}; + +STATUS +SampleClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) +{ + UNUSED_PARAM(custom_handle); + LOG_WARN("Reporting storage overflow. Bytes remaining " << remaining_bytes); + return STATUS_SUCCESS; +} - if (terminate_pipeline && data->main_loop != NULL) { - LOG_WARN("Terminating pipeline due to unrecoverable stream error: " << status_code); - g_main_loop_quit(data->main_loop); - } +STATUS SampleStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_buffering_ack) +{ + LOG_WARN("Reporting stream stale. Last ACK received " << last_buffering_ack); + return STATUS_SUCCESS; +} - return STATUS_SUCCESS; +STATUS +SampleStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + UINT64 errored_timecode, STATUS status_code) +{ + LOG_ERROR("Reporting stream error. 
Errored timecode: " << errored_timecode << " Status: " << status_code); + CustomData* data = reinterpret_cast(custom_data); + bool terminate_pipeline = false; + + // Terminate pipeline if error is not retriable or if error is retriable but we are streaming file. + // When streaming file, we choose to terminate the pipeline on error because the easiest way to recover + // is to stream the file from the beginning again. + // In realtime streaming, retriable error can be handled underneath. Otherwise terminate pipeline + // and store error status if error is fatal. + if ((IS_RETRIABLE_ERROR(status_code) && data->uploading_file) || (!IS_RETRIABLE_ERROR(status_code) && !IS_RECOVERABLE_ERROR(status_code))) { + data->stream_status = status_code; + terminate_pipeline = true; } - STATUS - SampleStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode) { - LOG_WARN("Reporting dropped frame. Frame timecode " << dropped_frame_timecode); - return STATUS_SUCCESS; + if (terminate_pipeline && data->main_loop != NULL) { + LOG_WARN("Terminating pipeline due to unrecoverable stream error: " << status_code); + g_main_loop_quit(data->main_loop); } - STATUS - SampleStreamCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck) { - CustomData *data = reinterpret_cast(custom_data); - if (data->uploading_file && pFragmentAck->ackType == FRAGMENT_ACK_TYPE_PERSISTED) { - std::unique_lock lk(data->file_list_mtx); - uint32_t last_unpersisted_file_idx = data->last_unpersisted_file_idx.load(); - uint64_t last_frag_ts = data->file_list.at(last_unpersisted_file_idx).last_fragment_ts / - duration_cast(milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS)).count(); - if (last_frag_ts != 0 && last_frag_ts == pFragmentAck->timestamp) { - data->last_unpersisted_file_idx = last_unpersisted_file_idx + 1; - LOG_INFO("Successfully persisted 
file " << data->file_list.at(last_unpersisted_file_idx).path); - } + return STATUS_SUCCESS; +} + +STATUS +SampleStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode) +{ + LOG_WARN("Reporting dropped frame. Frame timecode " << dropped_frame_timecode); + return STATUS_SUCCESS; +} + +STATUS +SampleStreamCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + PFragmentAck pFragmentAck) +{ + CustomData* data = reinterpret_cast(custom_data); + if (data->uploading_file && pFragmentAck->ackType == FRAGMENT_ACK_TYPE_PERSISTED) { + std::unique_lock lk(data->file_list_mtx); + uint32_t last_unpersisted_file_idx = data->last_unpersisted_file_idx.load(); + uint64_t last_frag_ts = data->file_list.at(last_unpersisted_file_idx).last_fragment_ts / + duration_cast(milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS)).count(); + if (last_frag_ts != 0 && last_frag_ts == pFragmentAck->timestamp) { + data->last_unpersisted_file_idx = last_unpersisted_file_idx + 1; + LOG_INFO("Successfully persisted file " << data->file_list.at(last_unpersisted_file_idx).path); } - LOG_DEBUG("Reporting fragment ack received. Ack timecode " << pFragmentAck->timestamp); - return STATUS_SUCCESS; } + LOG_DEBUG("Reporting fragment ack received. 
Ack timecode " << pFragmentAck->timestamp); + return STATUS_SUCCESS; +} -} // namespace video -} // namespace kinesis -} // namespace amazonaws -} // namespace com; +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com -void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nanoseconds &dts, FRAME_FLAGS flags, - void *data, size_t len, UINT64 track_id) { +void create_kinesis_video_frame(Frame* frame, const nanoseconds& pts, const nanoseconds& dts, FRAME_FLAGS flags, void* data, size_t len, + UINT64 track_id) +{ frame->flags = flags; frame->decodingTs = static_cast(dts.count()) / DEFAULT_TIME_UNIT_IN_NANOS; frame->presentationTs = static_cast(pts.count()) / DEFAULT_TIME_UNIT_IN_NANOS; @@ -340,7 +338,8 @@ void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nano frame->trackId = track_id; } -bool all_stream_started(CustomData *data) { +bool all_stream_started(CustomData* data) +{ bool started = true; if (data->stream_started.size() < data->total_track_count) { started = false; @@ -356,38 +355,39 @@ bool all_stream_started(CustomData *data) { return started; } -void kinesis_video_stream_init(CustomData *data); +void kinesis_video_stream_init(CustomData* data); -static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { +static GstFlowReturn on_new_sample(GstElement* sink, CustomData* data) +{ std::unique_lock lk(data->audio_video_sync_mtx); - GstSample *sample = nullptr; - GstBuffer *buffer; + GstSample* sample = nullptr; + GstBuffer* buffer; size_t buffer_size; bool delta, dropFrame; FRAME_FLAGS kinesis_video_flags; - uint8_t *data_buffer; + uint8_t* data_buffer; Frame frame; GstFlowReturn ret = GST_FLOW_OK; STATUS curr_stream_status = data->stream_status.load(); - GstSegment *segment; + GstSegment* segment; GstClockTime buf_pts, buf_dts; gint dts_sign; - gchar *g_stream_handle_key = gst_element_get_name(sink); + gchar* g_stream_handle_key = gst_element_get_name(sink); int 
track_id = (string(g_stream_handle_key).back()) - '0'; g_free(g_stream_handle_key); GstMapInfo info; info.data = nullptr; - sample = gst_app_sink_pull_sample(GST_APP_SINK (sink)); + sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); // extract cpd for the first frame for each track if (!data->stream_started[track_id]) { data->stream_started[track_id] = true; - GstCaps *gstcaps = (GstCaps *) gst_sample_get_caps(sample); + GstCaps* gstcaps = (GstCaps*) gst_sample_get_caps(sample); GST_LOG("caps are %" GST_PTR_FORMAT, gstcaps); - GstStructure *gststructforcaps = gst_caps_get_structure(gstcaps, 0); - const GValue *gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); - gchar *cpd = gst_value_serialize(gstStreamFormat); + GstStructure* gststructforcaps = gst_caps_get_structure(gstcaps, 0); + const GValue* gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); + gchar* cpd = gst_value_serialize(gstStreamFormat); data->kinesis_video_stream->start(std::string(cpd), track_id); g_free(cpd); @@ -402,11 +402,9 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { if (!data->uploading_file) { goto CleanUp; } - data->audio_video_sync_cv.wait_for(lk, seconds(DEFAULT_AUDIO_VIDEO_DRIFT_TIMEOUT_SECOND), [data]{ - return all_stream_started(data); - }); + data->audio_video_sync_cv.wait_for(lk, seconds(DEFAULT_AUDIO_VIDEO_DRIFT_TIMEOUT_SECOND), [data] { return all_stream_started(data); }); - if(!all_stream_started(data)) { + if (!all_stream_started(data)) { LOG_ERROR("Drift between audio and video is above threshold"); ret = GST_FLOW_ERROR; goto CleanUp; @@ -419,11 +417,10 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { buffer = gst_sample_get_buffer(sample); buffer_size = gst_buffer_get_size(buffer); - dropFrame = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_CORRUPTED) || - GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY) || - (GST_BUFFER_FLAGS(buffer) == 
GST_BUFFER_FLAG_DISCONT) || - (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) || - (!GST_BUFFER_PTS_IS_VALID(buffer)); //frame with invalid pts cannot be processed. + dropFrame = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_CORRUPTED) || GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY) || + (GST_BUFFER_FLAGS(buffer) == GST_BUFFER_FLAG_DISCONT) || + (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) || + (!GST_BUFFER_PTS_IS_VALID(buffer)); // frame with invalid pts cannot be processed. if (dropFrame) { if (!GST_BUFFER_PTS_IS_VALID(buffer)) { LOG_WARN("Dropping frame due to invalid presentation timestamp."); @@ -467,7 +464,6 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { } if (data->uploading_file) { - data->max_frame_pts = MAX(data->max_frame_pts, buffer->pts); // when reading file using gstreamer, dts is undefined. 
@@ -485,11 +481,11 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { buffer->pts += data->producer_start_time - data->first_pts; } - if (!gst_buffer_map(buffer, &info, GST_MAP_READ)){ + if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) { goto CleanUp; } - create_kinesis_video_frame(&frame, std::chrono::nanoseconds(buffer->pts), std::chrono::nanoseconds(buffer->dts), - kinesis_video_flags, info.data, info.size, track_id); + create_kinesis_video_frame(&frame, std::chrono::nanoseconds(buffer->pts), std::chrono::nanoseconds(buffer->dts), kinesis_video_flags, info.data, + info.size, track_id); data->kinesis_video_stream->putFrame(frame); @@ -507,13 +503,14 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { } /* This function is called when an error message is posted on the bus */ -static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { - GError *err; - gchar *debug_info; +static void error_cb(GstBus* bus, GstMessage* msg, CustomData* data) +{ + GError* err; + gchar* debug_info; /* Print error details on the screen */ gst_message_parse_error(msg, &err, &debug_info); - g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); + g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message); g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none"); g_clear_error(&err); g_free(debug_info); @@ -521,15 +518,15 @@ static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { g_main_loop_quit(data->main_loop); } -static void eos_cb(GstElement *sink, CustomData *data) { +static void eos_cb(GstElement* sink, CustomData* data) +{ if (!data->eos_triggered.load()) { // Media pipeline for one track has ended. Next time eos_cb is called means the entire file has been received. data->eos_triggered = true; data->audio_video_sync_cv.notify_all(); } else { - // bookkeeping base_pts. add 1ms to avoid overlap. 
- data->base_pts += + data->max_frame_pts + duration_cast(milliseconds(1)).count(); + data->base_pts += +data->max_frame_pts + duration_cast(milliseconds(1)).count(); data->max_frame_pts = 0; { @@ -543,13 +540,14 @@ static void eos_cb(GstElement *sink, CustomData *data) { } } -static gboolean demux_pad_cb(GstElement *element, GstPad *pad, CustomData *data) { - GstPad *video_sink = gst_element_get_static_pad(GST_ELEMENT(data->video_queue), "sink"); - GstPad *audio_sink = gst_element_get_static_pad(GST_ELEMENT(data->audio_queue), "sink"); +static gboolean demux_pad_cb(GstElement* element, GstPad* pad, CustomData* data) +{ + GstPad* video_sink = gst_element_get_static_pad(GST_ELEMENT(data->video_queue), "sink"); + GstPad* audio_sink = gst_element_get_static_pad(GST_ELEMENT(data->audio_queue), "sink"); GstPadLinkReturn link_ret; gboolean ret = TRUE; - gchar *pad_name = gst_pad_get_name(pad); + gchar* pad_name = gst_pad_get_name(pad); // link queue to corresponding sinks if (gst_pad_can_link(pad, video_sink)) { @@ -568,24 +566,24 @@ static gboolean demux_pad_cb(GstElement *element, GstPad *pad, CustomData *data) return ret; } -void kinesis_video_init(CustomData *data) { +void kinesis_video_init(CustomData* data) +{ unique_ptr device_info_provider(new SampleDeviceInfoProvider()); unique_ptr client_callback_provider(new SampleClientCallbackProvider()); - unique_ptr stream_callback_provider(new SampleStreamCallbackProvider( - reinterpret_cast(data))); + unique_ptr stream_callback_provider(new SampleStreamCallbackProvider(reinterpret_cast(data))); - char const *accessKey; - char const *secretKey; - char const *sessionToken; - char const *defaultRegion; + char const* accessKey; + char const* secretKey; + char const* sessionToken; + char const* defaultRegion; string defaultRegionStr; string sessionTokenStr; - char const *iot_get_credential_endpoint; - char const *cert_path; - char const *private_key_path; - char const *role_alias; - char const *ca_cert_path; + char const* 
iot_get_credential_endpoint; + char const* cert_path; + char const* private_key_path; + char const* role_alias; + char const* ca_cert_path; unique_ptr credential_provider; @@ -596,9 +594,7 @@ void kinesis_video_init(CustomData *data) { } LOG_INFO("Using region: " << defaultRegionStr); - if (nullptr != (accessKey = getenv(ACCESS_KEY_ENV_VAR)) && - nullptr != (secretKey = getenv(SECRET_KEY_ENV_VAR))) { - + if (nullptr != (accessKey = getenv(ACCESS_KEY_ENV_VAR)) && nullptr != (secretKey = getenv(SECRET_KEY_ENV_VAR))) { LOG_INFO("Using aws credentials for Kinesis Video Streams"); if (nullptr != (sessionToken = getenv(SESSION_TOKEN_ENV_VAR))) { LOG_INFO("Session token detected."); @@ -608,41 +604,30 @@ void kinesis_video_init(CustomData *data) { sessionTokenStr = ""; } - data->credential.reset(new Credentials(string(accessKey), - string(secretKey), - sessionTokenStr, - std::chrono::seconds(DEFAULT_CREDENTIAL_EXPIRATION_SECONDS))); + data->credential.reset( + new Credentials(string(accessKey), string(secretKey), sessionTokenStr, std::chrono::seconds(DEFAULT_CREDENTIAL_EXPIRATION_SECONDS))); credential_provider.reset(new SampleCredentialProvider(*data->credential.get())); - } else if (nullptr != (iot_get_credential_endpoint = getenv("IOT_GET_CREDENTIAL_ENDPOINT")) && - nullptr != (cert_path = getenv("CERT_PATH")) && - nullptr != (private_key_path = getenv("PRIVATE_KEY_PATH")) && - nullptr != (role_alias = getenv("ROLE_ALIAS")) && + } else if (nullptr != (iot_get_credential_endpoint = getenv("IOT_GET_CREDENTIAL_ENDPOINT")) && nullptr != (cert_path = getenv("CERT_PATH")) && + nullptr != (private_key_path = getenv("PRIVATE_KEY_PATH")) && nullptr != (role_alias = getenv("ROLE_ALIAS")) && nullptr != (ca_cert_path = getenv("CA_CERT_PATH"))) { LOG_INFO("Using IoT credentials for Kinesis Video Streams"); - credential_provider.reset(new IotCertCredentialProvider(iot_get_credential_endpoint, - cert_path, - private_key_path, - role_alias, - ca_cert_path, - data->stream_name)); + 
credential_provider.reset( + new IotCertCredentialProvider(iot_get_credential_endpoint, cert_path, private_key_path, role_alias, ca_cert_path, data->stream_name)); } else { LOG_AND_THROW("No valid credential method was found"); } - data->kinesis_video_producer = KinesisVideoProducer::createSync(std::move(device_info_provider), - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credential_provider), - API_CALL_CACHE_TYPE_ALL, - defaultRegionStr); + data->kinesis_video_producer = + KinesisVideoProducer::createSync(std::move(device_info_provider), std::move(client_callback_provider), std::move(stream_callback_provider), + std::move(credential_provider), API_CALL_CACHE_TYPE_ALL, defaultRegionStr); LOG_DEBUG("Client is ready"); } -void kinesis_video_stream_init(CustomData *data) { - +void kinesis_video_stream_init(CustomData* data) +{ STREAMING_TYPE streaming_type = DEFAULT_STREAMING_TYPE; data->use_absolute_fragment_times = DEFAULT_ABSOLUTE_FRAGMENT_TIMES; @@ -652,35 +637,13 @@ void kinesis_video_stream_init(CustomData *data) { } unique_ptr stream_definition(new StreamDefinition( - data->stream_name, - hours(DEFAULT_RETENTION_PERIOD_HOURS), - nullptr, - DEFAULT_KMS_KEY_ID, - streaming_type, - DEFAULT_CONTENT_TYPE, - duration_cast (seconds(DEFAULT_MAX_LATENCY_SECONDS)), - milliseconds(DEFAULT_FRAGMENT_DURATION_MILLISECONDS), - milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS), - DEFAULT_KEY_FRAME_FRAGMENTATION, - DEFAULT_FRAME_TIMECODES, - data->use_absolute_fragment_times, - DEFAULT_FRAGMENT_ACKS, - DEFAULT_RESTART_ON_ERROR, - DEFAULT_RECALCULATE_METRICS, - true, - NAL_ADAPTATION_FLAG_NONE, - DEFAULT_STREAM_FRAMERATE, - DEFAULT_AVG_BANDWIDTH_BPS, - seconds(DEFAULT_BUFFER_DURATION_SECONDS), - seconds(DEFAULT_REPLAY_DURATION_SECONDS), - seconds(DEFAULT_CONNECTION_STALENESS_SECONDS), - DEFAULT_CODEC_ID, - DEFAULT_TRACKNAME, - nullptr, - 0, - MKV_TRACK_INFO_TYPE_VIDEO, - vector(), - DEFAULT_VIDEO_TRACKID)); + data->stream_name, 
hours(DEFAULT_RETENTION_PERIOD_HOURS), nullptr, DEFAULT_KMS_KEY_ID, streaming_type, DEFAULT_CONTENT_TYPE, + duration_cast(seconds(DEFAULT_MAX_LATENCY_SECONDS)), milliseconds(DEFAULT_FRAGMENT_DURATION_MILLISECONDS), + milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS), DEFAULT_KEY_FRAME_FRAGMENTATION, DEFAULT_FRAME_TIMECODES, + data->use_absolute_fragment_times, DEFAULT_FRAGMENT_ACKS, DEFAULT_RESTART_ON_ERROR, DEFAULT_RECALCULATE_METRICS, true, + NAL_ADAPTATION_FLAG_NONE, DEFAULT_STREAM_FRAMERATE, DEFAULT_AVG_BANDWIDTH_BPS, seconds(DEFAULT_BUFFER_DURATION_SECONDS), + seconds(DEFAULT_REPLAY_DURATION_SECONDS), seconds(DEFAULT_CONNECTION_STALENESS_SECONDS), DEFAULT_CODEC_ID, DEFAULT_TRACKNAME, nullptr, 0, + MKV_TRACK_INFO_TYPE_VIDEO, vector(), DEFAULT_VIDEO_TRACKID)); stream_definition->addTrack(DEFAULT_AUDIO_TRACKID, DEFAULT_AUDIO_TRACK_NAME, DEFAULT_AUDIO_CODEC_ID, MKV_TRACK_INFO_TYPE_AUDIO); data->kinesis_video_stream = data->kinesis_video_producer->createStreamSync(std::move(stream_definition)); @@ -698,14 +661,14 @@ void kinesis_video_stream_init(CustomData *data) { LOG_DEBUG("Stream is ready: " << data->stream_name); } -int gstreamer_init(int argc, char *argv[], CustomData &data) { - +int gstreamer_init(int argc, char* argv[], CustomData& data) +{ GstStateChangeReturn ret; // Reset first frame pts data.first_pts = GST_CLOCK_TIME_NONE; - //reset state + // reset state data.eos_triggered = false; /* init GStreamer */ @@ -713,32 +676,32 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { GstElement *appsink_video, *appsink_audio, *audio_queue, *video_queue, *pipeline, *video_filter, *audio_filter; string video_caps_string, audio_caps_string; - GstCaps *caps; + GstCaps* caps; video_caps_string = "video/x-h264, stream-format=(string) avc, alignment=(string) au"; audio_caps_string = "audio/mpeg, stream-format=(string) raw"; video_filter = gst_element_factory_make("capsfilter", "video_filter"); caps = gst_caps_from_string(video_caps_string.c_str()); - 
g_object_set(G_OBJECT (video_filter), "caps", caps, NULL); + g_object_set(G_OBJECT(video_filter), "caps", caps, NULL); gst_caps_unref(caps); audio_filter = gst_element_factory_make("capsfilter", "audio_filter"); caps = gst_caps_from_string(audio_caps_string.c_str()); - g_object_set(G_OBJECT (audio_filter), "caps", caps, NULL); + g_object_set(G_OBJECT(audio_filter), "caps", caps, NULL); gst_caps_unref(caps); // hardcoding appsink name and track id const string video_appsink_name = "appsink_" + to_string(DEFAULT_VIDEO_TRACKID); const string audio_appsink_name = "appsink_" + to_string(DEFAULT_AUDIO_TRACKID); - appsink_video = gst_element_factory_make("appsink", (gchar *) video_appsink_name.c_str()); - appsink_audio = gst_element_factory_make("appsink", (gchar *) audio_appsink_name.c_str()); + appsink_video = gst_element_factory_make("appsink", (gchar*) video_appsink_name.c_str()); + appsink_audio = gst_element_factory_make("appsink", (gchar*) audio_appsink_name.c_str()); /* configure appsink */ - g_object_set(G_OBJECT (appsink_video), "emit-signals", TRUE, "sync", FALSE, NULL); + g_object_set(G_OBJECT(appsink_video), "emit-signals", TRUE, "sync", FALSE, NULL); g_signal_connect(appsink_video, "new-sample", G_CALLBACK(on_new_sample), &data); g_signal_connect(appsink_video, "eos", G_CALLBACK(eos_cb), &data); - g_object_set(G_OBJECT (appsink_audio), "emit-signals", TRUE, "sync", FALSE, NULL); + g_object_set(G_OBJECT(appsink_audio), "emit-signals", TRUE, "sync", FALSE, NULL); g_signal_connect(appsink_audio, "new-sample", G_CALLBACK(on_new_sample), &data); g_signal_connect(appsink_audio, "eos", G_CALLBACK(eos_cb), &data); LOG_DEBUG("appsink configured"); @@ -774,7 +737,7 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { } filesrc = gst_element_factory_make("filesrc", "filesrc"); - g_object_set(G_OBJECT (filesrc), "location", file_path.c_str(), NULL); + g_object_set(G_OBJECT(filesrc), "location", file_path.c_str(), NULL); demux = 
gst_element_factory_make(demuxer.c_str(), "demux"); h264parse = gst_element_factory_make("h264parse", "h264parse"); aac_parse = gst_element_factory_make("aacparse", "aac_parse"); @@ -784,26 +747,22 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { return 1; } - gst_bin_add_many(GST_BIN (pipeline), appsink_video, appsink_audio, filesrc, demux, h264parse, aac_parse, video_queue, - audio_queue, video_filter, audio_filter, - NULL); + gst_bin_add_many(GST_BIN(pipeline), appsink_video, appsink_audio, filesrc, demux, h264parse, aac_parse, video_queue, audio_queue, + video_filter, audio_filter, NULL); - if (!gst_element_link_many(filesrc, demux, - NULL)) { + if (!gst_element_link_many(filesrc, demux, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); return 1; } - if (!gst_element_link_many(video_queue, h264parse, video_filter, appsink_video, - NULL)) { + if (!gst_element_link_many(video_queue, h264parse, video_filter, appsink_video, NULL)) { g_printerr("Video elements could not be linked.\n"); gst_object_unref(pipeline); return 1; } - if (!gst_element_link_many(audio_queue, aac_parse, audio_filter, appsink_audio, - NULL)) { + if (!gst_element_link_many(audio_queue, aac_parse, audio_filter, appsink_audio, NULL)) { g_printerr("Audio elements could not be linked.\n"); gst_object_unref(pipeline); return 1; @@ -812,7 +771,6 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { g_signal_connect(demux, "pad-added", G_CALLBACK(demux_pad_cb), &data); } else { - GstElement *videosrc, *videoconvert, *h264enc, *video_src_filter, *h264parse; GstElement *audiosrc, *audioconvert, *aac_enc, *audio_resample, *audio_src_filter, *aac_parse; string audio_device, video_device; @@ -831,18 +789,18 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { if (!video_device.empty()) { LOG_INFO("Using video device " << video_device); // find your video device by running gst-device-monitor-1.0 - g_object_set(G_OBJECT (videosrc), 
"device-path", video_device.c_str(), NULL); + g_object_set(G_OBJECT(videosrc), "device-path", video_device.c_str(), NULL); } else { LOG_INFO("Using default video device"); } h264enc = gst_element_factory_make("x264enc", "h264enc"); - g_object_set(G_OBJECT (h264enc), "bframes", 0, "key-int-max", 45, "bitrate", 512, NULL); - gst_util_set_object_arg(G_OBJECT (h264enc), "tune", "zerolatency"); + g_object_set(G_OBJECT(h264enc), "bframes", 0, "key-int-max", 45, "bitrate", 512, NULL); + gst_util_set_object_arg(G_OBJECT(h264enc), "tune", "zerolatency"); audiosrc = gst_element_factory_make("wasapisrc", "audiosrc"); if (!audio_device.empty()) { LOG_INFO("Using audio device " << audio_device); // find your audio device by running gst-device-monitor-1.0 - g_object_set(G_OBJECT (audiosrc), "device", audio_device.c_str(), "low-latency", TRUE, "use-audioclient3", TRUE, NULL); + g_object_set(G_OBJECT(audiosrc), "device", audio_device.c_str(), "low-latency", TRUE, "use-audioclient3", TRUE, NULL); } else { LOG_ERROR("No audio device found. 
Please do export " << AUDIO_DEVICE_ENV_VAR << "=audio_device to config audio device"); return 1; @@ -863,11 +821,11 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { LOG_INFO("Using audio device " << audio_device_index); videosrc = gst_element_factory_make("avfvideosrc", "videosrc"); - g_object_set(G_OBJECT (videosrc), "device-index", video_device_index, NULL); + g_object_set(G_OBJECT(videosrc), "device-index", video_device_index, NULL); h264enc = gst_element_factory_make("vtenc_h264_hw", "h264enc"); - g_object_set(G_OBJECT (h264enc), "allow-frame-reordering", FALSE, "realtime", TRUE, "max-keyframe-interval", 45, "bitrate", 512, NULL); + g_object_set(G_OBJECT(h264enc), "allow-frame-reordering", FALSE, "realtime", TRUE, "max-keyframe-interval", 45, "bitrate", 512, NULL); audiosrc = gst_element_factory_make("osxaudiosrc", "audiosrc"); - g_object_set(G_OBJECT (audiosrc), "device", audio_device_index, NULL); + g_object_set(G_OBJECT(audiosrc), "device", audio_device_index, NULL); // mac quirk video_caps_string += ", width=(int) 1280, height=(int) 720"; @@ -877,27 +835,28 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { if (!video_device.empty()) { LOG_INFO("Using video device " << video_device); // find your video device by running path/to/sdk/kinesis-video-native-build/downloads/local/bin/gst-device-monitor-1.0 - g_object_set(G_OBJECT (videosrc), "device", video_device.c_str(), NULL); + g_object_set(G_OBJECT(videosrc), "device", video_device.c_str(), NULL); } else { LOG_INFO("Using default video device"); } if (nullptr != (h264enc = gst_element_factory_make("omxh264enc", "h264enc"))) { // setting target bitrate in omx is currently broken: https://gitlab.freedesktop.org/gstreamer/gst-omx/issues/21 - g_object_set(G_OBJECT (h264enc), "periodicty-idr", 45, "inline-header", FALSE, NULL); + g_object_set(G_OBJECT(h264enc), "periodicty-idr", 45, "inline-header", FALSE, NULL); } else { h264enc = gst_element_factory_make("x264enc", "h264enc"); - 
g_object_set(G_OBJECT (h264enc), "bframes", 0, "key-int-max", 45, "bitrate", 512, NULL); - gst_util_set_object_arg(G_OBJECT (h264enc), "tune", "zerolatency"); + g_object_set(G_OBJECT(h264enc), "bframes", 0, "key-int-max", 45, "bitrate", 512, NULL); + gst_util_set_object_arg(G_OBJECT(h264enc), "tune", "zerolatency"); } audiosrc = gst_element_factory_make("alsasrc", "audiosrc"); if (!audio_device.empty()) { LOG_INFO("Using audio device " << audio_device); // find your audio recording device by running "arecord -l" - g_object_set(G_OBJECT (audiosrc), "device", audio_device.c_str(), NULL); + g_object_set(G_OBJECT(audiosrc), "device", audio_device.c_str(), NULL); } else { - LOG_ERROR("No audio device found. Please do export " << AUDIO_DEVICE_ENV_VAR << "=audio_device to config audio device (e.g. export AWS_KVS_AUDIO_DEVICE=hw:1,0)"); + LOG_ERROR("No audio device found. Please do export " << AUDIO_DEVICE_ENV_VAR + << "=audio_device to config audio device (e.g. export AWS_KVS_AUDIO_DEVICE=hw:1,0)"); return 1; } @@ -908,12 +867,12 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { video_src_filter = gst_element_factory_make("capsfilter", "video_src_filter"); caps = gst_caps_from_string(video_caps_string.c_str()); - g_object_set(G_OBJECT (video_src_filter), "caps", caps, NULL); + g_object_set(G_OBJECT(video_src_filter), "caps", caps, NULL); gst_caps_unref(caps); audio_src_filter = gst_element_factory_make("capsfilter", "audio_src_filter"); caps = gst_caps_from_string(audio_caps_string.c_str()); - g_object_set(G_OBJECT (audio_src_filter), "caps", caps, NULL); + g_object_set(G_OBJECT(audio_src_filter), "caps", caps, NULL); gst_caps_unref(caps); videoconvert = gst_element_factory_make("videoconvert", "videoconvert"); @@ -921,28 +880,24 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { aac_enc = gst_element_factory_make("avenc_aac", "aac_enc"); audio_resample = gst_element_factory_make("audioresample", "audioresample"); - if (!videosrc || !h264enc 
|| !audiosrc || !audioconvert || !aac_enc || !videoconvert || !audio_resample || - !audio_src_filter || !video_src_filter || !h264parse || !aac_parse) { + if (!videosrc || !h264enc || !audiosrc || !audioconvert || !aac_enc || !videoconvert || !audio_resample || !audio_src_filter || + !video_src_filter || !h264parse || !aac_parse) { g_printerr("Not all elements could be created:\n"); return 1; } - gst_bin_add_many(GST_BIN (pipeline), appsink_video, - appsink_audio, videosrc, h264enc, audiosrc, audioconvert, aac_enc, videoconvert, video_queue, - audio_queue, video_filter, audio_filter, audio_resample, audio_src_filter, video_src_filter, - h264parse, aac_parse, NULL); + gst_bin_add_many(GST_BIN(pipeline), appsink_video, appsink_audio, videosrc, h264enc, audiosrc, audioconvert, aac_enc, videoconvert, + video_queue, audio_queue, video_filter, audio_filter, audio_resample, audio_src_filter, video_src_filter, h264parse, + aac_parse, NULL); - if (!gst_element_link_many(videosrc, videoconvert, video_src_filter, h264enc, h264parse, video_filter, video_queue, - appsink_video, - NULL)) { + if (!gst_element_link_many(videosrc, videoconvert, video_src_filter, h264enc, h264parse, video_filter, video_queue, appsink_video, NULL)) { g_printerr("Video elements could not be linked.\n"); gst_object_unref(pipeline); return 1; } if (!gst_element_link_many(audiosrc, audio_resample, audio_queue, audioconvert, audio_src_filter, aac_enc, aac_parse, audio_filter, - appsink_audio, - NULL)) { + appsink_audio, NULL)) { g_printerr("Audio elements could not be linked.\n"); gst_object_unref(pipeline); return 1; @@ -952,9 +907,9 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { data.pipeline = pipeline; /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ - GstBus *bus = gst_element_get_bus(pipeline); + GstBus* bus = gst_element_get_bus(pipeline); gst_bus_add_signal_watch(bus); - g_signal_connect (G_OBJECT(bus), "message::error", 
(GCallback) error_cb, &data); + g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_cb, &data); gst_object_unref(bus); ret = gst_element_set_state(pipeline, GST_STATE_PLAYING); @@ -976,20 +931,21 @@ int gstreamer_init(int argc, char *argv[], CustomData &data) { */ gst_bus_remove_signal_watch(bus); gst_element_set_state(pipeline, GST_STATE_NULL); - gst_object_unref(GST_OBJECT (pipeline)); + gst_object_unref(GST_OBJECT(pipeline)); g_main_loop_unref(data.main_loop); data.main_loop = NULL; return 0; } -int main(int argc, char *argv[]) { +int main(int argc, char* argv[]) +{ PropertyConfigurator::doConfigure("../kvs_log_configuration"); if (argc < 2) { - LOG_ERROR( - "Usage: AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_audio_video_sample_app my-stream-name /path/to/file" - "AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_audio_video_sample_app my-stream-name"); + LOG_ERROR("Usage: AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_audio_video_sample_app " + "my-stream-name /path/to/file" + "AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_audio_video_sample_app my-stream-name"); return 1; } @@ -1001,7 +957,6 @@ int main(int argc, char *argv[]) { int file_retry_count = PUTFRAME_FAILURE_RETRY_COUNT; STATUS stream_status = STATUS_SUCCESS; - /* init Kinesis Video */ STRNCPY(stream_name, argv[1], MAX_STREAM_NAME_LEN); stream_name[MAX_STREAM_NAME_LEN - 1] = '\0'; @@ -1011,7 +966,7 @@ int main(int argc, char *argv[]) { if (argc >= 3) { // skip over stream name - for(int i = 2; i < argc; ++i) { + for (int i = 2; i < argc; ++i) { file_path = string(argv[i]); // file path should be at least 4 char (shortest example: a.ts) if (file_path.size() < 4) { @@ -1029,7 +984,7 @@ int main(int argc, char *argv[]) { try { kinesis_video_init(&data); kinesis_video_stream_init(&data); - } catch (runtime_error &err) { + 
} catch (runtime_error& err) { LOG_ERROR("Failed to initialize kinesis video."); return 0; } @@ -1040,8 +995,7 @@ int main(int argc, char *argv[]) { uint32_t i = data.last_unpersisted_file_idx.load(); bool continue_uploading = true; - for(; i < data.file_list.size() && continue_uploading; ++i) { - + for (; i < data.file_list.size() && continue_uploading; ++i) { data.current_file_idx = i; LOG_DEBUG("Attempt to upload file: " << data.file_list[i].path); @@ -1082,7 +1036,7 @@ int main(int argc, char *argv[]) { if (file_retry_count == 0) { i = data.last_unpersisted_file_idx.load(); LOG_ERROR("Failed to upload file " << data.file_list[i].path << " after retrying. Terminating."); - do_retry = false; // exit while loop + do_retry = false; // exit while loop } else { // flush out buffers data.kinesis_video_stream->resetStream(); @@ -1091,7 +1045,7 @@ int main(int argc, char *argv[]) { data.stream_started.clear(); } } - } while(do_retry); + } while (do_retry); } else { // non file uploading scenario diff --git a/samples/kvs_gstreamer_file_uploader_sample.cpp b/samples/kvs_gstreamer_file_uploader_sample.cpp index c9aea07a..d55dcbe7 100644 --- a/samples/kvs_gstreamer_file_uploader_sample.cpp +++ b/samples/kvs_gstreamer_file_uploader_sample.cpp @@ -10,41 +10,47 @@ using namespace std; extern "C" { #endif -int gstreamer_init(int, char **); +int gstreamer_init(int, char**); #ifdef __cplusplus } #endif -#define DEFAULT_RETRY_COUNT 3 -#define DEFAULT_STREAMING_TYPE "offline" +#define DEFAULT_RETRY_COUNT 3 +#define DEFAULT_STREAMING_TYPE "offline" -#define PROPERTY_PREFIX "KVS_" -#define PROPERTY_KEY_MAX_LEN 32 -#define KVS_SINK_PLUGIN_NAME "kvssink" +#define PROPERTY_PREFIX "KVS_" +#define PROPERTY_KEY_MAX_LEN 32 +#define KVS_SINK_PLUGIN_NAME "kvssink" -#define STREAM_STATUS_OK 0 -#define STREAM_STATUS_FAILED 1 +#define STREAM_STATUS_OK 0 +#define STREAM_STATUS_FAILED 1 -#define CONTENT_TYPE_VIDEO_ONLY 0 -#define CONTENT_TYPE_AUDIO_VIDEO 1 +#define CONTENT_TYPE_VIDEO_ONLY 0 
+#define CONTENT_TYPE_AUDIO_VIDEO 1 #define APP_NAME "kvs_gstreamer_file_uploader_sample" -#define LOG_INFO(fmt, ...) \ - do { fprintf(stdout, "[INFO] " APP_NAME ": " fmt "\n", ##__VA_ARGS__); } while(0) -#define LOG_ERROR(fmt, ...) \ - do { fprintf(stderr, "[ERROR] " APP_NAME ": " fmt "\n", ##__VA_ARGS__); } while(0) - -static const char* AVAILABLE_PROPERTIES[] = { - PROPERTY_PREFIX "IOT_CERTIFICATE", "Use aws iot certificate to obtain credentials", - PROPERTY_PREFIX "RETENTION_PERIOD", "Length of time stream is preserved. Unit: hours", - PROPERTY_PREFIX "STORAGE_SIZE", "Storage Size. Unit: MB", - NULL -}; +#define LOG_INFO(fmt, ...) \ + do { \ + fprintf(stdout, "[INFO] " APP_NAME ": " fmt "\n", ##__VA_ARGS__); \ + } while (0) +#define LOG_ERROR(fmt, ...) \ + do { \ + fprintf(stderr, "[ERROR] " APP_NAME ": " fmt "\n", ##__VA_ARGS__); \ + } while (0) + +static const char* AVAILABLE_PROPERTIES[] = {PROPERTY_PREFIX "IOT_CERTIFICATE", + "Use aws iot certificate to obtain credentials", + PROPERTY_PREFIX "RETENTION_PERIOD", + "Length of time stream is preserved. Unit: hours", + PROPERTY_PREFIX "STORAGE_SIZE", + "Storage Size. 
Unit: MB", + NULL}; typedef struct _CustomData { - _CustomData(): - stream_status(STREAM_STATUS_OK) {} + _CustomData() : stream_status(STREAM_STATUS_OK) + { + } int stream_status; int content_type; string file_path; @@ -56,19 +62,20 @@ typedef struct _CustomData { // KVS_STREAM_NAME=file-uploader-sample // KVS_MAX_LATENCY=60 // - // kvssink_str is going to be equal to the following: + // kvssink_str is going to be equal to the following: // kvssink stream-name=file-uploader-sample max-latency=60 string kvssink_str; } CustomData; /* This function is called when an error message is posted on the bus */ -static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { - GError *err; - gchar *debug_info; +static void error_cb(GstBus* bus, GstMessage* msg, CustomData* data) +{ + GError* err; + gchar* debug_info; /* Print error details on the screen */ gst_message_parse_error(msg, &err, &debug_info); - LOG_ERROR("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message); + LOG_ERROR("Error received from element %s: %s", GST_OBJECT_NAME(msg->src), err->message); LOG_ERROR("Debugging information: %sn", debug_info ? debug_info : "none"); g_clear_error(&err); g_free(debug_info); @@ -76,11 +83,12 @@ static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { data->stream_status = STREAM_STATUS_FAILED; } -int gstreamer_init(int argc, char* argv[], CustomData *data) { - GstElement *pipeline; - GstMessage *msg; +int gstreamer_init(int argc, char* argv[], CustomData* data) +{ + GstElement* pipeline; + GstMessage* msg; GstStateChangeReturn gst_ret; - GError *error = NULL; + GError* error = NULL; string file_path = data->file_path; const char* demuxer = NULL; char pipeline_buf[4096]; @@ -106,17 +114,14 @@ int gstreamer_init(int argc, char* argv[], CustomData *data) { } if (data->content_type == CONTENT_TYPE_VIDEO_ONLY) { // video only - ret = snprintf(pipeline_buf, sizeof(pipeline_buf), - "filesrc location=%s ! %s ! h264parse ! 
video/x-h264,stream-format=avc,alignment=au ! %s", - file_path.c_str(), demuxer, data->kvssink_str.c_str() - ); + ret = snprintf(pipeline_buf, sizeof(pipeline_buf), "filesrc location=%s ! %s ! h264parse ! video/x-h264,stream-format=avc,alignment=au ! %s", + file_path.c_str(), demuxer, data->kvssink_str.c_str()); } else { // audio-video ret = snprintf(pipeline_buf, sizeof(pipeline_buf), - "filesrc location=%s ! %s name=demuxer " - "demuxer. ! queue ! h264parse ! video/x-h264,stream-format=avc,alignment=au ! %s name=sink " - "demuxer. ! queue ! aacparse ! audio/mpeg,stream-format=raw ! sink.", - file_path.c_str(), demuxer, data->kvssink_str.c_str() - ); + "filesrc location=%s ! %s name=demuxer " + "demuxer. ! queue ! h264parse ! video/x-h264,stream-format=avc,alignment=au ! %s name=sink " + "demuxer. ! queue ! aacparse ! audio/mpeg,stream-format=raw ! sink.", + file_path.c_str(), demuxer, data->kvssink_str.c_str()); } if (ret < 0) { LOG_ERROR("Pipeline is too long"); @@ -131,9 +136,9 @@ int gstreamer_init(int argc, char* argv[], CustomData *data) { } /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ - GstBus *bus = gst_element_get_bus(pipeline); + GstBus* bus = gst_element_get_bus(pipeline); gst_bus_add_signal_watch(bus); - g_signal_connect (G_OBJECT(bus), "message::error", (GCallback) error_cb, data); + g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_cb, data); gst_object_unref(bus); /* start streaming */ @@ -160,24 +165,23 @@ int gstreamer_init(int argc, char* argv[], CustomData *data) { return 0; } -string build_kvssink_str(string stream_name, unsigned long long file_start_time) { +string build_kvssink_str(string stream_name, unsigned long long file_start_time) +{ const char** property; stringstream ss; const char *key_raw, *value; int prefix_len = strlen(PROPERTY_PREFIX); char key[PROPERTY_KEY_MAX_LEN + 1]; - char *ch; + char* ch; - ss << KVS_SINK_PLUGIN_NAME - << " stream-name=" << 
stream_name - << " streaming-type=" DEFAULT_STREAMING_TYPE + ss << KVS_SINK_PLUGIN_NAME << " stream-name=" << stream_name << " streaming-type=" DEFAULT_STREAMING_TYPE << " file-start-time=" << file_start_time; for (property = AVAILABLE_PROPERTIES; *property != NULL; property += 2) { key_raw = property[0]; value = getenv(key_raw); if (value != NULL) { LOG_INFO("Found a property. Key: %s Value: %s", key_raw, value); - + // Remove property prefix and convert it into proper gstreamer syntax strncpy(key, key_raw + prefix_len, PROPERTY_KEY_MAX_LEN); for (ch = key; *ch != '\0'; ch++) { @@ -194,31 +198,34 @@ string build_kvssink_str(string stream_name, unsigned long long file_start_time) return ss.str(); } -void print_usage(char* program_path) { +void print_usage(char* program_path) +{ char padding[PROPERTY_KEY_MAX_LEN + 1]; - const char **property; + const char** property; int spaces; memset(padding, ' ', PROPERTY_KEY_MAX_LEN + 1); - printf( - "USAGE\n" - " %1$s [video-only|audio-video]\n\n" - "EXAMPLE\n" - " With AWS Credentials\n" - " AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= %1$s stream-sample video.mp4 0\n\n" - " With AWS IOT Credentials\n" - " IOT_CERTIFICATE=\"iot-certificate,endpoint=endpoint,cert-path=/path/to/certificate,key-path=/path/to/private/key,ca-path=/path/to/ca-cert,role-aliases=role-aliases\" \\\n" - " %1$s stream-sample video.mp4 0\n\n" - "ARGUMENTS\n" - " stream_name Name of the destination stream\n" - " file_path A path to a file that you want to upload\n" - " start_time Epoch time that the file starts in kinesis video stream. If zero, current time is used. Unit: Seconds\n" - " content_type Optional. 
Either \"video-only\" or \"audio-video\"\n\n" - "AVAILABLE PROPERTIES\n" - " AWS_DEFAULT_REGION AWS Region\n" - " AWS_ACCESS_KEY_ID AWS Access Key\n" - " AWS_SECRET_ACCESS_KEY AWS Secret Key\n" - " AWS_SESSION_TOKEN AWS Session Token\n", program_path); + printf("USAGE\n" + " %1$s [video-only|audio-video]\n\n" + "EXAMPLE\n" + " With AWS Credentials\n" + " AWS_ACCESS_KEY_ID= AWS_SECRET_ACCESS_KEY= %1$s stream-sample video.mp4 0\n\n" + " With AWS IOT Credentials\n" + " " + "IOT_CERTIFICATE=\"iot-certificate,endpoint=endpoint,cert-path=/path/to/certificate,key-path=/path/to/private/key,ca-path=/path/to/" + "ca-cert,role-aliases=role-aliases\" \\\n" + " %1$s stream-sample video.mp4 0\n\n" + "ARGUMENTS\n" + " stream_name Name of the destination stream\n" + " file_path A path to a file that you want to upload\n" + " start_time Epoch time that the file starts in kinesis video stream. If zero, current time is used. Unit: Seconds\n" + " content_type Optional. Either \"video-only\" or \"audio-video\"\n\n" + "AVAILABLE PROPERTIES\n" + " AWS_DEFAULT_REGION AWS Region\n" + " AWS_ACCESS_KEY_ID AWS Access Key\n" + " AWS_SECRET_ACCESS_KEY AWS Secret Key\n" + " AWS_SESSION_TOKEN AWS Session Token\n", + program_path); for (property = AVAILABLE_PROPERTIES; *property != NULL; property += 2) { spaces = PROPERTY_KEY_MAX_LEN - strlen(property[0]); @@ -231,7 +238,8 @@ void print_usage(char* program_path) { } } -int main(int argc, char* argv[]) { +int main(int argc, char* argv[]) +{ if (argc < 4 || argc > 5) { print_usage(argv[0]); return 1; @@ -242,8 +250,8 @@ int main(int argc, char* argv[]) { int retry_count = DEFAULT_RETRY_COUNT; int stream_status = STREAM_STATUS_OK; bool do_retry = true; - char *ptr; - FILE *file; + char* ptr; + FILE* file; if (strcmp(argv[1], "--help") == 0 || strcmp(argv[1], "-h") == 0) { print_usage(argv[0]); @@ -287,9 +295,8 @@ int main(int argc, char* argv[]) { // control will return after gstreamer_init after file eos or any GST_ERROR was put on the bus. 
ret = gstreamer_init(argc, argv, &data); if (ret != 0) { - LOG_ERROR( - "Failed to initialize gstreamer pipeline. Have you set GST_PLUGIN_PATH properly?\n\n" - " For example: export GST_PLUGIN_PATH=/build:$GST_PLUGIN_PATH"); + LOG_ERROR("Failed to initialize gstreamer pipeline. Have you set GST_PLUGIN_PATH properly?\n\n" + " For example: export GST_PLUGIN_PATH=/build:$GST_PLUGIN_PATH"); do_retry = false; } else if (data.stream_status == STREAM_STATUS_OK) { LOG_INFO("Persisted successfully. File: %s", data.file_path.c_str()); @@ -300,7 +307,7 @@ int main(int argc, char* argv[]) { } else { LOG_INFO("Failed to persist %s, retrying...", data.file_path.c_str()); } - } while(do_retry); + } while (do_retry); return 0; } diff --git a/samples/kvs_gstreamer_multistream_sample.cpp b/samples/kvs_gstreamer_multistream_sample.cpp index fe2b9e22..f17fce73 100644 --- a/samples/kvs_gstreamer_multistream_sample.cpp +++ b/samples/kvs_gstreamer_multistream_sample.cpp @@ -17,7 +17,7 @@ using namespace log4cplus; extern "C" { #endif -int gstreamer_init(int, char **); +int gstreamer_init(int, char**); #ifdef __cplusplus } @@ -28,152 +28,157 @@ LOGGER_TAG("com.amazonaws.kinesis.video.gstreamer"); /* * https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers */ -#define MAX_URL_LENGTH 65536 -#define DEFAULT_RETENTION_PERIOD_HOURS 2 -#define DEFAULT_KMS_KEY_ID "" -#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME -#define DEFAULT_CONTENT_TYPE "video/h264" -#define DEFAULT_MAX_LATENCY_SECONDS 60 +#define MAX_URL_LENGTH 65536 +#define DEFAULT_RETENTION_PERIOD_HOURS 2 +#define DEFAULT_KMS_KEY_ID "" +#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME +#define DEFAULT_CONTENT_TYPE "video/h264" +#define DEFAULT_MAX_LATENCY_SECONDS 60 #define DEFAULT_FRAGMENT_DURATION_MILLISECONDS 2000 -#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 -#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE -#define DEFAULT_FRAME_TIMECODES TRUE -#define 
DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE -#define DEFAULT_FRAGMENT_ACKS TRUE -#define DEFAULT_RESTART_ON_ERROR TRUE -#define DEFAULT_RECALCULATE_METRICS TRUE -#define DEFAULT_STREAM_FRAMERATE 25 -#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) -#define DEFAULT_BUFFER_DURATION_SECONDS 120 -#define DEFAULT_REPLAY_DURATION_SECONDS 40 -#define DEFAULT_CONNECTION_STALENESS_SECONDS 60 -#define DEFAULT_CODEC_ID "V_MPEG4/ISO/AVC" -#define DEFAULT_TRACKNAME "kinesis_video" -#define APP_SINK_BASE_NAME "appsink" -#define DEFAULT_BUFFER_SIZE (1 * 1024 * 1024) -#define DEFAULT_STORAGE_SIZE (128 * 1024 * 1024) -#define DEFAULT_ROTATION_TIME_SECONDS 3600 - -namespace com { namespace amazonaws { namespace kinesis { namespace video { - - class SampleClientCallbackProvider : public ClientCallbackProvider { - public: - - UINT64 getCallbackCustomData() override { - return reinterpret_cast (this); - } +#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 +#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE +#define DEFAULT_FRAME_TIMECODES TRUE +#define DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE +#define DEFAULT_FRAGMENT_ACKS TRUE +#define DEFAULT_RESTART_ON_ERROR TRUE +#define DEFAULT_RECALCULATE_METRICS TRUE +#define DEFAULT_STREAM_FRAMERATE 25 +#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) +#define DEFAULT_BUFFER_DURATION_SECONDS 120 +#define DEFAULT_REPLAY_DURATION_SECONDS 40 +#define DEFAULT_CONNECTION_STALENESS_SECONDS 60 +#define DEFAULT_CODEC_ID "V_MPEG4/ISO/AVC" +#define DEFAULT_TRACKNAME "kinesis_video" +#define APP_SINK_BASE_NAME "appsink" +#define DEFAULT_BUFFER_SIZE (1 * 1024 * 1024) +#define DEFAULT_STORAGE_SIZE (128 * 1024 * 1024) +#define DEFAULT_ROTATION_TIME_SECONDS 3600 + +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { + +class SampleClientCallbackProvider : public ClientCallbackProvider { + public: + UINT64 getCallbackCustomData() override + { + return reinterpret_cast(this); + } - StorageOverflowPressureFunc getStorageOverflowPressureCallback() 
override { - return storageOverflowPressure; - } + StorageOverflowPressureFunc getStorageOverflowPressureCallback() override + { + return storageOverflowPressure; + } - static STATUS storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes); - }; + static STATUS storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes); +}; - class SampleStreamCallbackProvider : public StreamCallbackProvider { - public: +class SampleStreamCallbackProvider : public StreamCallbackProvider { + public: + UINT64 getCallbackCustomData() override + { + return reinterpret_cast(this); + } - UINT64 getCallbackCustomData() override { - return reinterpret_cast (this); - } + StreamConnectionStaleFunc getStreamConnectionStaleCallback() override + { + return streamConnectionStaleHandler; + }; - StreamConnectionStaleFunc getStreamConnectionStaleCallback() override { - return streamConnectionStaleHandler; - }; + StreamErrorReportFunc getStreamErrorReportCallback() override + { + return streamErrorReportHandler; + }; - StreamErrorReportFunc getStreamErrorReportCallback() override { - return streamErrorReportHandler; - }; + DroppedFrameReportFunc getDroppedFrameReportCallback() override + { + return droppedFrameReportHandler; + }; - DroppedFrameReportFunc getDroppedFrameReportCallback() override { - return droppedFrameReportHandler; - }; + private: + static STATUS streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_buffering_ack); - private: - static STATUS - streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 last_buffering_ack); + static STATUS streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, + STATUS status_code); - static STATUS - streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, - STATUS status_code); + static STATUS 
droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode); +}; - static STATUS - droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode); - }; +class SampleCredentialProvider : public StaticCredentialProvider { + // Test rotation period is 40 second for the grace period. + const std::chrono::duration ROTATION_PERIOD = std::chrono::seconds(DEFAULT_ROTATION_TIME_SECONDS); - class SampleCredentialProvider : public StaticCredentialProvider { - // Test rotation period is 40 second for the grace period. - const std::chrono::duration ROTATION_PERIOD = std::chrono::seconds(DEFAULT_ROTATION_TIME_SECONDS); - public: - SampleCredentialProvider(const Credentials &credentials) : - StaticCredentialProvider(credentials) {} - - void updateCredentials(Credentials &credentials) override { - // Copy the stored creds forward - credentials = credentials_; - - // Update only the expiration - auto now_time = std::chrono::duration_cast( - systemCurrentTime().time_since_epoch()); - auto expiration_seconds = now_time + ROTATION_PERIOD; - credentials.setExpiration(std::chrono::seconds(expiration_seconds.count())); - LOG_INFO("New credentials expiration is " << credentials.getExpiration().count()); - } - }; + public: + SampleCredentialProvider(const Credentials& credentials) : StaticCredentialProvider(credentials) + { + } - class SampleDeviceInfoProvider : public DefaultDeviceInfoProvider { - public: - device_info_t getDeviceInfo() override { - auto device_info = DefaultDeviceInfoProvider::getDeviceInfo(); - // Set the storage size to 64MB - device_info.storageInfo.storageSize = DEFAULT_STORAGE_SIZE; - return device_info; - } - }; + void updateCredentials(Credentials& credentials) override + { + // Copy the stored creds forward + credentials = credentials_; - STATUS - SampleClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) { - 
UNUSED_PARAM(custom_handle); - LOG_WARN("Reporting storage overflow. Bytes remaining " << remaining_bytes); - return STATUS_SUCCESS; + // Update only the expiration + auto now_time = std::chrono::duration_cast(systemCurrentTime().time_since_epoch()); + auto expiration_seconds = now_time + ROTATION_PERIOD; + credentials.setExpiration(std::chrono::seconds(expiration_seconds.count())); + LOG_INFO("New credentials expiration is " << credentials.getExpiration().count()); } - - STATUS SampleStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 last_buffering_ack) { - LOG_WARN("Reporting stream stale. Last ACK received " << last_buffering_ack); - return STATUS_SUCCESS; +}; + +class SampleDeviceInfoProvider : public DefaultDeviceInfoProvider { + public: + device_info_t getDeviceInfo() override + { + auto device_info = DefaultDeviceInfoProvider::getDeviceInfo(); + // Set the storage size to 64MB + device_info.storageInfo.storageSize = DEFAULT_STORAGE_SIZE; + return device_info; } +}; + +STATUS +SampleClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) +{ + UNUSED_PARAM(custom_handle); + LOG_WARN("Reporting storage overflow. Bytes remaining " << remaining_bytes); + return STATUS_SUCCESS; +} - STATUS - SampleStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, STATUS status_code) { - LOG_ERROR("Reporting stream error. Errored timecode: " << errored_timecode << " Status: " - << status_code); - return STATUS_SUCCESS; - } +STATUS SampleStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_buffering_ack) +{ + LOG_WARN("Reporting stream stale. 
Last ACK received " << last_buffering_ack); + return STATUS_SUCCESS; +} - STATUS - SampleStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode) { - LOG_WARN("Reporting dropped frame. Frame timecode " << dropped_frame_timecode); - return STATUS_SUCCESS; - } +STATUS +SampleStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + UINT64 errored_timecode, STATUS status_code) +{ + LOG_ERROR("Reporting stream error. Errored timecode: " << errored_timecode << " Status: " << status_code); + return STATUS_SUCCESS; +} + +STATUS +SampleStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode) +{ + LOG_WARN("Reporting dropped frame. Frame timecode " << dropped_frame_timecode); + return STATUS_SUCCESS; +} -} // namespace video -} // namespace kinesis -} // namespace amazonaws -} // namespace com; +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com unique_ptr credentials_; typedef struct _CustomData { - GMainLoop *main_loop; + GMainLoop* main_loop; unique_ptr kinesis_video_producer; map> kinesis_video_stream_handles; - vector pipelines; + vector pipelines; map stream_started; map frame_data_map; map frame_data_size_map; @@ -182,8 +187,8 @@ typedef struct _CustomData { map producer_start_time_map; } CustomData; -void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nanoseconds &dts, FRAME_FLAGS flags, - void *data, size_t len) { +void create_kinesis_video_frame(Frame* frame, const nanoseconds& pts, const nanoseconds& dts, FRAME_FLAGS flags, void* data, size_t len) +{ frame->flags = flags; frame->decodingTs = static_cast(dts.count()) / DEFAULT_TIME_UNIT_IN_NANOS; frame->presentationTs = static_cast(pts.count()) / DEFAULT_TIME_UNIT_IN_NANOS; @@ -194,49 +199,50 @@ void create_kinesis_video_frame(Frame 
*frame, const nanoseconds &pts, const nano frame->trackId = DEFAULT_TRACK_ID; } -bool put_frame(shared_ptr kinesis_video_stream, void *data, size_t len, const nanoseconds &pts, - const nanoseconds &dts, FRAME_FLAGS flags) { +bool put_frame(shared_ptr kinesis_video_stream, void* data, size_t len, const nanoseconds& pts, const nanoseconds& dts, + FRAME_FLAGS flags) +{ Frame frame; create_kinesis_video_frame(&frame, pts, dts, flags, data, len); return kinesis_video_stream->putFrame(frame); } -static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { - GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK (sink)); - gchar *g_stream_handle_key = gst_element_get_name(sink); +static GstFlowReturn on_new_sample(GstElement* sink, CustomData* data) +{ + GstSample* sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); + gchar* g_stream_handle_key = gst_element_get_name(sink); string stream_handle_key = string(g_stream_handle_key); - GstCaps *gstcaps = (GstCaps *) gst_sample_get_caps(sample); + GstCaps* gstcaps = (GstCaps*) gst_sample_get_caps(sample); GST_LOG("caps are %" GST_PTR_FORMAT, gstcaps); bool isHeader, isDroppable; - GstStructure *gststructforcaps = gst_caps_get_structure(gstcaps, 0); + GstStructure* gststructforcaps = gst_caps_get_structure(gstcaps, 0); if (!data->stream_started[stream_handle_key]) { data->stream_started[stream_handle_key] = true; - const GValue *gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); - gchar *cpd = gst_value_serialize(gstStreamFormat); + const GValue* gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); + gchar* cpd = gst_value_serialize(gstStreamFormat); auto kvs_stream = data->kinesis_video_stream_handles[stream_handle_key]; kvs_stream->start(std::string(cpd)); g_free(cpd); } - GstBuffer *buffer = gst_sample_get_buffer(sample); + GstBuffer* buffer = gst_sample_get_buffer(sample); size_t buffer_size = gst_buffer_get_size(buffer); isHeader = 
GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_HEADER); - isDroppable = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_CORRUPTED) || - GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY) || - (GST_BUFFER_FLAGS(buffer) == GST_BUFFER_FLAG_DISCONT) || - (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) || - // drop if buffer contains header only and has invalid timestamp - (isHeader && (!GST_BUFFER_PTS_IS_VALID(buffer) || !GST_BUFFER_DTS_IS_VALID(buffer))); + isDroppable = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_CORRUPTED) || GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY) || + (GST_BUFFER_FLAGS(buffer) == GST_BUFFER_FLAG_DISCONT) || + (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) || + // drop if buffer contains header only and has invalid timestamp + (isHeader && (!GST_BUFFER_PTS_IS_VALID(buffer) || !GST_BUFFER_DTS_IS_VALID(buffer))); if (!isDroppable) { UINT32 frame_data_size = data->frame_data_size_map[stream_handle_key]; if (frame_data_size < buffer_size) { frame_data_size = frame_data_size * 2; - delete [] data->frame_data_map[stream_handle_key]; + delete[] data->frame_data_map[stream_handle_key]; data->frame_data_size_map[stream_handle_key] = frame_data_size; data->frame_data_map[stream_handle_key] = new uint8_t[frame_data_size]; } @@ -259,8 +265,9 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { buffer->pts += data->producer_start_time_map[stream_handle_key] - data->first_pts_map[stream_handle_key]; - if (false == put_frame(data->kinesis_video_stream_handles[stream_handle_key], data->frame_data_map[stream_handle_key], buffer_size, std::chrono::nanoseconds(buffer->pts), - std::chrono::nanoseconds(buffer->dts), kinesis_video_flags)) { + if (false == + put_frame(data->kinesis_video_stream_handles[stream_handle_key], data->frame_data_map[stream_handle_key], buffer_size, 
+ std::chrono::nanoseconds(buffer->pts), std::chrono::nanoseconds(buffer->dts), kinesis_video_flags)) { GST_WARNING("Dropped frame"); } } @@ -271,13 +278,14 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { } /* This function is called when an error message is posted on the bus */ -static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { - GError *err; - gchar *debug_info; +static void error_cb(GstBus* bus, GstMessage* msg, CustomData* data) +{ + GError* err; + gchar* debug_info; /* Print error details on the screen */ gst_message_parse_error(msg, &err, &debug_info); - g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); + g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message); g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none"); g_clear_error(&err); g_free(debug_info); @@ -285,15 +293,16 @@ static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { g_main_loop_quit(data->main_loop); } -void kinesis_video_init(CustomData *data) { +void kinesis_video_init(CustomData* data) +{ unique_ptr device_info_provider(new SampleDeviceInfoProvider()); unique_ptr client_callback_provider(new SampleClientCallbackProvider()); unique_ptr stream_callback_provider(new SampleStreamCallbackProvider()); - char const *accessKey; - char const *secretKey; - char const *sessionToken; - char const *defaultRegion; + char const* accessKey; + char const* secretKey; + char const* sessionToken; + char const* defaultRegion; string defaultRegionStr; string sessionTokenStr; if (nullptr == (accessKey = getenv(ACCESS_KEY_ENV_VAR))) { @@ -316,51 +325,26 @@ void kinesis_video_init(CustomData *data) { defaultRegionStr = string(defaultRegion); } - credentials_.reset(new Credentials(string(accessKey), - string(secretKey), - sessionTokenStr, - std::chrono::seconds(180))); + credentials_.reset(new Credentials(string(accessKey), string(secretKey), 
sessionTokenStr, std::chrono::seconds(180))); unique_ptr credential_provider(new SampleCredentialProvider(*credentials_.get())); - data->kinesis_video_producer = KinesisVideoProducer::createSync(std::move(device_info_provider), - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credential_provider), - API_CALL_CACHE_TYPE_ALL, - defaultRegionStr); + data->kinesis_video_producer = + KinesisVideoProducer::createSync(std::move(device_info_provider), std::move(client_callback_provider), std::move(stream_callback_provider), + std::move(credential_provider), API_CALL_CACHE_TYPE_ALL, defaultRegionStr); LOG_DEBUG("Client is ready"); } -void kinesis_stream_init(string stream_name, CustomData *data, string stream_handle_key) { +void kinesis_stream_init(string stream_name, CustomData* data, string stream_handle_key) +{ /* create a test stream */ unique_ptr stream_definition(new StreamDefinition( - stream_name.c_str(), - hours(DEFAULT_RETENTION_PERIOD_HOURS), - nullptr, - DEFAULT_KMS_KEY_ID, - DEFAULT_STREAMING_TYPE, - DEFAULT_CONTENT_TYPE, - duration_cast (seconds(DEFAULT_MAX_LATENCY_SECONDS)), - milliseconds(DEFAULT_FRAGMENT_DURATION_MILLISECONDS), - milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS), - DEFAULT_KEY_FRAME_FRAGMENTATION, - DEFAULT_FRAME_TIMECODES, - DEFAULT_ABSOLUTE_FRAGMENT_TIMES, - DEFAULT_FRAGMENT_ACKS, - DEFAULT_RESTART_ON_ERROR, - DEFAULT_RECALCULATE_METRICS, - true, - NAL_ADAPTATION_FLAG_NONE, - DEFAULT_STREAM_FRAMERATE, - DEFAULT_AVG_BANDWIDTH_BPS, - seconds(DEFAULT_BUFFER_DURATION_SECONDS), - seconds(DEFAULT_REPLAY_DURATION_SECONDS), - seconds(DEFAULT_CONNECTION_STALENESS_SECONDS), - DEFAULT_CODEC_ID, - DEFAULT_TRACKNAME, - nullptr, - 0)); + stream_name.c_str(), hours(DEFAULT_RETENTION_PERIOD_HOURS), nullptr, DEFAULT_KMS_KEY_ID, DEFAULT_STREAMING_TYPE, DEFAULT_CONTENT_TYPE, + duration_cast(seconds(DEFAULT_MAX_LATENCY_SECONDS)), milliseconds(DEFAULT_FRAGMENT_DURATION_MILLISECONDS), + 
milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS), DEFAULT_KEY_FRAME_FRAGMENTATION, DEFAULT_FRAME_TIMECODES, DEFAULT_ABSOLUTE_FRAGMENT_TIMES, + DEFAULT_FRAGMENT_ACKS, DEFAULT_RESTART_ON_ERROR, DEFAULT_RECALCULATE_METRICS, true, NAL_ADAPTATION_FLAG_NONE, DEFAULT_STREAM_FRAMERATE, + DEFAULT_AVG_BANDWIDTH_BPS, seconds(DEFAULT_BUFFER_DURATION_SECONDS), seconds(DEFAULT_REPLAY_DURATION_SECONDS), + seconds(DEFAULT_CONNECTION_STALENESS_SECONDS), DEFAULT_CODEC_ID, DEFAULT_TRACKNAME, nullptr, 0)); auto kvs_stream = data->kinesis_video_producer->createStreamSync(std::move(stream_definition)); data->kinesis_video_stream_handles[stream_handle_key] = kvs_stream; data->frame_data_size_map[stream_handle_key] = DEFAULT_BUFFER_SIZE; @@ -369,9 +353,10 @@ void kinesis_stream_init(string stream_name, CustomData *data, string stream_han } /* callback when each RTSP stream has been created */ -static void cb_rtsp_pad_created(GstElement *element, GstPad *pad, gpointer data) { - gchar *pad_name = gst_pad_get_name(pad); - GstElement *other = reinterpret_cast(data); +static void cb_rtsp_pad_created(GstElement* element, GstPad* pad, gpointer data) +{ + gchar* pad_name = gst_pad_get_name(pad); + GstElement* other = reinterpret_cast(data); g_print("New RTSP source found: %s\n", pad_name); if (gst_element_link(element, other)) { g_print("Source linked.\n"); @@ -381,14 +366,16 @@ static void cb_rtsp_pad_created(GstElement *element, GstPad *pad, gpointer data) g_free(pad_name); } -int gstreamer_init(int argc, char *argv[]) { +int gstreamer_init(int argc, char* argv[]) +{ PropertyConfigurator::doConfigure("../kvs_log_configuration"); if (argc < 3) { - LOG_ERROR( - "Usage: AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_sample_multistream_app base-stream-name rtsp-url-file-name\n" << - "base-stream-name: the application will create one stream for each rtsp url read. 
The base-stream-names will be suffixed with indexes to differentiate the streams\n" << - "rtsp-url-file-name: name of the file containing rtsp urls separated by new lines"); + LOG_ERROR("Usage: AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_sample_multistream_app " + "base-stream-name rtsp-url-file-name\n" + << "base-stream-name: the application will create one stream for each rtsp url read. The base-stream-names will be suffixed with " + "indexes to differentiate the streams\n" + << "rtsp-url-file-name: name of the file containing rtsp urls separated by new lines"); return 1; } @@ -401,16 +388,17 @@ int gstreamer_init(int argc, char *argv[]) { data.kinesis_video_stream_handles = map>(); data.stream_started = map(); - data.pipelines = vector(); + data.pipelines = vector(); data.frame_data_map = map(); data.frame_data_size_map = map(); data.first_pts_map = map(); - data.producer_start_time_map = map();; + data.producer_start_time_map = map(); + ; /* init GStreamer */ gst_init(&argc, &argv); - ifstream rtsp_url_file (argv[2]); + ifstream rtsp_url_file(argv[2]); if (!rtsp_url_file.is_open()) { LOG_ERROR("Failed to open rtsp-urls file"); return 1; @@ -442,46 +430,39 @@ int gstreamer_init(int argc, char *argv[]) { depay = gst_element_factory_make("rtph264depay", "depay"); source = gst_element_factory_make("rtspsrc", "source"); filter = gst_element_factory_make("capsfilter", "encoder_filter"); - GstCaps *h264_caps = gst_caps_new_simple("video/x-h264", - "stream-format", G_TYPE_STRING, "avc", - "alignment", G_TYPE_STRING, "au", - NULL); - g_object_set(G_OBJECT (filter), "caps", h264_caps, NULL); + GstCaps* h264_caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); + g_object_set(G_OBJECT(filter), "caps", h264_caps, NULL); gst_caps_unref(h264_caps); pipeline = gst_pipeline_new("rtsp-kinesis-pipeline"); - if (!pipeline || !source || !depay || !appsink) { + if 
(!pipeline || !source || !depay || !appsink) { g_printerr("Not all elements could be created:\n"); - if (!pipeline) g_printerr("\tCore pipeline\n"); - if (!source) g_printerr("\trtspsrc (gst-plugins-good)\n"); - if (!depay) g_printerr("\trtph264depay (gst-plugins-good)\n"); - if (!appsink) g_printerr("\tappsink (gst-plugins-base)\n"); + if (!pipeline) + g_printerr("\tCore pipeline\n"); + if (!source) + g_printerr("\trtspsrc (gst-plugins-good)\n"); + if (!depay) + g_printerr("\trtph264depay (gst-plugins-good)\n"); + if (!appsink) + g_printerr("\tappsink (gst-plugins-base)\n"); return 1; } - g_object_set(G_OBJECT (source), - "location", rtsp_urls[i].c_str(), - "short-header", true, // Necessary for target camera + g_object_set(G_OBJECT(source), "location", rtsp_urls[i].c_str(), "short-header", true, // Necessary for target camera NULL); - /* configure appsink */ - g_object_set(G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL); + g_object_set(G_OBJECT(appsink), "emit-signals", TRUE, "sync", FALSE, NULL); g_signal_connect(appsink, "new-sample", G_CALLBACK(on_new_sample), &data); LOG_DEBUG("appsink configured"); g_signal_connect(source, "pad-added", G_CALLBACK(cb_rtsp_pad_created), depay); /* build the pipeline */ - gst_bin_add_many(GST_BIN (pipeline), source, - depay, filter, appsink, - NULL); + gst_bin_add_many(GST_BIN(pipeline), source, depay, filter, appsink, NULL); /* Leave the actual source out - this will be done when the pad is added */ - if (!gst_element_link_many(depay, filter, - appsink, - NULL)) { - + if (!gst_element_link_many(depay, filter, appsink, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); return 1; @@ -490,7 +471,7 @@ int gstreamer_init(int argc, char *argv[]) { data.pipelines.push_back(pipeline); } - for (GstElement *pipeline : data.pipelines) { + for (GstElement* pipeline : data.pipelines) { ret = gst_element_set_state(pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { 
g_printerr("Unable to set the pipeline to the playing state.\n"); @@ -504,18 +485,19 @@ int gstreamer_init(int argc, char *argv[]) { g_main_loop_run(data.main_loop); CleanUp: - for (GstElement *pipeline : data.pipelines) { + for (GstElement* pipeline : data.pipelines) { gst_element_set_state(pipeline, GST_STATE_NULL); gst_object_unref(pipeline); } for (auto frame_data : data.frame_data_map) { - delete [] frame_data.second; + delete[] frame_data.second; } return 0; } -int main(int argc, char *argv[]) { +int main(int argc, char* argv[]) +{ return gstreamer_init(argc, argv); } diff --git a/samples/kvs_gstreamer_sample.cpp b/samples/kvs_gstreamer_sample.cpp index 6510ab40..88961256 100644 --- a/samples/kvs_gstreamer_sample.cpp +++ b/samples/kvs_gstreamer_sample.cpp @@ -18,7 +18,7 @@ using namespace log4cplus; extern "C" { #endif -int gstreamer_init(int, char **); +int gstreamer_init(int, char**); #ifdef __cplusplus } @@ -26,67 +26,54 @@ int gstreamer_init(int, char **); LOGGER_TAG("com.amazonaws.kinesis.video.gstreamer"); -#define DEFAULT_RETENTION_PERIOD_HOURS 2 -#define DEFAULT_KMS_KEY_ID "" -#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME -#define DEFAULT_CONTENT_TYPE "video/h264" -#define DEFAULT_MAX_LATENCY_SECONDS 60 +#define DEFAULT_RETENTION_PERIOD_HOURS 2 +#define DEFAULT_KMS_KEY_ID "" +#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME +#define DEFAULT_CONTENT_TYPE "video/h264" +#define DEFAULT_MAX_LATENCY_SECONDS 60 #define DEFAULT_FRAGMENT_DURATION_MILLISECONDS 2000 -#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 -#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE -#define DEFAULT_FRAME_TIMECODES TRUE -#define DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE -#define DEFAULT_FRAGMENT_ACKS TRUE -#define DEFAULT_RESTART_ON_ERROR TRUE -#define DEFAULT_RECALCULATE_METRICS TRUE -#define DEFAULT_STREAM_FRAMERATE 25 -#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) -#define DEFAULT_BUFFER_DURATION_SECONDS 120 -#define DEFAULT_REPLAY_DURATION_SECONDS 40 -#define 
DEFAULT_CONNECTION_STALENESS_SECONDS 60 -#define DEFAULT_CODEC_ID "V_MPEG4/ISO/AVC" -#define DEFAULT_TRACKNAME "kinesis_video" -#define DEFAULT_FRAME_DURATION_MS 1 -#define DEFAULT_CREDENTIAL_ROTATION_SECONDS 3600 -#define DEFAULT_CREDENTIAL_EXPIRATION_SECONDS 180 - -typedef enum _StreamSource { - FILE_SOURCE, - LIVE_SOURCE, - RTSP_SOURCE -} StreamSource; +#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 +#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE +#define DEFAULT_FRAME_TIMECODES TRUE +#define DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE +#define DEFAULT_FRAGMENT_ACKS TRUE +#define DEFAULT_RESTART_ON_ERROR TRUE +#define DEFAULT_RECALCULATE_METRICS TRUE +#define DEFAULT_STREAM_FRAMERATE 25 +#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) +#define DEFAULT_BUFFER_DURATION_SECONDS 120 +#define DEFAULT_REPLAY_DURATION_SECONDS 40 +#define DEFAULT_CONNECTION_STALENESS_SECONDS 60 +#define DEFAULT_CODEC_ID "V_MPEG4/ISO/AVC" +#define DEFAULT_TRACKNAME "kinesis_video" +#define DEFAULT_FRAME_DURATION_MS 1 +#define DEFAULT_CREDENTIAL_ROTATION_SECONDS 3600 +#define DEFAULT_CREDENTIAL_EXPIRATION_SECONDS 180 + +typedef enum _StreamSource { FILE_SOURCE, LIVE_SOURCE, RTSP_SOURCE } StreamSource; typedef struct _FileInfo { - _FileInfo(): - path(""), - last_fragment_ts(0) {} + _FileInfo() : path(""), last_fragment_ts(0) + { + } string path; uint64_t last_fragment_ts; } FileInfo; typedef struct _CustomData { - - _CustomData(): - streamSource(LIVE_SOURCE), - h264_stream_supported(false), - synthetic_dts(0), - last_unpersisted_file_idx(0), - stream_status(STATUS_SUCCESS), - base_pts(0), - max_frame_pts(0), - key_frame_pts(0), - main_loop(NULL), - first_pts(GST_CLOCK_TIME_NONE), - use_absolute_fragment_times(true) { + _CustomData() + : streamSource(LIVE_SOURCE), h264_stream_supported(false), synthetic_dts(0), last_unpersisted_file_idx(0), stream_status(STATUS_SUCCESS), + base_pts(0), max_frame_pts(0), key_frame_pts(0), main_loop(NULL), first_pts(GST_CLOCK_TIME_NONE), 
use_absolute_fragment_times(true) + { producer_start_time = chrono::duration_cast(systemCurrentTime().time_since_epoch()).count(); } - GMainLoop *main_loop; + GMainLoop* main_loop; unique_ptr kinesis_video_producer; shared_ptr kinesis_video_stream; bool stream_started; bool h264_stream_supported; - char *stream_name; + char* stream_name; mutex file_list_mtx; // list of files to upload. @@ -136,16 +123,20 @@ typedef struct _CustomData { uint64_t first_pts; } CustomData; -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { class SampleClientCallbackProvider : public ClientCallbackProvider { -public: - - UINT64 getCallbackCustomData() override { - return reinterpret_cast (this); + public: + UINT64 getCallbackCustomData() override + { + return reinterpret_cast(this); } - StorageOverflowPressureFunc getStorageOverflowPressureCallback() override { + StorageOverflowPressureFunc getStorageOverflowPressureCallback() override + { return storageOverflowPressure; } @@ -154,61 +145,64 @@ class SampleClientCallbackProvider : public ClientCallbackProvider { class SampleStreamCallbackProvider : public StreamCallbackProvider { UINT64 custom_data_; -public: - SampleStreamCallbackProvider(UINT64 custom_data) : custom_data_(custom_data) {} - UINT64 getCallbackCustomData() override { + public: + SampleStreamCallbackProvider(UINT64 custom_data) : custom_data_(custom_data) + { + } + + UINT64 getCallbackCustomData() override + { return custom_data_; } - StreamConnectionStaleFunc getStreamConnectionStaleCallback() override { + StreamConnectionStaleFunc getStreamConnectionStaleCallback() override + { return streamConnectionStaleHandler; }; - StreamErrorReportFunc getStreamErrorReportCallback() override { + StreamErrorReportFunc getStreamErrorReportCallback() override + { return streamErrorReportHandler; }; - DroppedFrameReportFunc getDroppedFrameReportCallback() override { + 
DroppedFrameReportFunc getDroppedFrameReportCallback() override + { return droppedFrameReportHandler; }; - FragmentAckReceivedFunc getFragmentAckReceivedCallback() override { + FragmentAckReceivedFunc getFragmentAckReceivedCallback() override + { return fragmentAckReceivedHandler; }; -private: - static STATUS - streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 last_buffering_ack); + private: + static STATUS streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_buffering_ack); - static STATUS - streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, - STATUS status_code); + static STATUS streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, + STATUS status_code); - static STATUS - droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode); + static STATUS droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode); - static STATUS - fragmentAckReceivedHandler( UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck); + static STATUS fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck); }; class SampleCredentialProvider : public StaticCredentialProvider { // Test rotation period is 40 second for the grace period. 
const std::chrono::duration ROTATION_PERIOD = std::chrono::seconds(DEFAULT_CREDENTIAL_ROTATION_SECONDS); -public: - SampleCredentialProvider(const Credentials &credentials) : - StaticCredentialProvider(credentials) {} - void updateCredentials(Credentials &credentials) override { + public: + SampleCredentialProvider(const Credentials& credentials) : StaticCredentialProvider(credentials) + { + } + + void updateCredentials(Credentials& credentials) override + { // Copy the stored creds forward credentials = credentials_; // Update only the expiration - auto now_time = std::chrono::duration_cast( - systemCurrentTime().time_since_epoch()); + auto now_time = std::chrono::duration_cast(systemCurrentTime().time_since_epoch()); auto expiration_seconds = now_time + ROTATION_PERIOD; credentials.setExpiration(std::chrono::seconds(expiration_seconds.count())); LOG_INFO("New credentials expiration is " << credentials.getExpiration().count()); @@ -216,8 +210,9 @@ class SampleCredentialProvider : public StaticCredentialProvider { }; class SampleDeviceInfoProvider : public DefaultDeviceInfoProvider { -public: - device_info_t getDeviceInfo() override { + public: + device_info_t getDeviceInfo() override + { auto device_info = DefaultDeviceInfoProvider::getDeviceInfo(); // Set the storage size to 128mb device_info.storageInfo.storageSize = 128 * 1024 * 1024; @@ -226,25 +221,25 @@ class SampleDeviceInfoProvider : public DefaultDeviceInfoProvider { }; STATUS -SampleClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) { +SampleClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) +{ UNUSED_PARAM(custom_handle); LOG_WARN("Reporting storage overflow. 
Bytes remaining " << remaining_bytes); return STATUS_SUCCESS; } -STATUS SampleStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 last_buffering_ack) { +STATUS SampleStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_buffering_ack) +{ LOG_WARN("Reporting stream stale. Last ACK received " << last_buffering_ack); return STATUS_SUCCESS; } STATUS -SampleStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, STATUS status_code) { - LOG_ERROR("Reporting stream error. Errored timecode: " << errored_timecode << " Status: " - << status_code); - CustomData *data = reinterpret_cast(custom_data); +SampleStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + UINT64 errored_timecode, STATUS status_code) +{ + LOG_ERROR("Reporting stream error. Errored timecode: " << errored_timecode << " Status: " << status_code); + CustomData* data = reinterpret_cast(custom_data); bool terminate_pipeline = false; // Terminate pipeline if error is not retriable or if error is retriable but we are streaming file. @@ -267,21 +262,22 @@ SampleStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREA } STATUS -SampleStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode) { +SampleStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode) +{ LOG_WARN("Reporting dropped frame. 
Frame timecode " << dropped_frame_timecode); return STATUS_SUCCESS; } STATUS -SampleStreamCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck) { - CustomData *data = reinterpret_cast(custom_data); +SampleStreamCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + PFragmentAck pFragmentAck) +{ + CustomData* data = reinterpret_cast(custom_data); if (data->streamSource == FILE_SOURCE && pFragmentAck->ackType == FRAGMENT_ACK_TYPE_PERSISTED) { std::unique_lock lk(data->file_list_mtx); uint32_t last_unpersisted_file_idx = data->last_unpersisted_file_idx.load(); uint64_t last_frag_ts = data->file_list.at(last_unpersisted_file_idx).last_fragment_ts / - duration_cast(milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS)).count(); + duration_cast(milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS)).count(); if (last_frag_ts != 0 && last_frag_ts == pFragmentAck->timestamp) { data->last_unpersisted_file_idx = last_unpersisted_file_idx + 1; LOG_INFO("Successfully persisted file " << data->file_list.at(last_unpersisted_file_idx).path); @@ -291,14 +287,15 @@ SampleStreamCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, STR return STATUS_SUCCESS; } -} // namespace video -} // namespace kinesis -} // namespace amazonaws -} // namespace com; +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com -static void eos_cb(GstElement *sink, CustomData *data) { +static void eos_cb(GstElement* sink, CustomData* data) +{ // bookkeeping base_pts. add 1ms to avoid overlap. 
- data->base_pts += + data->max_frame_pts + duration_cast(milliseconds(1)).count(); + data->base_pts += +data->max_frame_pts + duration_cast(milliseconds(1)).count(); data->max_frame_pts = 0; { @@ -311,8 +308,8 @@ static void eos_cb(GstElement *sink, CustomData *data) { g_main_loop_quit(data->main_loop); } -void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nanoseconds &dts, FRAME_FLAGS flags, - void *data, size_t len) { +void create_kinesis_video_frame(Frame* frame, const nanoseconds& pts, const nanoseconds& dts, FRAME_FLAGS flags, void* data, size_t len) +{ frame->flags = flags; frame->decodingTs = static_cast(dts.count()) / DEFAULT_TIME_UNIT_IN_NANOS; frame->presentationTs = static_cast(pts.count()) / DEFAULT_TIME_UNIT_IN_NANOS; @@ -323,19 +320,22 @@ void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nano frame->trackId = DEFAULT_TRACK_ID; } -bool put_frame(shared_ptr kinesis_video_stream, void *data, size_t len, const nanoseconds &pts, const nanoseconds &dts, FRAME_FLAGS flags) { +bool put_frame(shared_ptr kinesis_video_stream, void* data, size_t len, const nanoseconds& pts, const nanoseconds& dts, + FRAME_FLAGS flags) +{ Frame frame; create_kinesis_video_frame(&frame, pts, dts, flags, data, len); return kinesis_video_stream->putFrame(frame); } -static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { - GstBuffer *buffer; +static GstFlowReturn on_new_sample(GstElement* sink, CustomData* data) +{ + GstBuffer* buffer; bool isDroppable, isHeader, delta; size_t buffer_size; GstFlowReturn ret = GST_FLOW_OK; STATUS curr_stream_status = data->stream_status.load(); - GstSample *sample = nullptr; + GstSample* sample = nullptr; GstMapInfo info; if (STATUS_FAILED(curr_stream_status)) { @@ -345,30 +345,28 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { } info.data = nullptr; - sample = gst_app_sink_pull_sample(GST_APP_SINK (sink)); + sample = 
gst_app_sink_pull_sample(GST_APP_SINK(sink)); // capture cpd at the first frame if (!data->stream_started) { data->stream_started = true; - GstCaps* gstcaps = (GstCaps*) gst_sample_get_caps(sample); - GstStructure * gststructforcaps = gst_caps_get_structure(gstcaps, 0); - const GValue *gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); - gchar *cpd = gst_value_serialize(gstStreamFormat); + GstCaps* gstcaps = (GstCaps*) gst_sample_get_caps(sample); + GstStructure* gststructforcaps = gst_caps_get_structure(gstcaps, 0); + const GValue* gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); + gchar* cpd = gst_value_serialize(gstStreamFormat); data->kinesis_video_stream->start(std::string(cpd)); g_free(cpd); } buffer = gst_sample_get_buffer(sample); isHeader = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_HEADER); - isDroppable = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_CORRUPTED) || - GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY) || - (GST_BUFFER_FLAGS(buffer) == GST_BUFFER_FLAG_DISCONT) || - (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) || - // drop if buffer contains header only and has invalid timestamp - (isHeader && (!GST_BUFFER_PTS_IS_VALID(buffer) || !GST_BUFFER_DTS_IS_VALID(buffer))); + isDroppable = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_CORRUPTED) || GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DECODE_ONLY) || + (GST_BUFFER_FLAGS(buffer) == GST_BUFFER_FLAG_DISCONT) || + (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) || + // drop if buffer contains header only and has invalid timestamp + (isHeader && (!GST_BUFFER_PTS_IS_VALID(buffer) || !GST_BUFFER_DTS_IS_VALID(buffer))); if (!isDroppable) { - delta = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT); FRAME_FLAGS kinesis_video_flags = delta ? 
FRAME_FLAG_NONE : FRAME_FLAG_KEY_FRAME; @@ -398,12 +396,12 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { buffer->pts += data->producer_start_time - data->first_pts; } - if (!gst_buffer_map(buffer, &info, GST_MAP_READ)){ + if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) { goto CleanUp; } - put_frame(data->kinesis_video_stream, info.data, info.size, std::chrono::nanoseconds(buffer->pts), - std::chrono::nanoseconds(buffer->dts), kinesis_video_flags); + put_frame(data->kinesis_video_stream, info.data, info.size, std::chrono::nanoseconds(buffer->pts), std::chrono::nanoseconds(buffer->dts), + kinesis_video_flags); } CleanUp: @@ -419,27 +417,23 @@ static GstFlowReturn on_new_sample(GstElement *sink, CustomData *data) { return ret; } -static bool format_supported_by_source(GstCaps *src_caps, GstCaps *query_caps, int width, int height, int framerate) { - gst_caps_set_simple(query_caps, - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - "framerate", GST_TYPE_FRACTION, framerate, 1, - NULL); +static bool format_supported_by_source(GstCaps* src_caps, GstCaps* query_caps, int width, int height, int framerate) +{ + gst_caps_set_simple(query_caps, "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, framerate, 1, NULL); bool is_match = gst_caps_can_intersect(query_caps, src_caps); // in case the camera has fps as 10000000/333333 - if(!is_match) { - gst_caps_set_simple(query_caps, - "framerate", GST_TYPE_FRACTION_RANGE, framerate, 1, framerate+1, 1, - NULL); + if (!is_match) { + gst_caps_set_simple(query_caps, "framerate", GST_TYPE_FRACTION_RANGE, framerate, 1, framerate + 1, 1, NULL); is_match = gst_caps_can_intersect(query_caps, src_caps); } return is_match; } -static bool resolution_supported(GstCaps *src_caps, GstCaps *query_caps_raw, GstCaps *query_caps_h264, - CustomData &data, int width, int height, int framerate) { +static bool resolution_supported(GstCaps* src_caps, GstCaps* query_caps_raw, 
GstCaps* query_caps_h264, CustomData& data, int width, int height, + int framerate) +{ if (query_caps_h264 && format_supported_by_source(src_caps, query_caps_h264, width, height, framerate)) { LOG_DEBUG("src supports h264") data.h264_stream_supported = true; @@ -453,13 +447,14 @@ static bool resolution_supported(GstCaps *src_caps, GstCaps *query_caps_raw, Gst } /* This function is called when an error message is posted on the bus */ -static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { - GError *err; - gchar *debug_info; +static void error_cb(GstBus* bus, GstMessage* msg, CustomData* data) +{ + GError* err; + gchar* debug_info; /* Print error details on the screen */ gst_message_parse_error(msg, &err, &debug_info); - g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); + g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message); g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none"); g_clear_error(&err); g_free(debug_info); @@ -467,24 +462,24 @@ static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { g_main_loop_quit(data->main_loop); } -void kinesis_video_init(CustomData *data) { +void kinesis_video_init(CustomData* data) +{ unique_ptr device_info_provider(new SampleDeviceInfoProvider()); unique_ptr client_callback_provider(new SampleClientCallbackProvider()); - unique_ptr stream_callback_provider(new SampleStreamCallbackProvider( - reinterpret_cast(data))); + unique_ptr stream_callback_provider(new SampleStreamCallbackProvider(reinterpret_cast(data))); - char const *accessKey; - char const *secretKey; - char const *sessionToken; - char const *defaultRegion; + char const* accessKey; + char const* secretKey; + char const* sessionToken; + char const* defaultRegion; string defaultRegionStr; string sessionTokenStr; - char const *iot_get_credential_endpoint; - char const *cert_path; - char const *private_key_path; - char const *role_alias; - 
char const *ca_cert_path; + char const* iot_get_credential_endpoint; + char const* cert_path; + char const* private_key_path; + char const* role_alias; + char const* ca_cert_path; unique_ptr credential_provider; @@ -495,9 +490,7 @@ void kinesis_video_init(CustomData *data) { } LOG_INFO("Using region: " << defaultRegionStr); - if (nullptr != (accessKey = getenv(ACCESS_KEY_ENV_VAR)) && - nullptr != (secretKey = getenv(SECRET_KEY_ENV_VAR))) { - + if (nullptr != (accessKey = getenv(ACCESS_KEY_ENV_VAR)) && nullptr != (secretKey = getenv(SECRET_KEY_ENV_VAR))) { LOG_INFO("Using aws credentials for Kinesis Video Streams"); if (nullptr != (sessionToken = getenv(SESSION_TOKEN_ENV_VAR))) { LOG_INFO("Session token detected."); @@ -507,40 +500,30 @@ void kinesis_video_init(CustomData *data) { sessionTokenStr = ""; } - data->credential.reset(new Credentials(string(accessKey), - string(secretKey), - sessionTokenStr, - std::chrono::seconds(DEFAULT_CREDENTIAL_EXPIRATION_SECONDS))); + data->credential.reset( + new Credentials(string(accessKey), string(secretKey), sessionTokenStr, std::chrono::seconds(DEFAULT_CREDENTIAL_EXPIRATION_SECONDS))); credential_provider.reset(new SampleCredentialProvider(*data->credential.get())); - } else if (nullptr != (iot_get_credential_endpoint = getenv("IOT_GET_CREDENTIAL_ENDPOINT")) && - nullptr != (cert_path = getenv("CERT_PATH")) && - nullptr != (private_key_path = getenv("PRIVATE_KEY_PATH")) && - nullptr != (role_alias = getenv("ROLE_ALIAS")) && + } else if (nullptr != (iot_get_credential_endpoint = getenv("IOT_GET_CREDENTIAL_ENDPOINT")) && nullptr != (cert_path = getenv("CERT_PATH")) && + nullptr != (private_key_path = getenv("PRIVATE_KEY_PATH")) && nullptr != (role_alias = getenv("ROLE_ALIAS")) && nullptr != (ca_cert_path = getenv("CA_CERT_PATH"))) { LOG_INFO("Using IoT credentials for Kinesis Video Streams"); - credential_provider.reset(new IotCertCredentialProvider(iot_get_credential_endpoint, - cert_path, - private_key_path, - role_alias, - 
ca_cert_path, - data->stream_name)); + credential_provider.reset( + new IotCertCredentialProvider(iot_get_credential_endpoint, cert_path, private_key_path, role_alias, ca_cert_path, data->stream_name)); } else { LOG_AND_THROW("No valid credential method was found"); } - data->kinesis_video_producer = KinesisVideoProducer::createSync(std::move(device_info_provider), - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credential_provider), - API_CALL_CACHE_TYPE_ALL, - defaultRegionStr); + data->kinesis_video_producer = + KinesisVideoProducer::createSync(std::move(device_info_provider), std::move(client_callback_provider), std::move(stream_callback_provider), + std::move(credential_provider), API_CALL_CACHE_TYPE_ALL, defaultRegionStr); LOG_DEBUG("Client is ready"); } -void kinesis_video_stream_init(CustomData *data) { +void kinesis_video_stream_init(CustomData* data) +{ /* create a test stream */ map tags; char tag_name[MAX_TAG_NAME_LEN]; @@ -557,32 +540,12 @@ void kinesis_video_stream_init(CustomData *data) { } unique_ptr stream_definition(new StreamDefinition( - data->stream_name, - hours(DEFAULT_RETENTION_PERIOD_HOURS), - &tags, - DEFAULT_KMS_KEY_ID, - streaming_type, - DEFAULT_CONTENT_TYPE, - duration_cast (seconds(DEFAULT_MAX_LATENCY_SECONDS)), - milliseconds(DEFAULT_FRAGMENT_DURATION_MILLISECONDS), - milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS), - DEFAULT_KEY_FRAME_FRAGMENTATION, - DEFAULT_FRAME_TIMECODES, - data->use_absolute_fragment_times, - DEFAULT_FRAGMENT_ACKS, - DEFAULT_RESTART_ON_ERROR, - DEFAULT_RECALCULATE_METRICS, - true, - 0, - DEFAULT_STREAM_FRAMERATE, - DEFAULT_AVG_BANDWIDTH_BPS, - seconds(DEFAULT_BUFFER_DURATION_SECONDS), - seconds(DEFAULT_REPLAY_DURATION_SECONDS), - seconds(DEFAULT_CONNECTION_STALENESS_SECONDS), - DEFAULT_CODEC_ID, - DEFAULT_TRACKNAME, - nullptr, - 0)); + data->stream_name, hours(DEFAULT_RETENTION_PERIOD_HOURS), &tags, DEFAULT_KMS_KEY_ID, streaming_type, DEFAULT_CONTENT_TYPE, + 
duration_cast(seconds(DEFAULT_MAX_LATENCY_SECONDS)), milliseconds(DEFAULT_FRAGMENT_DURATION_MILLISECONDS), + milliseconds(DEFAULT_TIMECODE_SCALE_MILLISECONDS), DEFAULT_KEY_FRAME_FRAGMENTATION, DEFAULT_FRAME_TIMECODES, + data->use_absolute_fragment_times, DEFAULT_FRAGMENT_ACKS, DEFAULT_RESTART_ON_ERROR, DEFAULT_RECALCULATE_METRICS, true, 0, + DEFAULT_STREAM_FRAMERATE, DEFAULT_AVG_BANDWIDTH_BPS, seconds(DEFAULT_BUFFER_DURATION_SECONDS), seconds(DEFAULT_REPLAY_DURATION_SECONDS), + seconds(DEFAULT_CONNECTION_STALENESS_SECONDS), DEFAULT_CODEC_ID, DEFAULT_TRACKNAME, nullptr, 0)); data->kinesis_video_stream = data->kinesis_video_producer->createStreamSync(std::move(stream_definition)); // reset state @@ -599,10 +562,11 @@ void kinesis_video_stream_init(CustomData *data) { } /* callback when each RTSP stream has been created */ -static void pad_added_cb(GstElement *element, GstPad *pad, GstElement *target) { - GstPad *target_sink = gst_element_get_static_pad(GST_ELEMENT(target), "sink"); +static void pad_added_cb(GstElement* element, GstPad* pad, GstElement* target) +{ + GstPad* target_sink = gst_element_get_static_pad(GST_ELEMENT(target), "sink"); GstPadLinkReturn link_ret; - gchar *pad_name = gst_pad_get_name(pad); + gchar* pad_name = gst_pad_get_name(pad); g_print("New pad found: %s\n", pad_name); link_ret = gst_pad_link(pad, target_sink); @@ -617,8 +581,8 @@ static void pad_added_cb(GstElement *element, GstPad *pad, GstElement *target) { g_free(pad_name); } -int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElement *pipeline) { - +int gstreamer_live_source_init(int argc, char* argv[], CustomData* data, GstElement* pipeline) +{ bool vtenc = false, isOnRpi = false; /* init stream format */ @@ -626,33 +590,22 @@ int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem // index 1 is stream name which is already processed for (int i = 2; i < argc; i++) { if (i < argc) { - if ((0 == STRCMPI(argv[i], "-w")) || - (0 == 
STRCMPI(argv[i], "/w")) || - (0 == STRCMPI(argv[i], "--w"))) { + if ((0 == STRCMPI(argv[i], "-w")) || (0 == STRCMPI(argv[i], "/w")) || (0 == STRCMPI(argv[i], "--w"))) { // process the width if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &width))) { return 1; } - } - else if ((0 == STRCMPI(argv[i], "-h")) || - (0 == STRCMPI(argv[i], "/h")) || - (0 == STRCMPI(argv[i], "--h"))) { + } else if ((0 == STRCMPI(argv[i], "-h")) || (0 == STRCMPI(argv[i], "/h")) || (0 == STRCMPI(argv[i], "--h"))) { // process the width if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &height))) { return 1; } - } - else if ((0 == STRCMPI(argv[i], "-f")) || - (0 == STRCMPI(argv[i], "/f")) || - (0 == STRCMPI(argv[i], "--f"))) { + } else if ((0 == STRCMPI(argv[i], "-f")) || (0 == STRCMPI(argv[i], "/f")) || (0 == STRCMPI(argv[i], "--f"))) { // process the width if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &framerate))) { return 1; } - } - else if ((0 == STRCMPI(argv[i], "-b")) || - (0 == STRCMPI(argv[i], "/b")) || - (0 == STRCMPI(argv[i], "--b"))) { + } else if ((0 == STRCMPI(argv[i], "-b")) || (0 == STRCMPI(argv[i], "/b")) || (0 == STRCMPI(argv[i], "--b"))) { // process the width if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &bitrateInKBPS))) { return 1; @@ -660,15 +613,10 @@ int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem } // skip the index i++; - } - else if (0 == STRCMPI(argv[i], "-?") || - 0 == STRCMPI(argv[i], "--?") || - 0 == STRCMPI(argv[i], "--help")) { + } else if (0 == STRCMPI(argv[i], "-?") || 0 == STRCMPI(argv[i], "--?") || 0 == STRCMPI(argv[i], "--help")) { g_printerr("Invalid arguments\n"); return 1; - } - else if (argv[i][0] == '/' || - argv[i][0] == '-') { + } else if (argv[i][0] == '/' || argv[i][0] == '-') { // Unknown option g_printerr("Invalid arguments\n"); return 1; @@ -680,13 +628,15 @@ int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem return 1; } - LOG_DEBUG("Streaming with live source and width: 
" << width << ", height: " << height << ", fps: " << framerate << ", bitrateInKBPS" << bitrateInKBPS); + LOG_DEBUG("Streaming with live source and width: " << width << ", height: " << height << ", fps: " << framerate << ", bitrateInKBPS" + << bitrateInKBPS); GstElement *source_filter, *filter, *appsink, *h264parse, *encoder, *source, *video_convert; /* create the elemnents */ /* - gst-launch-1.0 v4l2src device=/dev/video0 ! video/x-raw,format=I420,width=1280,height=720,framerate=15/1 ! x264enc pass=quant bframes=0 ! video/x-h264,profile=baseline,format=I420,width=1280,height=720,framerate=15/1 ! matroskamux ! filesink location=test.mkv + gst-launch-1.0 v4l2src device=/dev/video0 ! video/x-raw,format=I420,width=1280,height=720,framerate=15/1 ! x264enc pass=quant bframes=0 ! + video/x-h264,profile=baseline,format=I420,width=1280,height=720,framerate=15/1 ! matroskamux ! filesink location=test.mkv */ source_filter = gst_element_factory_make("capsfilter", "source_filter"); if (!source_filter) { @@ -760,9 +710,9 @@ int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem /* configure source */ if (vtenc) { - g_object_set(G_OBJECT (source), "is-live", TRUE, NULL); + g_object_set(G_OBJECT(source), "is-live", TRUE, NULL); } else { - g_object_set(G_OBJECT (source), "do-timestamp", TRUE, "device", "/dev/video0", NULL); + g_object_set(G_OBJECT(source), "do-timestamp", TRUE, "device", "/dev/video0", NULL); } /* Determine whether device supports h264 encoding and select a streaming resolution supported by the device*/ @@ -771,18 +721,12 @@ int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem return 1; } - GstPad *srcpad = gst_element_get_static_pad(source, "src"); - GstCaps *src_caps = gst_pad_query_caps(srcpad, NULL); + GstPad* srcpad = gst_element_get_static_pad(source, "src"); + GstCaps* src_caps = gst_pad_query_caps(srcpad, NULL); gst_element_set_state(source, GST_STATE_NULL); - GstCaps *query_caps_raw = 
gst_caps_new_simple("video/x-raw", - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - NULL); - GstCaps *query_caps_h264 = gst_caps_new_simple("video/x-h264", - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - NULL); + GstCaps* query_caps_raw = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL); + GstCaps* query_caps_h264 = gst_caps_new_simple("video/x-h264", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL); if (width != 0 && height != 0) { if (!resolution_supported(src_caps, query_caps_raw, query_caps_h264, *data, width, height, framerate)) { @@ -829,55 +773,44 @@ int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem /* source filter */ if (!data->h264_stream_supported) { - gst_caps_set_simple(query_caps_raw, - "format", G_TYPE_STRING, "I420", - NULL); - g_object_set(G_OBJECT (source_filter), "caps", query_caps_raw, NULL); + gst_caps_set_simple(query_caps_raw, "format", G_TYPE_STRING, "I420", NULL); + g_object_set(G_OBJECT(source_filter), "caps", query_caps_raw, NULL); } else { - gst_caps_set_simple(query_caps_h264, - "stream-format", G_TYPE_STRING, "byte-stream", - "alignment", G_TYPE_STRING, "au", - NULL); - g_object_set(G_OBJECT (source_filter), "caps", query_caps_h264, NULL); + gst_caps_set_simple(query_caps_h264, "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING, "au", NULL); + g_object_set(G_OBJECT(source_filter), "caps", query_caps_h264, NULL); } gst_caps_unref(query_caps_h264); gst_caps_unref(query_caps_raw); /* configure encoder */ - if (!data->h264_stream_supported){ + if (!data->h264_stream_supported) { if (vtenc) { - g_object_set(G_OBJECT (encoder), "allow-frame-reordering", FALSE, "realtime", TRUE, "max-keyframe-interval", - 45, "bitrate", bitrateInKBPS, NULL); + g_object_set(G_OBJECT(encoder), "allow-frame-reordering", FALSE, "realtime", TRUE, "max-keyframe-interval", 45, "bitrate", bitrateInKBPS, + NULL); } else 
if (isOnRpi) { - g_object_set(G_OBJECT (encoder), "control-rate", 2, "target-bitrate", bitrateInKBPS*1000, - "periodicty-idr", 45, "inline-header", FALSE, NULL); + g_object_set(G_OBJECT(encoder), "control-rate", 2, "target-bitrate", bitrateInKBPS * 1000, "periodicty-idr", 45, "inline-header", FALSE, + NULL); } else { - g_object_set(G_OBJECT (encoder), "bframes", 0, "key-int-max", 45, "bitrate", bitrateInKBPS, NULL); + g_object_set(G_OBJECT(encoder), "bframes", 0, "key-int-max", 45, "bitrate", bitrateInKBPS, NULL); } } - /* configure filter */ - GstCaps *h264_caps = gst_caps_new_simple("video/x-h264", - "stream-format", G_TYPE_STRING, "avc", - "alignment", G_TYPE_STRING, "au", - NULL); + GstCaps* h264_caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); if (!data->h264_stream_supported) { - gst_caps_set_simple(h264_caps, "profile", G_TYPE_STRING, "baseline", - NULL); + gst_caps_set_simple(h264_caps, "profile", G_TYPE_STRING, "baseline", NULL); } - g_object_set(G_OBJECT (filter), "caps", h264_caps, NULL); + g_object_set(G_OBJECT(filter), "caps", h264_caps, NULL); gst_caps_unref(h264_caps); /* configure appsink */ - g_object_set(G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL); + g_object_set(G_OBJECT(appsink), "emit-signals", TRUE, "sync", FALSE, NULL); g_signal_connect(appsink, "new-sample", G_CALLBACK(on_new_sample), data); /* build the pipeline */ if (!data->h264_stream_supported) { LOG_DEBUG("Constructing pipeline with encoding element") - gst_bin_add_many(GST_BIN (pipeline), source, video_convert, source_filter, encoder, h264parse, filter, - appsink, NULL); + gst_bin_add_many(GST_BIN(pipeline), source, video_convert, source_filter, encoder, h264parse, filter, appsink, NULL); if (!gst_element_link_many(source, video_convert, source_filter, encoder, h264parse, filter, appsink, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); @@ -885,7 +818,7 @@ 
int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem } } else { LOG_DEBUG("Constructing pipeline without encoding element") - gst_bin_add_many(GST_BIN (pipeline), source, source_filter, h264parse, filter, appsink, NULL); + gst_bin_add_many(GST_BIN(pipeline), source, source_filter, h264parse, filter, appsink, NULL); if (!gst_element_link_many(source, source_filter, h264parse, filter, appsink, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); @@ -896,8 +829,8 @@ int gstreamer_live_source_init(int argc, char* argv[], CustomData *data, GstElem return 0; } -int gstreamer_rtsp_source_init(CustomData *data, GstElement *pipeline) { - +int gstreamer_rtsp_source_init(CustomData* data, GstElement* pipeline) +{ GstElement *filter, *appsink, *depay, *source, *h264parse; filter = gst_element_factory_make("capsfilter", "filter"); @@ -912,35 +845,25 @@ int gstreamer_rtsp_source_init(CustomData *data, GstElement *pipeline) { } // configure filter - GstCaps *h264_caps = gst_caps_new_simple("video/x-h264", - "stream-format", G_TYPE_STRING, "avc", - "alignment", G_TYPE_STRING, "au", - NULL); - g_object_set(G_OBJECT (filter), "caps", h264_caps, NULL); + GstCaps* h264_caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); + g_object_set(G_OBJECT(filter), "caps", h264_caps, NULL); gst_caps_unref(h264_caps); // configure appsink - g_object_set(G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL); + g_object_set(G_OBJECT(appsink), "emit-signals", TRUE, "sync", FALSE, NULL); g_signal_connect(appsink, "new-sample", G_CALLBACK(on_new_sample), data); // configure rtspsrc - g_object_set(G_OBJECT (source), - "location", data->rtsp_url.c_str(), - "short-header", true, // Necessary for target camera + g_object_set(G_OBJECT(source), "location", data->rtsp_url.c_str(), "short-header", true, // Necessary for target camera NULL); g_signal_connect(source, 
"pad-added", G_CALLBACK(pad_added_cb), depay); /* build the pipeline */ - gst_bin_add_many(GST_BIN (pipeline), source, - depay, h264parse, filter, appsink, - NULL); + gst_bin_add_many(GST_BIN(pipeline), source, depay, h264parse, filter, appsink, NULL); /* Leave the actual source out - this will be done when the pad is added */ - if (!gst_element_link_many(depay, filter, h264parse, - appsink, - NULL)) { - + if (!gst_element_link_many(depay, filter, h264parse, appsink, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); return 1; @@ -949,8 +872,8 @@ int gstreamer_rtsp_source_init(CustomData *data, GstElement *pipeline) { return 0; } -int gstreamer_file_source_init(CustomData *data, GstElement *pipeline) { - +int gstreamer_file_source_init(CustomData* data, GstElement* pipeline) +{ GstElement *demux, *appsink, *filesrc, *h264parse, *filter, *queue; string file_suffix; string file_path = data->file_list.at(data->current_file_idx).path; @@ -979,39 +902,31 @@ int gstreamer_file_source_init(CustomData *data, GstElement *pipeline) { } // configure filter - GstCaps *h264_caps = gst_caps_new_simple("video/x-h264", - "stream-format", G_TYPE_STRING, "avc", - "alignment", G_TYPE_STRING, "au", - NULL); - g_object_set(G_OBJECT (filter), "caps", h264_caps, NULL); + GstCaps* h264_caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); + g_object_set(G_OBJECT(filter), "caps", h264_caps, NULL); gst_caps_unref(h264_caps); // configure appsink - g_object_set(G_OBJECT (appsink), "emit-signals", TRUE, "sync", FALSE, NULL); + g_object_set(G_OBJECT(appsink), "emit-signals", TRUE, "sync", FALSE, NULL); g_signal_connect(appsink, "new-sample", G_CALLBACK(on_new_sample), data); g_signal_connect(appsink, "eos", G_CALLBACK(eos_cb), data); // configure filesrc - g_object_set(G_OBJECT (filesrc), "location", file_path.c_str(), NULL); + g_object_set(G_OBJECT(filesrc), "location", file_path.c_str(), 
NULL); // configure demux g_signal_connect(demux, "pad-added", G_CALLBACK(pad_added_cb), queue); - /* build the pipeline */ - gst_bin_add_many(GST_BIN (pipeline), demux, - filesrc, filter, appsink, h264parse, queue, - NULL); + gst_bin_add_many(GST_BIN(pipeline), demux, filesrc, filter, appsink, h264parse, queue, NULL); - if (!gst_element_link_many(filesrc, demux, - NULL)) { + if (!gst_element_link_many(filesrc, demux, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); return 1; } - if (!gst_element_link_many(queue, h264parse, filter, appsink, - NULL)) { + if (!gst_element_link_many(queue, h264parse, filter, appsink, NULL)) { g_printerr("Video elements could not be linked.\n"); gst_object_unref(pipeline); return 1; @@ -1020,13 +935,12 @@ int gstreamer_file_source_init(CustomData *data, GstElement *pipeline) { return 0; } - -int gstreamer_init(int argc, char* argv[], CustomData *data) { - +int gstreamer_init(int argc, char* argv[], CustomData* data) +{ /* init GStreamer */ gst_init(&argc, &argv); - GstElement *pipeline; + GstElement* pipeline; int ret; GstStateChangeReturn gst_ret; @@ -1051,14 +965,14 @@ int gstreamer_init(int argc, char* argv[], CustomData *data) { break; } - if (ret != 0){ + if (ret != 0) { return ret; } /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ - GstBus *bus = gst_element_get_bus(pipeline); + GstBus* bus = gst_element_get_bus(pipeline); gst_bus_add_signal_watch(bus); - g_signal_connect (G_OBJECT(bus), "message::error", (GCallback) error_cb, data); + g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_cb, data); gst_object_unref(bus); /* start streaming */ @@ -1081,12 +995,13 @@ int gstreamer_init(int argc, char* argv[], CustomData *data) { return 0; } -int main(int argc, char* argv[]) { +int main(int argc, char* argv[]) +{ PropertyConfigurator::doConfigure("../kvs_log_configuration"); if (argc < 2) { LOG_ERROR( - "Usage: 
AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_sample_app my-stream-name -w width -h height -f framerate -b bitrateInKBPS\n \ + "Usage: AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_sample_app my-stream-name -w width -h height -f framerate -b bitrateInKBPS\n \ or AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_sample_app my-stream-name\n \ or AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_sample_app my-stream-name rtsp-url\n \ or AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kinesis_video_gstreamer_sample_app my-stream-name path/to/file1 path/to/file2 ...\n"); @@ -1116,12 +1031,10 @@ int main(int argc, char* argv[]) { data.streamSource = RTSP_SOURCE; data.rtsp_url = string(argv[2]); - } else if (suffix.compare("mkv") == 0 || - suffix.compare("mp4") == 0 || - suffix.compare(".ts") == 0) { + } else if (suffix.compare("mkv") == 0 || suffix.compare("mp4") == 0 || suffix.compare(".ts") == 0) { data.streamSource = FILE_SOURCE; // skip over stream name - for(int i = 2; i < argc; ++i) { + for (int i = 2; i < argc; ++i) { string file_path = string(argv[i]); // file path should be at least 4 char (shortest example: a.ts) if (file_path.size() < 4) { @@ -1137,10 +1050,10 @@ int main(int argc, char* argv[]) { } /* init Kinesis Video */ - try{ + try { kinesis_video_init(&data); kinesis_video_stream_init(&data); - } catch (runtime_error &err) { + } catch (runtime_error& err) { LOG_ERROR("Failed to initialize kinesis video with an exception: " << err.what()); return 1; } @@ -1152,8 +1065,7 @@ int main(int argc, char* argv[]) { uint32_t i = data.last_unpersisted_file_idx.load(); bool continue_uploading = true; - for(; i < data.file_list.size() && continue_uploading; ++i) { - + for (; i < data.file_list.size() && continue_uploading; ++i) { data.current_file_idx = i; LOG_DEBUG("Attempt to upload 
file: " << data.file_list[i].path); @@ -1198,7 +1110,7 @@ int main(int argc, char* argv[]) { i = data.last_unpersisted_file_idx.load(); LOG_ERROR("Failed to upload file " << data.file_list[i].path << " after retrying. Terminating."); data.kinesis_video_stream->stop(); - do_retry = false; // exit while loop + do_retry = false; // exit while loop } else { // flush out buffers data.kinesis_video_stream->resetStream(); @@ -1207,7 +1119,7 @@ int main(int argc, char* argv[]) { data.stream_started = false; } } - } while(do_retry); + } while (do_retry); } else { // non file uploading scenario diff --git a/samples/kvssink_gstreamer_sample.cpp b/samples/kvssink_gstreamer_sample.cpp index 195254b6..c0d7775b 100644 --- a/samples/kvssink_gstreamer_sample.cpp +++ b/samples/kvssink_gstreamer_sample.cpp @@ -20,7 +20,7 @@ using namespace log4cplus; extern "C" { #endif -int gstreamer_init(int, char **); +int gstreamer_init(int, char**); #ifdef __cplusplus } @@ -28,45 +28,32 @@ int gstreamer_init(int, char **); LOGGER_TAG("com.amazonaws.kinesis.video.gstreamer"); -typedef enum _StreamSource { - FILE_SOURCE, - LIVE_SOURCE, - RTSP_SOURCE -} StreamSource; +typedef enum _StreamSource { FILE_SOURCE, LIVE_SOURCE, RTSP_SOURCE } StreamSource; typedef struct _FileInfo { - _FileInfo() : - path(""), - last_fragment_ts(0) {} + _FileInfo() : path(""), last_fragment_ts(0) + { + } string path; uint64_t last_fragment_ts; } FileInfo; typedef struct _CustomData { - - _CustomData() : - streamSource(LIVE_SOURCE), - h264_stream_supported(false), - synthetic_dts(0), - last_unpersisted_file_idx(0), - stream_status(STATUS_SUCCESS), - base_pts(0), - max_frame_pts(0), - key_frame_pts(0), - main_loop(NULL), - first_pts(GST_CLOCK_TIME_NONE), - use_absolute_fragment_times(true), - max_runtime(0) { + _CustomData() + : streamSource(LIVE_SOURCE), h264_stream_supported(false), synthetic_dts(0), last_unpersisted_file_idx(0), stream_status(STATUS_SUCCESS), + base_pts(0), max_frame_pts(0), key_frame_pts(0), 
main_loop(NULL), first_pts(GST_CLOCK_TIME_NONE), use_absolute_fragment_times(true), + max_runtime(0) + { producer_start_time = chrono::duration_cast(systemCurrentTime().time_since_epoch()).count(); } - GMainLoop *main_loop; + GMainLoop* main_loop; unique_ptr kinesis_video_producer; shared_ptr kinesis_video_stream; bool stream_started; bool h264_stream_supported; - char *stream_name; + char* stream_name; mutex file_list_mtx; // list of files to upload. @@ -120,30 +107,26 @@ typedef struct _CustomData { int max_runtime; } CustomData; -// CustomData +// CustomData CustomData data_global; -static bool format_supported_by_source(GstCaps *src_caps, GstCaps *query_caps, int width, int height, int framerate) { - gst_caps_set_simple(query_caps, - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - "framerate", GST_TYPE_FRACTION, framerate, 1, - NULL); +static bool format_supported_by_source(GstCaps* src_caps, GstCaps* query_caps, int width, int height, int framerate) +{ + gst_caps_set_simple(query_caps, "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, framerate, 1, NULL); bool is_match = gst_caps_can_intersect(query_caps, src_caps); // in case the camera has fps as 10000000/333333 if (!is_match) { - gst_caps_set_simple(query_caps, - "framerate", GST_TYPE_FRACTION_RANGE, framerate, 1, framerate + 1, 1, - NULL); + gst_caps_set_simple(query_caps, "framerate", GST_TYPE_FRACTION_RANGE, framerate, 1, framerate + 1, 1, NULL); is_match = gst_caps_can_intersect(query_caps, src_caps); } return is_match; } -static bool resolution_supported(GstCaps *src_caps, GstCaps *query_caps_raw, GstCaps *query_caps_h264, - CustomData &data, int width, int height, int framerate) { +static bool resolution_supported(GstCaps* src_caps, GstCaps* query_caps_raw, GstCaps* query_caps_h264, CustomData& data, int width, int height, + int framerate) +{ if (query_caps_h264 && format_supported_by_source(src_caps, query_caps_h264, width, height, framerate)) { 
LOG_DEBUG("src supports h264") data.h264_stream_supported = true; @@ -157,7 +140,8 @@ static bool resolution_supported(GstCaps *src_caps, GstCaps *query_caps_raw, Gst } /* callback when eos (End of Stream) is posted on bus */ -static void eos_cb(GstElement *sink, GstMessage *message, CustomData *data) { +static void eos_cb(GstElement* sink, GstMessage* message, CustomData* data) +{ if (data->streamSource == FILE_SOURCE) { // bookkeeping base_pts. add 1ms to avoid overlap. data->base_pts += +data->max_frame_pts + duration_cast(milliseconds(1)).count(); @@ -174,9 +158,10 @@ static void eos_cb(GstElement *sink, GstMessage *message, CustomData *data) { } /* This function is called when an error message is posted on the bus */ -static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { - GError *err; - gchar *debug_info; +static void error_cb(GstBus* bus, GstMessage* msg, CustomData* data) +{ + GError* err; + gchar* debug_info; /* Print error details on the screen */ gst_message_parse_error(msg, &err, &debug_info); @@ -190,10 +175,11 @@ static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) { } /* callback when each RTSP stream has been created */ -static void pad_added_cb(GstElement *element, GstPad *pad, GstElement *target) { - GstPad *target_sink = gst_element_get_static_pad(GST_ELEMENT(target), "sink"); +static void pad_added_cb(GstElement* element, GstPad* pad, GstElement* target) +{ + GstPad* target_sink = gst_element_get_static_pad(GST_ELEMENT(target), "sink"); GstPadLinkReturn link_ret; - gchar *pad_name = gst_pad_get_name(pad); + gchar* pad_name = gst_pad_get_name(pad); g_print("New pad found: %s\n", pad_name); link_ret = gst_pad_link(pad, target_sink); @@ -209,57 +195,53 @@ static void pad_added_cb(GstElement *element, GstPad *pad, GstElement *target) { } /* Function will wait maxruntime before closing stream */ -void timer(CustomData *data) { - THREAD_SLEEP(data->max_runtime); - LOG_DEBUG("max runtime elapsed. 
exiting"); - g_main_loop_quit(data->main_loop); - data->stream_status = STATUS_SUCCESS; +void timer(CustomData* data) +{ + THREAD_SLEEP(data->max_runtime); + LOG_DEBUG("max runtime elapsed. exiting"); + g_main_loop_quit(data->main_loop); + data->stream_status = STATUS_SUCCESS; } /* Function handles sigint signal */ -void sigint_handler(int sigint){ +void sigint_handler(int sigint) +{ LOG_DEBUG("SIGINT received. Exiting graceully"); - - if(data_global.main_loop != NULL){ + + if (data_global.main_loop != NULL) { g_main_loop_quit(data_global.main_loop); } data_global.stream_status = STATUS_KVS_GSTREAMER_SAMPLE_INTERRUPTED; } -void determine_credentials(GstElement *kvssink, CustomData *data) { - - char const *iot_credential_endpoint; - char const *cert_path; - char const *private_key_path; - char const *role_alias; - char const *ca_cert_path; - char const *credential_path; - if (nullptr != (iot_credential_endpoint = getenv("IOT_GET_CREDENTIAL_ENDPOINT")) && - nullptr != (cert_path = getenv("CERT_PATH")) && - nullptr != (private_key_path = getenv("PRIVATE_KEY_PATH")) && - nullptr != (role_alias = getenv("ROLE_ALIAS")) && +void determine_credentials(GstElement* kvssink, CustomData* data) +{ + char const* iot_credential_endpoint; + char const* cert_path; + char const* private_key_path; + char const* role_alias; + char const* ca_cert_path; + char const* credential_path; + if (nullptr != (iot_credential_endpoint = getenv("IOT_GET_CREDENTIAL_ENDPOINT")) && nullptr != (cert_path = getenv("CERT_PATH")) && + nullptr != (private_key_path = getenv("PRIVATE_KEY_PATH")) && nullptr != (role_alias = getenv("ROLE_ALIAS")) && nullptr != (ca_cert_path = getenv("CA_CERT_PATH"))) { - // set the IoT Credentials if provided in envvar - GstStructure *iot_credentials = gst_structure_new( - "iot-certificate", - "iot-thing-name", G_TYPE_STRING, data->stream_name, - "endpoint", G_TYPE_STRING, iot_credential_endpoint, - "cert-path", G_TYPE_STRING, cert_path, - "key-path", G_TYPE_STRING, 
private_key_path, - "ca-path", G_TYPE_STRING, ca_cert_path, - "role-aliases", G_TYPE_STRING, role_alias, NULL); - - g_object_set(G_OBJECT (kvssink), "iot-certificate", iot_credentials, NULL); + // set the IoT Credentials if provided in envvar + GstStructure* iot_credentials = + gst_structure_new("iot-certificate", "iot-thing-name", G_TYPE_STRING, data->stream_name, "endpoint", G_TYPE_STRING, + iot_credential_endpoint, "cert-path", G_TYPE_STRING, cert_path, "key-path", G_TYPE_STRING, private_key_path, "ca-path", + G_TYPE_STRING, ca_cert_path, "role-aliases", G_TYPE_STRING, role_alias, NULL); + + g_object_set(G_OBJECT(kvssink), "iot-certificate", iot_credentials, NULL); gst_structure_free(iot_credentials); - // kvssink will search for long term credentials in envvar automatically so no need to include here - // if no long credentials or IoT credentials provided will look for credential file as last resort - } else if(nullptr != (credential_path = getenv("AWS_CREDENTIAL_PATH"))){ - g_object_set(G_OBJECT (kvssink), "credential-path", credential_path, NULL); + // kvssink will search for long term credentials in envvar automatically so no need to include here + // if no long credentials or IoT credentials provided will look for credential file as last resort + } else if (nullptr != (credential_path = getenv("AWS_CREDENTIAL_PATH"))) { + g_object_set(G_OBJECT(kvssink), "credential-path", credential_path, NULL); } } -int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElement *pipeline) { - +int gstreamer_live_source_init(int argc, char* argv[], CustomData* data, GstElement* pipeline) +{ bool vtenc = false, isOnRpi = false; /* init stream format */ @@ -267,51 +249,38 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem // index 1 is stream name which is already processed for (int i = 2; i < argc; i++) { if (i < argc) { - if ((0 == STRCMPI(argv[i], "-w")) || - (0 == STRCMPI(argv[i], "/w")) || - (0 == STRCMPI(argv[i], 
"--w"))) { + if ((0 == STRCMPI(argv[i], "-w")) || (0 == STRCMPI(argv[i], "/w")) || (0 == STRCMPI(argv[i], "--w"))) { // process the width if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &width))) { return 1; } - } else if ((0 == STRCMPI(argv[i], "-h")) || - (0 == STRCMPI(argv[i], "/h")) || - (0 == STRCMPI(argv[i], "--h"))) { + } else if ((0 == STRCMPI(argv[i], "-h")) || (0 == STRCMPI(argv[i], "/h")) || (0 == STRCMPI(argv[i], "--h"))) { // process the height if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &height))) { return 1; } - } else if ((0 == STRCMPI(argv[i], "-f")) || - (0 == STRCMPI(argv[i], "/f")) || - (0 == STRCMPI(argv[i], "--f"))) { + } else if ((0 == STRCMPI(argv[i], "-f")) || (0 == STRCMPI(argv[i], "/f")) || (0 == STRCMPI(argv[i], "--f"))) { // process the framerate if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &framerate))) { return 1; } - } else if ((0 == STRCMPI(argv[i], "-b")) || - (0 == STRCMPI(argv[i], "/b")) || - (0 == STRCMPI(argv[i], "--b"))) { + } else if ((0 == STRCMPI(argv[i], "-b")) || (0 == STRCMPI(argv[i], "/b")) || (0 == STRCMPI(argv[i], "--b"))) { // process the bitrate if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &bitrateInKBPS))) { return 1; } - } else if ((0 == STRCMPI(argv[i], "-runtime")) || - (0 == STRCMPI(argv[i], "/runtime")) || - (0 == STRCMPI(argv[i], "--runtime"))) { + } else if ((0 == STRCMPI(argv[i], "-runtime")) || (0 == STRCMPI(argv[i], "/runtime")) || (0 == STRCMPI(argv[i], "--runtime"))) { // process the max runtime if (STATUS_FAILED(STRTOI32(argv[i + 1], NULL, 10, &(data->max_runtime)))) { return 1; } - // skip the index + // skip the index } i++; - } else if (0 == STRCMPI(argv[i], "-?") || - 0 == STRCMPI(argv[i], "--?") || - 0 == STRCMPI(argv[i], "--help")) { + } else if (0 == STRCMPI(argv[i], "-?") || 0 == STRCMPI(argv[i], "--?") || 0 == STRCMPI(argv[i], "--help")) { g_printerr("Invalid arguments\n"); return 1; - } else if (argv[i][0] == '/' || - argv[i][0] == '-') { + } else if (argv[i][0] == '/' || 
argv[i][0] == '-') { // Unknown option g_printerr("Invalid arguments\n"); return 1; @@ -323,13 +292,13 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem return 1; } - if(framerate <= 0 || bitrateInKBPS <= 0) { - g_printerr("Invalid input arguments\n"); - return 1; + if (framerate <= 0 || bitrateInKBPS <= 0) { + g_printerr("Invalid input arguments\n"); + return 1; } - LOG_DEBUG("Streaming with live source and width: " << width << ", height: " << height << ", fps: " << framerate - << ", bitrateInKBPS" << bitrateInKBPS); + LOG_DEBUG("Streaming with live source and width: " << width << ", height: " << height << ", fps: " << framerate << ", bitrateInKBPS" + << bitrateInKBPS); GstElement *source_filter, *filter, *kvssink, *h264parse, *encoder, *source, *video_convert; @@ -417,18 +386,12 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem return 1; } - GstPad *srcpad = gst_element_get_static_pad(source, "src"); - GstCaps *src_caps = gst_pad_query_caps(srcpad, NULL); + GstPad* srcpad = gst_element_get_static_pad(source, "src"); + GstCaps* src_caps = gst_pad_query_caps(srcpad, NULL); gst_element_set_state(source, GST_STATE_NULL); - GstCaps *query_caps_raw = gst_caps_new_simple("video/x-raw", - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - NULL); - GstCaps *query_caps_h264 = gst_caps_new_simple("video/x-h264", - "width", G_TYPE_INT, width, - "height", G_TYPE_INT, height, - NULL); + GstCaps* query_caps_raw = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL); + GstCaps* query_caps_h264 = gst_caps_new_simple("video/x-h264", "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL); if (width != 0 && height != 0) { if (!resolution_supported(src_caps, query_caps_raw, query_caps_h264, *data, width, height, framerate)) { @@ -455,8 +418,7 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem } } if (!found_resolution) 
{ - g_printerr( - "Default list of resolutions (1920x1080, 1280x720, 640x480) are not supported by video source\n"); + g_printerr("Default list of resolutions (1920x1080, 1280x720, 640x480) are not supported by video source\n"); return 1; } } @@ -476,15 +438,10 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem /* source filter */ if (!data->h264_stream_supported) { - gst_caps_set_simple(query_caps_raw, - "format", G_TYPE_STRING, "I420", - NULL); + gst_caps_set_simple(query_caps_raw, "format", G_TYPE_STRING, "I420", NULL); g_object_set(G_OBJECT(source_filter), "caps", query_caps_raw, NULL); } else { - gst_caps_set_simple(query_caps_h264, - "stream-format", G_TYPE_STRING, "byte-stream", - "alignment", G_TYPE_STRING, "au", - NULL); + gst_caps_set_simple(query_caps_h264, "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING, "au", NULL); g_object_set(G_OBJECT(source_filter), "caps", query_caps_h264, NULL); } gst_caps_unref(query_caps_h264); @@ -493,25 +450,20 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem /* configure encoder */ if (!data->h264_stream_supported) { if (vtenc) { - g_object_set(G_OBJECT(encoder), "allow-frame-reordering", FALSE, "realtime", TRUE, "max-keyframe-interval", - 45, "bitrate", bitrateInKBPS, NULL); + g_object_set(G_OBJECT(encoder), "allow-frame-reordering", FALSE, "realtime", TRUE, "max-keyframe-interval", 45, "bitrate", bitrateInKBPS, + NULL); } else if (isOnRpi) { - g_object_set(G_OBJECT(encoder), "control-rate", 2, "target-bitrate", bitrateInKBPS * 1000, - "periodicty-idr", 45, "inline-header", FALSE, NULL); + g_object_set(G_OBJECT(encoder), "control-rate", 2, "target-bitrate", bitrateInKBPS * 1000, "periodicty-idr", 45, "inline-header", FALSE, + NULL); } else { g_object_set(G_OBJECT(encoder), "bframes", 0, "key-int-max", 45, "bitrate", bitrateInKBPS, NULL); } } - /* configure filter */ - GstCaps *h264_caps = gst_caps_new_simple("video/x-h264", - 
"stream-format", G_TYPE_STRING, "avc", - "alignment", G_TYPE_STRING, "au", - NULL); + GstCaps* h264_caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); if (!data->h264_stream_supported) { - gst_caps_set_simple(h264_caps, "profile", G_TYPE_STRING, "baseline", - NULL); + gst_caps_set_simple(h264_caps, "profile", G_TYPE_STRING, "baseline", NULL); } g_object_set(G_OBJECT(filter), "caps", h264_caps, NULL); gst_caps_unref(h264_caps); @@ -522,8 +474,7 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem /* build the pipeline */ if (!data->h264_stream_supported) { - gst_bin_add_many(GST_BIN(pipeline), source, video_convert, source_filter, encoder, h264parse, filter, - kvssink, NULL); + gst_bin_add_many(GST_BIN(pipeline), source, video_convert, source_filter, encoder, h264parse, filter, kvssink, NULL); LOG_DEBUG("Constructing pipeline with encoding element") if (!gst_element_link_many(source, video_convert, source_filter, encoder, h264parse, filter, kvssink, NULL)) { g_printerr("Elements could not be linked.\n"); @@ -543,17 +494,16 @@ int gstreamer_live_source_init(int argc, char *argv[], CustomData *data, GstElem return 0; } -int gstreamer_rtsp_source_init(int argc, char *argv[], CustomData *data, GstElement *pipeline) { +int gstreamer_rtsp_source_init(int argc, char* argv[], CustomData* data, GstElement* pipeline) +{ // process runtime if provided - if (argc == 5){ - if ((0 == STRCMPI(argv[3], "-runtime")) || - (0 == STRCMPI(argv[3], "/runtime")) || - (0 == STRCMPI(argv[3], "--runtime"))){ - // process the max runtime - if (STATUS_FAILED(STRTOI32(argv[4], NULL, 10, &(data->max_runtime)))) { + if (argc == 5) { + if ((0 == STRCMPI(argv[3], "-runtime")) || (0 == STRCMPI(argv[3], "/runtime")) || (0 == STRCMPI(argv[3], "--runtime"))) { + // process the max runtime + if (STATUS_FAILED(STRTOI32(argv[4], NULL, 10, &(data->max_runtime)))) { return 1; - } - } + } + } } GstElement 
*filter, *kvssink, *depay, *source, *h264parse; @@ -569,35 +519,25 @@ int gstreamer_rtsp_source_init(int argc, char *argv[], CustomData *data, GstElem } // configure filter - GstCaps *h264_caps = gst_caps_new_simple("video/x-h264", - "stream-format", G_TYPE_STRING, "avc", - "alignment", G_TYPE_STRING, "au", - NULL); + GstCaps* h264_caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); g_object_set(G_OBJECT(filter), "caps", h264_caps, NULL); gst_caps_unref(h264_caps); // configure kvssink g_object_set(G_OBJECT(kvssink), "stream-name", data->stream_name, "storage-size", 128, NULL); determine_credentials(kvssink, data); - + // configure rtspsrc - g_object_set(G_OBJECT(source), - "location", data->rtsp_url.c_str(), - "short-header", true, // Necessary for target camera + g_object_set(G_OBJECT(source), "location", data->rtsp_url.c_str(), "short-header", true, // Necessary for target camera NULL); g_signal_connect(source, "pad-added", G_CALLBACK(pad_added_cb), depay); /* build the pipeline */ - gst_bin_add_many(GST_BIN(pipeline), source, - depay, h264parse, filter, kvssink, - NULL); + gst_bin_add_many(GST_BIN(pipeline), source, depay, h264parse, filter, kvssink, NULL); /* Leave the actual source out - this will be done when the pad is added */ - if (!gst_element_link_many(depay, filter, h264parse, - kvssink, - NULL)) { - + if (!gst_element_link_many(depay, filter, h264parse, kvssink, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); return 1; @@ -606,8 +546,8 @@ int gstreamer_rtsp_source_init(int argc, char *argv[], CustomData *data, GstElem return 0; } -int gstreamer_file_source_init(CustomData *data, GstElement *pipeline) { - +int gstreamer_file_source_init(CustomData* data, GstElement* pipeline) +{ GstElement *demux, *kvssink, *filesrc, *h264parse, *filter, *queue; string file_suffix; string file_path = data->file_list.at(data->current_file_idx).path; @@ -631,17 
+571,13 @@ int gstreamer_file_source_init(CustomData *data, GstElement *pipeline) { return 1; } - if (!demux || !filesrc || !h264parse || !kvssink || !pipeline || !filter) { g_printerr("Not all elements could be created:\n"); return 1; } // configure filter - GstCaps *h264_caps = gst_caps_new_simple("video/x-h264", - "stream-format", G_TYPE_STRING, "avc", - "alignment", G_TYPE_STRING, "au", - NULL); + GstCaps* h264_caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); g_object_set(G_OBJECT(filter), "caps", h264_caps, NULL); gst_caps_unref(h264_caps); @@ -655,21 +591,16 @@ int gstreamer_file_source_init(CustomData *data, GstElement *pipeline) { // configure demux g_signal_connect(demux, "pad-added", G_CALLBACK(pad_added_cb), queue); - /* build the pipeline */ - gst_bin_add_many(GST_BIN(pipeline), demux, - filesrc, filter, kvssink, h264parse, queue, - NULL); + gst_bin_add_many(GST_BIN(pipeline), demux, filesrc, filter, kvssink, h264parse, queue, NULL); - if (!gst_element_link_many(filesrc, demux, - NULL)) { + if (!gst_element_link_many(filesrc, demux, NULL)) { g_printerr("Elements could not be linked.\n"); gst_object_unref(pipeline); return 1; } - if (!gst_element_link_many(queue, h264parse, filter, kvssink, - NULL)) { + if (!gst_element_link_many(queue, h264parse, filter, kvssink, NULL)) { g_printerr("Video elements could not be linked.\n"); gst_object_unref(pipeline); return 1; @@ -678,12 +609,12 @@ int gstreamer_file_source_init(CustomData *data, GstElement *pipeline) { return 0; } -int gstreamer_init(int argc, char *argv[], CustomData *data) { - +int gstreamer_init(int argc, char* argv[], CustomData* data) +{ /* init GStreamer */ gst_init(&argc, &argv); - GstElement *pipeline; + GstElement* pipeline; int ret; GstStateChangeReturn gst_ret; @@ -713,7 +644,7 @@ int gstreamer_init(int argc, char *argv[], CustomData *data) { } /* Instruct the bus to emit signals for each received message, and connect 
to the interesting signals */ - GstBus *bus = gst_element_get_bus(pipeline); + GstBus* bus = gst_element_get_bus(pipeline); gst_bus_add_signal_watch(bus); g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_cb, data); g_signal_connect(G_OBJECT(bus), "message::eos", G_CALLBACK(eos_cb), data); @@ -723,21 +654,20 @@ int gstreamer_init(int argc, char *argv[], CustomData *data) { if (gst_ret == GST_STATE_CHANGE_FAILURE) { g_printerr("Unable to set the pipeline to the playing state.\n"); gst_object_unref(pipeline); - data->stream_status = STATUS_KVS_GSTREAMER_SAMPLE_ERROR; + data->stream_status = STATUS_KVS_GSTREAMER_SAMPLE_ERROR; return 1; } // set timer if valid runtime provided (non-positive values are ignored) - if (data->streamSource != FILE_SOURCE && data->max_runtime > 0){ - LOG_DEBUG("Timeout is " << data->max_runtime << " seconds."); - std::thread stream_timer(timer, data); - stream_timer.detach(); + if (data->streamSource != FILE_SOURCE && data->max_runtime > 0) { + LOG_DEBUG("Timeout is " << data->max_runtime << " seconds."); + std::thread stream_timer(timer, data); + stream_timer.detach(); } LOG_DEBUG("before main loop"); data->main_loop = g_main_loop_new(NULL, FALSE); g_main_loop_run(data->main_loop); LOG_DEBUG("after main loop") - /* free resources */ gst_bus_remove_signal_watch(bus); gst_element_set_state(pipeline, GST_STATE_NULL); @@ -747,14 +677,15 @@ int gstreamer_init(int argc, char *argv[], CustomData *data) { return 0; } -int main(int argc, char *argv[]) { +int main(int argc, char* argv[]) +{ PropertyConfigurator::doConfigure("../kvs_log_configuration"); signal(SIGINT, sigint_handler); if (argc < 2) { LOG_ERROR( - "Usage: AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kvssink_gstreamer_sample_app my-stream-name -w width -h height -f framerate -b bitrateInKBPS -runtime runtimeInSeconds\n \ + "Usage: AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kvssink_gstreamer_sample_app my-stream-name -w width -h 
height -f framerate -b bitrateInKBPS -runtime runtimeInSeconds\n \ or AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kvssink_gstreamer_sample_app my-stream-name\n \ or AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kvssink_gstreamer_sample_app my-stream-name rtsp-url -runtime runtimeInSeconds\n \ or AWS_ACCESS_KEY_ID=SAMPLEKEY AWS_SECRET_ACCESS_KEY=SAMPLESECRET ./kvssink_gstreamer_sample_app my-stream-name path/to/file1 path/to/file2 ...\n"); @@ -783,9 +714,7 @@ int main(int argc, char *argv[]) { data_global.streamSource = RTSP_SOURCE; data_global.rtsp_url = string(argv[2]); - } else if (suffix.compare("mkv") == 0 || - suffix.compare("mp4") == 0 || - suffix.compare(".ts") == 0) { + } else if (suffix.compare("mkv") == 0 || suffix.compare("mp4") == 0 || suffix.compare(".ts") == 0) { data_global.streamSource = FILE_SOURCE; // skip over stream name for (int i = 2; i < argc; ++i) { @@ -811,7 +740,6 @@ int main(int argc, char *argv[]) { bool continue_uploading = true; for (; i < data_global.file_list.size() && continue_uploading; ++i) { - data_global.current_file_idx = i; LOG_DEBUG("Attempt to upload file: " << data_global.file_list[i].path); @@ -822,26 +750,26 @@ int main(int argc, char *argv[]) { // check if any stream error occurred. stream_status = data_global.stream_status.load(); - - if (STATUS_FAILED(stream_status)) { + + if (STATUS_FAILED(stream_status)) { continue_uploading = false; - do_retry = false; + do_retry = false; if (stream_status == GST_FLOW_ERROR) { LOG_ERROR("Fatal stream error occurred: " << stream_status << ". Terminating."); - } else if(stream_status == STATUS_KVS_GSTREAMER_SAMPLE_INTERRUPTED){ - LOG_ERROR("File upload interrupted. Terminating."); - continue_uploading = false; - }else { // non fatal case. retry upload + } else if (stream_status == STATUS_KVS_GSTREAMER_SAMPLE_INTERRUPTED) { + LOG_ERROR("File upload interrupted. Terminating."); + continue_uploading = false; + } else { // non fatal case. 
retry upload LOG_ERROR("stream error occurred: " << stream_status << ". Terminating."); do_retry = true; } } else { LOG_INFO("Finished sending file to kvs producer: " << data_global.file_list[i].path); - data_global.last_unpersisted_file_idx += 1; + data_global.last_unpersisted_file_idx += 1; // check if we just finished sending the last file. if (i == data_global.file_list.size() - 1) { LOG_INFO("All files have been persisted"); - do_retry = false; + do_retry = false; } } } @@ -851,7 +779,7 @@ int main(int argc, char *argv[]) { if (file_retry_count == 0) { i = data_global.last_unpersisted_file_idx.load(); LOG_ERROR("Failed to upload file " << data_global.file_list[i].path << " after retrying. Terminating."); - do_retry = false; // exit while loop + do_retry = false; // exit while loop } else { // reset state data_global.stream_status = STATUS_SUCCESS; @@ -865,16 +793,15 @@ int main(int argc, char *argv[]) { if (gstreamer_init(argc, argv, &data_global) != 0) { return 1; } - stream_status = data_global.stream_status.load(); + stream_status = data_global.stream_status.load(); if (STATUS_SUCCEEDED(stream_status)) { LOG_INFO("Stream succeeded"); - } else if(stream_status == STATUS_KVS_GSTREAMER_SAMPLE_INTERRUPTED){ - LOG_INFO("Stream Interrupted"); - } else { + } else if (stream_status == STATUS_KVS_GSTREAMER_SAMPLE_INTERRUPTED) { + LOG_INFO("Stream Interrupted"); + } else { LOG_INFO("Stream Failed"); } } return 0; } - diff --git a/scripts/check-clang.sh b/scripts/check-clang.sh new file mode 100644 index 00000000..5c1fb665 --- /dev/null +++ b/scripts/check-clang.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +if [[ -z $CLANG_FORMAT ]] ; then + CLANG_FORMAT=clang-format +fi + +if ! type $CLANG_FORMAT 2> /dev/null ; then + echo "No appropriate clang-format found." + exit 1 +fi + +FAIL=0 +SOURCE_FILES=`find src samples -type f \( -name '*.h' -o -name '*.cpp' \) -not -path "*/external/*"` +echo "Performing clang format compliance check...."
+for i in $SOURCE_FILES +do + $CLANG_FORMAT -output-replacements-xml $i | grep -c "<replacement " > /dev/null + if [ $? -ne 1 ] + then + echo "$i failed clang-format check." + FAIL=1 + fi +done + +if [ $FAIL -ne 1 ]; then + echo "Clang check passed for the project" +fi + +exit $FAIL \ No newline at end of file diff --git a/scripts/clang-format.sh b/scripts/clang-format.sh new file mode 100644 index 00000000..11ac507c --- /dev/null +++ b/scripts/clang-format.sh @@ -0,0 +1,39 @@ +#!/bin/bash +parse_args() { + while [[ $# -gt 0 ]] + do + key="$1" + case $key in + -f) + echo "Clang formatting the files specified" + clang-format -style=file -i -fallback-style=none $2 + shift # past argument + shift # past value + ;; + -d) + echo "Clang formatting all files in the directory" + find $2 -iname *.h -o -iname *.cpp -o -iname *.c | xargs clang-format -style=file -i -fallback-style=none + shift # past argument + shift # past value + ;; + -g) + echo "Clang formatting only git diff'ed output" + clang-format -style=file -fallback-style=none -i `git ls-files -om "*.[ch]"` + shift # past argument + ;; + -h|--help) + echo "-f: Pass list of files to be clang formatted" + echo "-a: Clang format all files in the project" + echo "-d: Clang format all files in the directory passed after this option" + echo "-g: Clang formatting only git diff'ed output" + exit 0 + ;; + *) # unknown option + echo "Unknown option $key" + exit 1 + ;; + esac + done +} + +parse_args $@ diff --git a/src/Auth.cpp b/src/Auth.cpp index d34c8c30..a82644d0 100644 --- a/src/Auth.cpp +++ b/src/Auth.cpp @@ -3,52 +3,57 @@ LOGGER_TAG("com.amazonaws.kinesis.video"); -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { using std::mutex; -CredentialProvider::CredentialProvider() - : next_rotation_time_(0), - security_token_(NULL) { +CredentialProvider::CredentialProvider() : next_rotation_time_(0), security_token_(NULL) +{
MEMSET(&callbacks_, 0, SIZEOF(callbacks_)); } -void CredentialProvider::getCredentials(Credentials& credentials) { - +void CredentialProvider::getCredentials(Credentials& credentials) +{ // synchronize credential access since multiple clients may call simultaneously std::lock_guard guard(credential_mutex_); refreshCredentials(); credentials = credentials_; } -void CredentialProvider::getUpdatedCredentials(Credentials& credentials) { - +void CredentialProvider::getUpdatedCredentials(Credentials& credentials) +{ // synchronize credential access since multiple clients may call simultaneously std::lock_guard guard(credential_mutex_); refreshCredentials(true); credentials = credentials_; } -void CredentialProvider::refreshCredentials(bool forceUpdate) { +void CredentialProvider::refreshCredentials(bool forceUpdate) +{ auto now_time = systemCurrentTime().time_since_epoch(); // update if we've exceeded the refresh interval with grace period if (forceUpdate || now_time + CredentialProviderGracePeriod > next_rotation_time_) { - LOG_DEBUG("Refreshing credentials. Force refreshing: " << forceUpdate - << " Now time is: " << now_time.count() - << " Expiration: " << next_rotation_time_.count()); + LOG_DEBUG("Refreshing credentials. 
Force refreshing: " << forceUpdate << " Now time is: " << now_time.count() + << " Expiration: " << next_rotation_time_.count()); updateCredentials(credentials_); next_rotation_time_ = credentials_.getExpiration(); } } -void CredentialProvider::updateCredentials(Credentials &credentials) { +void CredentialProvider::updateCredentials(Credentials& credentials) +{ // no-op } -CredentialProvider::~CredentialProvider() { +CredentialProvider::~CredentialProvider() +{ freeAwsCredentials(&security_token_); } -CredentialProvider::callback_t CredentialProvider::getCallbacks(PClientCallbacks clientCallbacks) { +CredentialProvider::callback_t CredentialProvider::getCallbacks(PClientCallbacks clientCallbacks) +{ UNUSED_PARAM(clientCallbacks); MEMSET(&callbacks_, 0, SIZEOF(callbacks_)); @@ -65,30 +70,37 @@ CredentialProvider::callback_t CredentialProvider::getCallbacks(PClientCallbacks return callbacks_; } -GetDeviceCertificateFunc CredentialProvider::getDeviceCertificateCallback() { +GetDeviceCertificateFunc CredentialProvider::getDeviceCertificateCallback() +{ // we are using a security token, so this callback should be null. return nullptr; } -GetDeviceFingerprintFunc CredentialProvider::getDeviceFingerPrintCallback() { +GetDeviceFingerprintFunc CredentialProvider::getDeviceFingerPrintCallback() +{ // we are using a security token, so this callback should be null. return nullptr; } -GetSecurityTokenFunc CredentialProvider::getSecurityTokenCallback() { +GetSecurityTokenFunc CredentialProvider::getSecurityTokenCallback() +{ return getSecurityTokenHandler; } -GetStreamingTokenFunc CredentialProvider::getStreamingTokenCallback() { +GetStreamingTokenFunc CredentialProvider::getStreamingTokenCallback() +{ return getStreamingTokenHandler; } -DeviceCertToTokenFunc CredentialProvider::deviceCertToTokenCallback() { +DeviceCertToTokenFunc CredentialProvider::deviceCertToTokenCallback() +{ // We are using a security token, so this callback should be null. 
return nullptr; } -STATUS CredentialProvider::getStreamingTokenHandler(UINT64 custom_data, PCHAR stream_name, STREAM_ACCESS_MODE access_mode, PServiceCallContext p_service_call_context) { +STATUS CredentialProvider::getStreamingTokenHandler(UINT64 custom_data, PCHAR stream_name, STREAM_ACCESS_MODE access_mode, + PServiceCallContext p_service_call_context) +{ LOG_DEBUG("getStreamingTokenHandler invoked"); STATUS status = STATUS_SUCCESS; UNUSED_PARAM(stream_name); @@ -112,22 +124,18 @@ STATUS CredentialProvider::getStreamingTokenHandler(UINT64 custom_data, PCHAR st freeAwsCredentials(&this_obj->security_token_); // Store the buffer so we can release it at the end - if(IS_EMPTY_STRING(session_token.c_str())) { - status = createAwsCredentials((PCHAR) access_key.c_str(), access_key_len, (PCHAR) secret_key.c_str(), secret_key_len, - nullptr, 0, expiration, &this_obj->security_token_); + if (IS_EMPTY_STRING(session_token.c_str())) { + status = createAwsCredentials((PCHAR) access_key.c_str(), access_key_len, (PCHAR) secret_key.c_str(), secret_key_len, nullptr, 0, expiration, + &this_obj->security_token_); } else { status = createAwsCredentials((PCHAR) access_key.c_str(), access_key_len, (PCHAR) secret_key.c_str(), secret_key_len, - (PCHAR) session_token.c_str(), session_token_len, expiration, &this_obj->security_token_); - + (PCHAR) session_token.c_str(), session_token_len, expiration, &this_obj->security_token_); } - if(STATUS_SUCCEEDED(status)) { - status = getStreamingTokenResultEvent( - p_service_call_context->customData, SERVICE_CALL_RESULT_OK, - reinterpret_cast(this_obj->security_token_), - this_obj->security_token_->size, - expiration); + if (STATUS_SUCCEEDED(status)) { + status = getStreamingTokenResultEvent(p_service_call_context->customData, SERVICE_CALL_RESULT_OK, + reinterpret_cast(this_obj->security_token_), this_obj->security_token_->size, expiration); } else { LOG_ERROR("getStreamingTokenHandler failed with code " << std::hex << status); } @@ -135,7 +143,8 
@@ STATUS CredentialProvider::getStreamingTokenHandler(UINT64 custom_data, PCHAR st return status; } -STATUS CredentialProvider::getSecurityTokenHandler(UINT64 custom_data, PBYTE* pp_token, PUINT32 p_size, PUINT64 p_expiration) { +STATUS CredentialProvider::getSecurityTokenHandler(UINT64 custom_data, PBYTE* pp_token, PUINT32 p_size, PUINT64 p_expiration) +{ LOG_DEBUG("getSecurityTokenHandler invoked"); auto this_obj = reinterpret_cast(custom_data); @@ -143,7 +152,6 @@ STATUS CredentialProvider::getSecurityTokenHandler(UINT64 custom_data, PBYTE* pp Credentials credentials; this_obj->getCredentials(credentials); - auto access_key = credentials.getAccessKey(); auto access_key_len = access_key.length(); auto secret_key = credentials.getSecretKey(); @@ -158,19 +166,18 @@ STATUS CredentialProvider::getSecurityTokenHandler(UINT64 custom_data, PBYTE* pp *p_expiration = credentials.getExpiration().count() * HUNDREDS_OF_NANOS_IN_A_SECOND; // Store the buffer so we can release it at the end - if(IS_EMPTY_STRING(session_token.c_str())) { + if (IS_EMPTY_STRING(session_token.c_str())) { // Store the buffer so we can release it at the end - status = createAwsCredentials((PCHAR) access_key.c_str(), access_key_len, (PCHAR) secret_key.c_str(), secret_key_len, - nullptr, 0, *p_expiration, &this_obj->security_token_); + status = createAwsCredentials((PCHAR) access_key.c_str(), access_key_len, (PCHAR) secret_key.c_str(), secret_key_len, nullptr, 0, + *p_expiration, &this_obj->security_token_); } else { // Store the buffer so we can release it at the end status = createAwsCredentials((PCHAR) access_key.c_str(), access_key_len, (PCHAR) secret_key.c_str(), secret_key_len, - (PCHAR) session_token.c_str(), session_token_len, *p_expiration, &this_obj->security_token_); - + (PCHAR) session_token.c_str(), session_token_len, *p_expiration, &this_obj->security_token_); } - if(STATUS_SUCCEEDED(status)) { + if (STATUS_SUCCEEDED(status)) { *pp_token = (PBYTE) (this_obj->security_token_); *p_size = 
this_obj->security_token_->size; } else { diff --git a/src/Auth.h b/src/Auth.h index c00a8878..4f43a8f2 100644 --- a/src/Auth.h +++ b/src/Auth.h @@ -12,108 +12,114 @@ #include #include "GetTime.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { #define STRING_TO_PCHAR(s) ((PCHAR) ((s).c_str())) /** -* Simple data object around aws credentials -*/ + * Simple data object around aws credentials + */ class Credentials { -public: - Credentials() : access_key_(""), secret_key_(""), session_token_(""), expiration_(std::chrono::seconds(MAX_UINT64)) {} + public: + Credentials() : access_key_(""), secret_key_(""), session_token_(""), expiration_(std::chrono::seconds(MAX_UINT64)) + { + } /** - * Initializes object with access_key, secret_key, session_token and expiration. + * Initializes object with access_key, secret_key, session_token and expiration. * NOTE: Session token defaults to empty. 
* NOTE: Expiration defaults to max uint 64 - */ - Credentials(const std::string& access_key, - const std::string& secret_key, - const std::string& session_token = "", - const std::chrono::duration expiration = std::chrono::seconds(MAX_UINT64)) : - access_key_(access_key), secret_key_(secret_key), session_token_(session_token), expiration_(expiration) + */ + Credentials(const std::string& access_key, const std::string& secret_key, const std::string& session_token = "", + const std::chrono::duration expiration = std::chrono::seconds(MAX_UINT64)) + : access_key_(access_key), secret_key_(secret_key), session_token_(session_token), expiration_(expiration) { } /** * Virtual destructor */ - virtual ~Credentials() {} + virtual ~Credentials() + { + } /** - * Gets the underlying access key credential - */ + * Gets the underlying access key credential + */ inline const std::string& getAccessKey() const { return access_key_; } /** - * Gets the underlying secret key credential - */ + * Gets the underlying secret key credential + */ inline const std::string& getSecretKey() const { return secret_key_; } /** - * Gets the underlying session token - */ + * Gets the underlying session token + */ inline const std::string& getSessionToken() const { return session_token_; } /** - * Gets the underlying session token, return NULL if not exist - */ + * Gets the underlying session token, return NULL if not exist + */ inline const PCHAR getSessionTokenIfExist() const { return session_token_ == "" ? NULL : (PCHAR) session_token_.c_str(); } /** - * Gets the underlying session tokens expiration - */ + * Gets the underlying session tokens expiration + */ inline const std::chrono::duration& getExpiration() const { return expiration_; } /** - * Sets the underlying access key credential. Copies from parameter access_key. - */ + * Sets the underlying access key credential. Copies from parameter access_key. 
+ */ inline void setAccessKey(const std::string& access_key) { access_key_ = access_key; } /** - * Sets the underlying secret key credential. Copies from parameter secret_key - */ + * Sets the underlying secret key credential. Copies from parameter secret_key + */ inline void setSecretKey(const std::string& secret_key) { secret_key_ = secret_key; } /** - * Sets the underlying session token. Copies from parameter session_token - */ + * Sets the underlying session token. Copies from parameter session_token + */ inline void setSessionToken(const std::string& session_token) { session_token_ = session_token; } /** - * Sets the underlying session tokens expiration. Copies from parameter expiration - */ + * Sets the underlying session tokens expiration. Copies from parameter expiration + */ inline void setExpiration(const std::chrono::duration expiration) { expiration_ = expiration; } - Credentials& operator=(const Credentials& other) { + Credentials& operator=(const Credentials& other) + { access_key_ = other.getAccessKey(); secret_key_ = other.getSecretKey(); session_token_ = other.getSessionToken(); @@ -122,7 +128,7 @@ class Credentials { return *this; } -private: + private: std::string access_key_; std::string secret_key_; std::string session_token_; @@ -130,7 +136,7 @@ class Credentials { }; class CredentialProvider { -public: + public: using callback_t = AuthCallbacks; virtual void getCredentials(Credentials& credentials); virtual void getUpdatedCredentials(Credentials& credentials); @@ -169,12 +175,13 @@ class CredentialProvider { */ virtual DeviceCertToTokenFunc deviceCertToTokenCallback(); -protected: + protected: CredentialProvider(); - const std::chrono::duration CredentialProviderGracePeriod = std::chrono::seconds(5 + (MIN_STREAMING_TOKEN_EXPIRATION_DURATION + STREAMING_TOKEN_EXPIRATION_GRACE_PERIOD) / HUNDREDS_OF_NANOS_IN_A_SECOND); + const std::chrono::duration CredentialProviderGracePeriod = + std::chrono::seconds(5 + 
(MIN_STREAMING_TOKEN_EXPIRATION_DURATION + STREAMING_TOKEN_EXPIRATION_GRACE_PERIOD) / HUNDREDS_OF_NANOS_IN_A_SECOND); -private: + private: void refreshCredentials(bool forceUpdate = false); virtual void updateCredentials(Credentials& credentials) = 0; @@ -194,8 +201,9 @@ class CredentialProvider { }; class EmptyCredentialProvider : public CredentialProvider { -private: - void updateCredentials(Credentials& credentials) override { + private: + void updateCredentials(Credentials& credentials) override + { credentials.setAccessKey(""); credentials.setSecretKey(""); credentials.setSessionToken(""); @@ -204,13 +212,14 @@ class EmptyCredentialProvider : public CredentialProvider { }; class StaticCredentialProvider : public CredentialProvider { -public: - - StaticCredentialProvider(const Credentials& credentials) : credentials_(credentials) {} - -protected: + public: + StaticCredentialProvider(const Credentials& credentials) : credentials_(credentials) + { + } - void updateCredentials(Credentials& credentials) override { + protected: + void updateCredentials(Credentials& credentials) override + { // Copy the stored creds forward credentials = credentials_; diff --git a/src/CachingEndpointOnlyCallbackProvider.cpp b/src/CachingEndpointOnlyCallbackProvider.cpp index 2cb5050c..537bc8a3 100644 --- a/src/CachingEndpointOnlyCallbackProvider.cpp +++ b/src/CachingEndpointOnlyCallbackProvider.cpp @@ -3,58 +3,43 @@ #include "CachingEndpointOnlyCallbackProvider.h" #include "Logger.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { LOGGER_TAG("com.amazonaws.kinesis.video"); using std::move; -using std::unique_ptr; using std::string; +using std::unique_ptr; -CachingEndpointOnlyCallbackProvider::CachingEndpointOnlyCallbackProvider( - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credentials_provider, - const string& 
region, - const string& control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - std::chrono::duration caching_update_period) : CachingEndpointOnlyCallbackProvider( - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credentials_provider), - region, - control_plane_uri, - user_agent_name, - custom_user_agent, - cert_path, - caching_update_period.count() * HUNDREDS_OF_NANOS_IN_A_SECOND) { +CachingEndpointOnlyCallbackProvider::CachingEndpointOnlyCallbackProvider(std::unique_ptr client_callback_provider, + std::unique_ptr stream_callback_provider, + std::unique_ptr credentials_provider, + const string& region, const string& control_plane_uri, + const std::string& user_agent_name, const std::string& custom_user_agent, + const std::string& cert_path, + std::chrono::duration caching_update_period) + : CachingEndpointOnlyCallbackProvider(std::move(client_callback_provider), std::move(stream_callback_provider), std::move(credentials_provider), + region, control_plane_uri, user_agent_name, custom_user_agent, cert_path, + caching_update_period.count() * HUNDREDS_OF_NANOS_IN_A_SECOND) +{ } -CachingEndpointOnlyCallbackProvider::CachingEndpointOnlyCallbackProvider( - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credentials_provider, - const string& region, - const string& control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - uint64_t cache_update_period) : DefaultCallbackProvider( - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credentials_provider), - region, - control_plane_uri, - user_agent_name, - custom_user_agent, - cert_path, - true, - cache_update_period) { +CachingEndpointOnlyCallbackProvider::CachingEndpointOnlyCallbackProvider(unique_ptr client_callback_provider, + unique_ptr 
stream_callback_provider, + unique_ptr credentials_provider, const string& region, + const string& control_plane_uri, const std::string& user_agent_name, + const std::string& custom_user_agent, const std::string& cert_path, + uint64_t cache_update_period) + : DefaultCallbackProvider(std::move(client_callback_provider), std::move(stream_callback_provider), std::move(credentials_provider), region, + control_plane_uri, user_agent_name, custom_user_agent, cert_path, true, cache_update_period) +{ } -CachingEndpointOnlyCallbackProvider::~CachingEndpointOnlyCallbackProvider() { +CachingEndpointOnlyCallbackProvider::~CachingEndpointOnlyCallbackProvider() +{ } } // namespace video diff --git a/src/CachingEndpointOnlyCallbackProvider.h b/src/CachingEndpointOnlyCallbackProvider.h index bbef3435..5f09d973 100644 --- a/src/CachingEndpointOnlyCallbackProvider.h +++ b/src/CachingEndpointOnlyCallbackProvider.h @@ -19,32 +19,27 @@ #include #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { class CachingEndpointOnlyCallbackProvider : public DefaultCallbackProvider { -public: - explicit CachingEndpointOnlyCallbackProvider( - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credentials_provider, - const std::string ®ion, - const std::string &control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - uint64_t cache_update_period); + public: + explicit CachingEndpointOnlyCallbackProvider(std::unique_ptr client_callback_provider, + std::unique_ptr stream_callback_provider, + std::unique_ptr credentials_provider, const std::string& region, + const std::string& control_plane_uri, const std::string& user_agent_name, + const std::string& custom_user_agent, const std::string& cert_path, uint64_t cache_update_period); explicit 
CachingEndpointOnlyCallbackProvider( - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credentials_provider = - (std::unique_ptr) new EmptyCredentialProvider(), - const std::string ®ion = DEFAULT_AWS_REGION, - const std::string &control_plane_uri = EMPTY_STRING, - const std::string &user_agent_name = EMPTY_STRING, - const std::string &custom_user_agent = EMPTY_STRING, - const std::string &cert_path = EMPTY_STRING, - std::chrono::duration caching_update_period = std::chrono::seconds(DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD / HUNDREDS_OF_NANOS_IN_A_SECOND)); + std::unique_ptr client_callback_provider, std::unique_ptr stream_callback_provider, + std::unique_ptr credentials_provider = (std::unique_ptr) new EmptyCredentialProvider(), + const std::string& region = DEFAULT_AWS_REGION, const std::string& control_plane_uri = EMPTY_STRING, + const std::string& user_agent_name = EMPTY_STRING, const std::string& custom_user_agent = EMPTY_STRING, + const std::string& cert_path = EMPTY_STRING, + std::chrono::duration caching_update_period = std::chrono::seconds(DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD / + HUNDREDS_OF_NANOS_IN_A_SECOND)); virtual ~CachingEndpointOnlyCallbackProvider(); }; diff --git a/src/CallbackProvider.cpp b/src/CallbackProvider.cpp index ba3b9669..9a604833 100644 --- a/src/CallbackProvider.cpp +++ b/src/CallbackProvider.cpp @@ -2,12 +2,16 @@ #include "CallbackProvider.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { -CallbackProvider::callback_t CallbackProvider::getCallbacks() { +CallbackProvider::callback_t CallbackProvider::getCallbacks() +{ MEMSET(&callbacks_, 0, SIZEOF(callbacks_)); callbacks_.customData = reinterpret_cast(this); - callbacks_.version = CALLBACKS_CURRENT_VERSION; // from kinesis video client include + callbacks_.version = CALLBACKS_CURRENT_VERSION; // from kinesis video client 
include callbacks_.getDeviceCertificateFn = getDeviceCertificateCallback(); callbacks_.getSecurityTokenFn = getSecurityTokenCallback(); callbacks_.getDeviceFingerprintFn = getDeviceFingerprintCallback(); @@ -52,140 +56,174 @@ CallbackProvider::callback_t CallbackProvider::getCallbacks() { return callbacks_; } -void CallbackProvider::shutdown() { +void CallbackProvider::shutdown() +{ // No-op } -void CallbackProvider::shutdownStream(STREAM_HANDLE stream_handle) { +void CallbackProvider::shutdownStream(STREAM_HANDLE stream_handle) +{ UNUSED_PARAM(stream_handle); // No-op } -CreateMutexFunc CallbackProvider::getCreateMutexCallback() { +CreateMutexFunc CallbackProvider::getCreateMutexCallback() +{ return nullptr; } -LockMutexFunc CallbackProvider::getLockMutexCallback() { +LockMutexFunc CallbackProvider::getLockMutexCallback() +{ return nullptr; } -UnlockMutexFunc CallbackProvider::getUnlockMutexCallback() { +UnlockMutexFunc CallbackProvider::getUnlockMutexCallback() +{ return nullptr; } -TryLockMutexFunc CallbackProvider::getTryLockMutexCallback() { +TryLockMutexFunc CallbackProvider::getTryLockMutexCallback() +{ return nullptr; } -FreeMutexFunc CallbackProvider::getFreeMutexCallback() { +FreeMutexFunc CallbackProvider::getFreeMutexCallback() +{ return nullptr; } -CreateConditionVariableFunc CallbackProvider::getCreateConditionVariableCallback() { +CreateConditionVariableFunc CallbackProvider::getCreateConditionVariableCallback() +{ return nullptr; } -SignalConditionVariableFunc CallbackProvider::getSignalConditionVariableCallback() { +SignalConditionVariableFunc CallbackProvider::getSignalConditionVariableCallback() +{ return nullptr; } -BroadcastConditionVariableFunc CallbackProvider::getBroadcastConditionVariableCallback() { +BroadcastConditionVariableFunc CallbackProvider::getBroadcastConditionVariableCallback() +{ return nullptr; } -WaitConditionVariableFunc CallbackProvider::getWaitConditionVariableCallback() { +WaitConditionVariableFunc 
CallbackProvider::getWaitConditionVariableCallback() +{ return nullptr; } -FreeConditionVariableFunc CallbackProvider::getFreeConditionVariableCallback() { +FreeConditionVariableFunc CallbackProvider::getFreeConditionVariableCallback() +{ return nullptr; } -GetCurrentTimeFunc CallbackProvider::getCurrentTimeCallback() { +GetCurrentTimeFunc CallbackProvider::getCurrentTimeCallback() +{ return nullptr; } -GetRandomNumberFunc CallbackProvider::getRandomNumberCallback() { +GetRandomNumberFunc CallbackProvider::getRandomNumberCallback() +{ return nullptr; } -LogPrintFunc CallbackProvider::getLogPrintCallback() { +LogPrintFunc CallbackProvider::getLogPrintCallback() +{ return nullptr; } -ClientReadyFunc CallbackProvider::getClientReadyCallback() { +ClientReadyFunc CallbackProvider::getClientReadyCallback() +{ return nullptr; } -StreamDataAvailableFunc CallbackProvider::getStreamDataAvailableCallback() { +StreamDataAvailableFunc CallbackProvider::getStreamDataAvailableCallback() +{ return nullptr; } -StreamReadyFunc CallbackProvider::getStreamReadyCallback() { +StreamReadyFunc CallbackProvider::getStreamReadyCallback() +{ return nullptr; } -StreamClosedFunc CallbackProvider::getStreamClosedCallback() { +StreamClosedFunc CallbackProvider::getStreamClosedCallback() +{ return nullptr; } -DroppedFragmentReportFunc CallbackProvider::getDroppedFragmentReportCallback() { +DroppedFragmentReportFunc CallbackProvider::getDroppedFragmentReportCallback() +{ return nullptr; } -StreamErrorReportFunc CallbackProvider::getStreamErrorReportCallback() { +StreamErrorReportFunc CallbackProvider::getStreamErrorReportCallback() +{ return nullptr; } -StreamConnectionStaleFunc CallbackProvider::getStreamConnectionStaleCallback() { +StreamConnectionStaleFunc CallbackProvider::getStreamConnectionStaleCallback() +{ return nullptr; } -DroppedFrameReportFunc CallbackProvider::getDroppedFrameReportCallback() { +DroppedFrameReportFunc CallbackProvider::getDroppedFrameReportCallback() +{ return nullptr; 
} -BufferDurationOverflowPressureFunc CallbackProvider::getBufferDurationOverflowPressureCallback() { +BufferDurationOverflowPressureFunc CallbackProvider::getBufferDurationOverflowPressureCallback() +{ return nullptr; } -StreamLatencyPressureFunc CallbackProvider::getStreamLatencyPressureCallback() { +StreamLatencyPressureFunc CallbackProvider::getStreamLatencyPressureCallback() +{ return nullptr; } -FragmentAckReceivedFunc CallbackProvider::getFragmentAckReceivedCallback() { +FragmentAckReceivedFunc CallbackProvider::getFragmentAckReceivedCallback() +{ return nullptr; } -StorageOverflowPressureFunc CallbackProvider::getStorageOverflowPressureCallback() { +StorageOverflowPressureFunc CallbackProvider::getStorageOverflowPressureCallback() +{ return nullptr; } -StreamUnderflowReportFunc CallbackProvider::getStreamUnderflowReportCallback() { +StreamUnderflowReportFunc CallbackProvider::getStreamUnderflowReportCallback() +{ return nullptr; } -GetDeviceFingerprintFunc CallbackProvider::getDeviceFingerprintCallback() { +GetDeviceFingerprintFunc CallbackProvider::getDeviceFingerprintCallback() +{ return nullptr; } -GetSecurityTokenFunc CallbackProvider::getSecurityTokenCallback() { +GetSecurityTokenFunc CallbackProvider::getSecurityTokenCallback() +{ return nullptr; } -GetDeviceCertificateFunc CallbackProvider::getDeviceCertificateCallback() { +GetDeviceCertificateFunc CallbackProvider::getDeviceCertificateCallback() +{ return nullptr; } -DeviceCertToTokenFunc CallbackProvider::getDeviceCertToTokenCallback() { +DeviceCertToTokenFunc CallbackProvider::getDeviceCertToTokenCallback() +{ return nullptr; } -StreamShutdownFunc CallbackProvider::getStreamShutdownCallback() { +StreamShutdownFunc CallbackProvider::getStreamShutdownCallback() +{ return nullptr; } -ClientShutdownFunc CallbackProvider::getClientShutdownCallback() { +ClientShutdownFunc CallbackProvider::getClientShutdownCallback() +{ return nullptr; } diff --git a/src/CallbackProvider.h b/src/CallbackProvider.h index 
97eb292e..e0f5e5c0 100644 --- a/src/CallbackProvider.h +++ b/src/CallbackProvider.h @@ -4,78 +4,81 @@ #include "com/amazonaws/kinesis/video/client/Include.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** -* Interface extracted from the callbacks that the Kinesis Video SDK exposes for implementation by clients. -* Some of the callbacks are optional and if left null will have defaults from the SDK used. Other callbacks must be -* implemented by the user of the SDK. -* The key to understanding the Kinesis Video SDK is that it is at its heart a state machine. The calls from the -* application drive the state machine and move it via public API calls from one state to the next. -* In order for the state machine to transition from one state to the next, it often needs the client to do work -* because the SDK itself has no threads and no place to do any meaningful work (i.e. network IO). -* This is where the callbacks defined in this class come in. They do the heavy lifting for calls that are OS -* dependant or might require a thread to handle a RPC call that may block. Most of the callbacks fall into this -* category; however there are others, that are notification callbacks which provide feedback to the application -* about a stream in the running state. These callbacks must be defined but the aren't strictly required to do anything -* interesting, but your application can take advantage of them to implement smarter congestion avoidance. 
-* -* The break down is as follows -* -* Required callbacks: -* -* Authentication callback (you must implement *one* out of this group depending on your auth method): -* getDeviceCertificateCallback(); -* getSecurityTokenCallback(); -* getDeviceFingerprintCallback(); -* -* Stream notification callbacks (you must define them, but you are not required to do anything to run): -* getStreamUnderflowReportCallback(); -* getStorageOverflowPressureCallback(); -* getStreamLatencyPressureCallback(); -* getStreamConnectionStaleCallback(); -* getDroppedFrameReportCallback(); -* getDroppedFragmentReportCallback(); -* getStreamErrorReportCallback(); -* getStreamReadyCallback(); -* getStreamDataAvailableCallback(); -* -* State Machine Driven Callbacks (you must implement all of them and they must do the right thing): -* getCreateStreamCallback(); -* getDescribeStreamCallback(); -* getStreamingEndpointCallback(); -* getStreamingTokenCallback(); -* getPutStreamCallback(); -* getTagResourceCallback(); -* -* Device/Client level Callbacks -* -* getCreateDeviceCallback(); -* getDeviceCertToTokenCallback(); -* getClientReadyCallback(); -* -* Optional callbacks: -* -* OS dependent implementations (you don't have to define these): -* getCreateMutexCallback(); -* getLockMutexCallback(); -* getUnlockMutexCallback(); -* getTryLockMutexCallback(); -* getFreeMutexCallback(); -* getCreateConditionVariableCallback(); -* getSignalConditionVariableCallback(); -* getBroadcastConditionVariableCallback(); -* getWaitConditionVariableCallback(); -* getFreeConditionVariableCallback(); -* getCurrentTimeCallback(); -* getRandomNumberCallback(); -* getLogPrintCallback(); -* -* The optional callbacks are virtual, but there are default implementations defined for them that return nullptr, -* which will therefore use the defaults provided by the Kinesis Video SDK. -*/ + * Interface extracted from the callbacks that the Kinesis Video SDK exposes for implementation by clients. 
+ * Some of the callbacks are optional and if left null will have defaults from the SDK used. Other callbacks must be + * implemented by the user of the SDK. + * The key to understanding the Kinesis Video SDK is that it is at its heart a state machine. The calls from the + * application drive the state machine and move it via public API calls from one state to the next. + * In order for the state machine to transition from one state to the next, it often needs the client to do work + * because the SDK itself has no threads and no place to do any meaningful work (i.e. network IO). + * This is where the callbacks defined in this class come in. They do the heavy lifting for calls that are OS + * dependent or might require a thread to handle an RPC call that may block. Most of the callbacks fall into this + * category; however there are others, that are notification callbacks which provide feedback to the application + * about a stream in the running state. These callbacks must be defined but they aren't strictly required to do anything + * interesting, but your application can take advantage of them to implement smarter congestion avoidance. 
+ * + * The break down is as follows + * + * Required callbacks: + * + * Authentication callback (you must implement *one* out of this group depending on your auth method): + * getDeviceCertificateCallback(); + * getSecurityTokenCallback(); + * getDeviceFingerprintCallback(); + * + * Stream notification callbacks (you must define them, but you are not required to do anything to run): + * getStreamUnderflowReportCallback(); + * getStorageOverflowPressureCallback(); + * getStreamLatencyPressureCallback(); + * getStreamConnectionStaleCallback(); + * getDroppedFrameReportCallback(); + * getDroppedFragmentReportCallback(); + * getStreamErrorReportCallback(); + * getStreamReadyCallback(); + * getStreamDataAvailableCallback(); + * + * State Machine Driven Callbacks (you must implement all of them and they must do the right thing): + * getCreateStreamCallback(); + * getDescribeStreamCallback(); + * getStreamingEndpointCallback(); + * getStreamingTokenCallback(); + * getPutStreamCallback(); + * getTagResourceCallback(); + * + * Device/Client level Callbacks + * + * getCreateDeviceCallback(); + * getDeviceCertToTokenCallback(); + * getClientReadyCallback(); + * + * Optional callbacks: + * + * OS dependent implementations (you don't have to define these): + * getCreateMutexCallback(); + * getLockMutexCallback(); + * getUnlockMutexCallback(); + * getTryLockMutexCallback(); + * getFreeMutexCallback(); + * getCreateConditionVariableCallback(); + * getSignalConditionVariableCallback(); + * getBroadcastConditionVariableCallback(); + * getWaitConditionVariableCallback(); + * getFreeConditionVariableCallback(); + * getCurrentTimeCallback(); + * getRandomNumberCallback(); + * getLogPrintCallback(); + * + * The optional callbacks are virtual, but there are default implementations defined for them that return nullptr, + * which will therefore use the defaults provided by the Kinesis Video SDK. 
+ */ class CallbackProvider { -public: + public: using callback_t = ClientCallbacks; /** @@ -307,17 +310,17 @@ class CallbackProvider { virtual StreamErrorReportFunc getStreamErrorReportCallback(); /** - * The function returned by this callback takes three arguments: - * @param 1 UINT64 - Custom handle passed by the caller. - * @param 2 STREAM_HANDLE - Kinesis Video metadata for the stream which is reporting a stale connection. - * @param 3 UINT64 - Duration of the last buffering ACK received in 100ns. - * - * Optional Callback. - * - * The callback returned shall take the appropriate action (decided by the implementor) to handle the stale connection. - * - * @return a function pointer conforming to the description above. - */ + * The function returned by this callback takes three arguments: + * @param 1 UINT64 - Custom handle passed by the caller. + * @param 2 STREAM_HANDLE - Kinesis Video metadata for the stream which is reporting a stale connection. + * @param 3 UINT64 - Duration of the last buffering ACK received in 100ns. + * + * Optional Callback. + * + * The callback returned shall take the appropriate action (decided by the implementor) to handle the stale connection. + * + * @return a function pointer conforming to the description above. + */ virtual StreamConnectionStaleFunc getStreamConnectionStaleCallback(); /** @@ -522,36 +525,38 @@ class CallbackProvider { virtual DeviceCertToTokenFunc getDeviceCertToTokenCallback(); /** - * Shuts down a stream. - * - * The function returned by this callback takes two arguments: - * - * Optional Callback. - * - * @param 1 UINT64 - Custom handle passed by the caller. - * @param 2 STREAM_HANDLE - - The stream to shutdown. - * - * @return a function pointer conforming to the description above. - */ + * Shuts down a stream. + * + * The function returned by this callback takes two arguments: + * + * Optional Callback. + * + * @param 1 UINT64 - Custom handle passed by the caller. 
+ * @param 2 STREAM_HANDLE - - The stream to shutdown. + * + * @return a function pointer conforming to the description above. + */ virtual StreamShutdownFunc getStreamShutdownCallback(); /** - * Shuts down a client. - * - * The function returned by this callback takes two arguments: - * - * Optional Callback. - * - * @param 1 UINT64 - Custom handle passed by the caller. - * @param 2 CLIENT_HANDLE - The client handle. - * - * @return a function pointer conforming to the description above. - */ + * Shuts down a client. + * + * The function returned by this callback takes two arguments: + * + * Optional Callback. + * + * @param 1 UINT64 - Custom handle passed by the caller. + * @param 2 CLIENT_HANDLE - The client handle. + * + * @return a function pointer conforming to the description above. + */ virtual ClientShutdownFunc getClientShutdownCallback(); - virtual ~CallbackProvider() {} + virtual ~CallbackProvider() + { + } -protected: + protected: callback_t callbacks_; }; diff --git a/src/ClientCallbackProvider.h b/src/ClientCallbackProvider.h index 550eb7bc..36d79f01 100644 --- a/src/ClientCallbackProvider.h +++ b/src/ClientCallbackProvider.h @@ -4,17 +4,20 @@ #include "com/amazonaws/kinesis/video/client/Include.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** -* Kinesis Video client level callback provider -* -* getClientReadyCallback(); -* getStorageOverflowPressureCallback(); -* -*/ + * Kinesis Video client level callback provider + * + * getClientReadyCallback(); + * getStorageOverflowPressureCallback(); + * + */ class ClientCallbackProvider { -public: + public: /** * Returns the custom data for this object to be used with the callbacks. * @@ -29,7 +32,8 @@ class ClientCallbackProvider { * * @return a function pointer conforming to the description above. 
*/ - virtual ClientReadyFunc getClientReadyCallback() { + virtual ClientReadyFunc getClientReadyCallback() + { return nullptr; }; @@ -40,11 +44,12 @@ class ClientCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StorageOverflowPressureFunc getStorageOverflowPressureCallback() { + virtual StorageOverflowPressureFunc getStorageOverflowPressureCallback() + { return nullptr; }; - virtual ~ClientCallbackProvider() {}; + virtual ~ClientCallbackProvider(){}; }; } // namespace video diff --git a/src/DefaultCallbackProvider.cpp b/src/DefaultCallbackProvider.cpp index 66a371d1..c5aa5f72 100644 --- a/src/DefaultCallbackProvider.cpp +++ b/src/DefaultCallbackProvider.cpp @@ -3,47 +3,49 @@ #include "DefaultCallbackProvider.h" #include "Logger.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { LOGGER_TAG("com.amazonaws.kinesis.video"); +using std::async; +using std::condition_variable; +using std::function; +using std::future; +using std::future_status; +using std::launch; +using std::lock_guard; +using std::make_shared; using std::move; -using std::unique_ptr; +using std::mutex; +using std::shared_ptr; using std::string; using std::thread; -using std::shared_ptr; -using std::make_shared; -using std::future; -using std::function; +using std::tuple; +using std::unique_ptr; using std::vector; -using std::mutex; -using std::lock_guard; using std::chrono::seconds; -using std::future_status; -using std::condition_variable; -using std::tuple; -using std::async; -using std::launch; -#define CURL_CLOSE_HANDLE_DELAY_IN_MILLIS 200 -#define MAX_CUSTOM_USER_AGENT_STRING_LENGTH 128 -#define CPP_SDK_CUSTOM_USERAGENT "CPPSDK" +#define CURL_CLOSE_HANDLE_DELAY_IN_MILLIS 200 +#define MAX_CUSTOM_USER_AGENT_STRING_LENGTH 128 +#define CPP_SDK_CUSTOM_USERAGENT "CPPSDK" -UINT64 DefaultCallbackProvider::getCurrentTimeHandler(UINT64 custom_data) { 
+UINT64 DefaultCallbackProvider::getCurrentTimeHandler(UINT64 custom_data) +{ UNUSED_PARAM(custom_data); - return std::chrono::duration_cast(systemCurrentTime().time_since_epoch()) - .count() / DEFAULT_TIME_UNIT_IN_NANOS; + return std::chrono::duration_cast(systemCurrentTime().time_since_epoch()).count() / DEFAULT_TIME_UNIT_IN_NANOS; } -STATUS DefaultCallbackProvider::createDeviceHandler( - UINT64 custom_data, PCHAR device_name, PServiceCallContext service_call_ctx) { +STATUS DefaultCallbackProvider::createDeviceHandler(UINT64 custom_data, PCHAR device_name, PServiceCallContext service_call_ctx) +{ UNUSED_PARAM(custom_data); UNUSED_PARAM(device_name); LOG_DEBUG("createDeviceHandler invoked"); // TODO: Implement the upsert of the device in the backend. Returning a dummy arn string device_arn = "arn:aws:kinesisvideo:us-west-2:11111111111:mediastream/device"; - STATUS status = createDeviceResultEvent(service_call_ctx->customData, SERVICE_CALL_RESULT_OK, - const_cast(device_arn.c_str())); + STATUS status = createDeviceResultEvent(service_call_ctx->customData, SERVICE_CALL_RESULT_OK, const_cast(device_arn.c_str())); if (STATUS_FAILED(status)) { LOG_ERROR("createDeviceResultEvent failed with: " << status); } @@ -51,44 +53,31 @@ STATUS DefaultCallbackProvider::createDeviceHandler( return status; } -STATUS DefaultCallbackProvider::streamDataAvailableHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - PCHAR stream_name, - UPLOAD_HANDLE stream_upload_handle, - UINT64 duration_available, - UINT64 size_available) { - LOG_TRACE("streamDataAvailableHandler invoked for stream: " - << stream_name - << " and stream upload handle: " - << stream_upload_handle); +STATUS DefaultCallbackProvider::streamDataAvailableHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, PCHAR stream_name, + UPLOAD_HANDLE stream_upload_handle, UINT64 duration_available, UINT64 size_available) +{ + LOG_TRACE("streamDataAvailableHandler invoked for stream: " << stream_name << " and stream upload 
handle: " << stream_upload_handle); - auto this_obj = reinterpret_cast(custom_data); + auto this_obj = reinterpret_cast(custom_data); auto stream_data_available_callback = this_obj->stream_callback_provider_->getStreamDataAvailableCallback(); if (nullptr != stream_data_available_callback) { - return stream_data_available_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - stream_name, - stream_upload_handle, - duration_available, - size_available); + return stream_data_available_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, stream_name, + stream_upload_handle, duration_available, size_available); } else { return STATUS_SUCCESS; } } -STATUS DefaultCallbackProvider::streamClosedHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE stream_upload_handle) { +STATUS DefaultCallbackProvider::streamClosedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE stream_upload_handle) +{ LOG_DEBUG("streamClosedHandler invoked for upload handle: " << stream_upload_handle); - auto this_obj = reinterpret_cast(custom_data); + auto this_obj = reinterpret_cast(custom_data); auto stream_eos_callback = this_obj->stream_callback_provider_->getStreamClosedCallback(); if (nullptr != stream_eos_callback) { - STATUS status = stream_eos_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - stream_upload_handle); + STATUS status = stream_eos_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, stream_upload_handle); if (STATUS_FAILED(status)) { LOG_ERROR("streamClosedHandler failed with: " << status); } @@ -107,28 +96,24 @@ STATUS DefaultCallbackProvider::streamClosedHandler(UINT64 custom_data, * @param STATUS status code of the failure * @return Status of the callback */ -STATUS DefaultCallbackProvider::streamErrorHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, - UINT64 
fragment_timecode, - STATUS status) { +STATUS DefaultCallbackProvider::streamErrorHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + UINT64 fragment_timecode, STATUS status) +{ LOG_DEBUG("streamErrorHandler invoked"); auto this_obj = reinterpret_cast(custom_data); // Call the client callback if any specified auto stream_error_callback = this_obj->stream_callback_provider_->getStreamErrorReportCallback(); if (nullptr != stream_error_callback) { - return stream_error_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - upload_handle, - fragment_timecode, + return stream_error_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, upload_handle, fragment_timecode, status); } else { return STATUS_SUCCESS; } } -STATUS DefaultCallbackProvider::clientReadyHandler(UINT64 custom_data, CLIENT_HANDLE client_handle) { +STATUS DefaultCallbackProvider::clientReadyHandler(UINT64 custom_data, CLIENT_HANDLE client_handle) +{ LOG_DEBUG("clientReadyHandler invoked"); auto this_obj = reinterpret_cast(custom_data); @@ -141,7 +126,8 @@ STATUS DefaultCallbackProvider::clientReadyHandler(UINT64 custom_data, CLIENT_HA } } -STATUS DefaultCallbackProvider::storageOverflowPressureHandler(UINT64 custom_data, UINT64 bytes_remaining) { +STATUS DefaultCallbackProvider::storageOverflowPressureHandler(UINT64 custom_data, UINT64 bytes_remaining) +{ LOG_DEBUG("storageOverflowPressureHandler invoked"); auto this_obj = reinterpret_cast(custom_data); @@ -154,7 +140,8 @@ STATUS DefaultCallbackProvider::storageOverflowPressureHandler(UINT64 custom_dat } } -STATUS DefaultCallbackProvider::streamUnderflowReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle) { +STATUS DefaultCallbackProvider::streamUnderflowReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle) +{ LOG_DEBUG("streamUnderflowReportHandler invoked"); auto this_obj = reinterpret_cast(custom_data); @@ -167,92 +154,79 @@ STATUS 
DefaultCallbackProvider::streamUnderflowReportHandler(UINT64 custom_data, } } -STATUS DefaultCallbackProvider::streamLatencyPressureHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 buffer_duration) { +STATUS DefaultCallbackProvider::streamLatencyPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 buffer_duration) +{ LOG_DEBUG("streamLatencyPressureHandler invoked"); auto this_obj = reinterpret_cast(custom_data); // Call the client callback if any specified auto stream_latency_callback = this_obj->stream_callback_provider_->getStreamLatencyPressureCallback(); if (nullptr != stream_latency_callback) { - return stream_latency_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - buffer_duration); + return stream_latency_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, buffer_duration); } else { return STATUS_SUCCESS; } } -STATUS DefaultCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 timecode) { +STATUS DefaultCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 timecode) +{ LOG_DEBUG("droppedFrameReportHandler invoked"); auto this_obj = reinterpret_cast(custom_data); // Call the client callback if any specified auto dropped_frame_callback = this_obj->stream_callback_provider_->getDroppedFrameReportCallback(); if (nullptr != dropped_frame_callback) { - return dropped_frame_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - timecode); + return dropped_frame_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, timecode); } else { return STATUS_SUCCESS; } } -STATUS DefaultCallbackProvider::droppedFragmentReportHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 timecode) { +STATUS DefaultCallbackProvider::droppedFragmentReportHandler(UINT64 custom_data, STREAM_HANDLE 
stream_handle, UINT64 timecode) +{ LOG_DEBUG("droppedFragmentReportHandler invoked"); auto this_obj = reinterpret_cast(custom_data); // Call the client callback if any specified auto dropped_fragment_callback = this_obj->stream_callback_provider_->getDroppedFragmentReportCallback(); if (nullptr != dropped_fragment_callback) { - return dropped_fragment_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - timecode); + return dropped_fragment_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, timecode); } else { return STATUS_SUCCESS; } } -STATUS DefaultCallbackProvider::bufferDurationOverflowPressureHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 remaining_duration) { +STATUS DefaultCallbackProvider::bufferDurationOverflowPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 remaining_duration) +{ LOG_DEBUG("bufferDurationOverflowPressureHandler invoked"); auto this_obj = reinterpret_cast(custom_data); // Call the client callback if any specified auto buffer_duration_overflow_pressure_callback = this_obj->stream_callback_provider_->getBufferDurationOverflowPressureCallback(); if (nullptr != buffer_duration_overflow_pressure_callback) { - return buffer_duration_overflow_pressure_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, + return buffer_duration_overflow_pressure_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, remaining_duration); } else { return STATUS_SUCCESS; } } -STATUS DefaultCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 last_ack_duration) { +STATUS DefaultCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_ack_duration) +{ LOG_DEBUG("streamConnectionStaleHandler invoked"); auto this_obj = reinterpret_cast(custom_data); // Call the client callback if any 
specified auto connection_stale_callback = this_obj->stream_callback_provider_->getStreamConnectionStaleCallback(); if (nullptr != connection_stale_callback) { - return connection_stale_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - last_ack_duration); + return connection_stale_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, last_ack_duration); } else { return STATUS_SUCCESS; } } -STATUS DefaultCallbackProvider::streamReadyHandler(UINT64 custom_data, STREAM_HANDLE stream_handle) { +STATUS DefaultCallbackProvider::streamReadyHandler(UINT64 custom_data, STREAM_HANDLE stream_handle) +{ LOG_DEBUG("streamReadyHandler invoked"); auto this_obj = reinterpret_cast(custom_data); @@ -265,34 +239,26 @@ STATUS DefaultCallbackProvider::streamReadyHandler(UINT64 custom_data, STREAM_HA } } -STATUS DefaultCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE uploadHandle, - PFragmentAck fragment_ack) { +STATUS DefaultCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE uploadHandle, + PFragmentAck fragment_ack) +{ LOG_DEBUG("fragmentAckReceivedHandler invoked"); auto this_obj = reinterpret_cast(custom_data); // Call the client callback if any specified auto fragment_ack_callback = this_obj->stream_callback_provider_->getFragmentAckReceivedCallback(); if (nullptr != fragment_ack_callback) { - return fragment_ack_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), - stream_handle, - uploadHandle, - fragment_ack); + return fragment_ack_callback(this_obj->stream_callback_provider_->getCallbackCustomData(), stream_handle, uploadHandle, fragment_ack); } else { return STATUS_SUCCESS; } } -VOID DefaultCallbackProvider::logPrintHandler(UINT32 level, PCHAR tag, PCHAR fmt, ...) 
{ - static log4cplus::LogLevel picLevelToLog4cplusLevel[] = { - log4cplus::TRACE_LOG_LEVEL, - log4cplus::TRACE_LOG_LEVEL, - log4cplus::DEBUG_LOG_LEVEL, - log4cplus::INFO_LOG_LEVEL, - log4cplus::WARN_LOG_LEVEL, - log4cplus::ERROR_LOG_LEVEL, - log4cplus::FATAL_LOG_LEVEL}; +VOID DefaultCallbackProvider::logPrintHandler(UINT32 level, PCHAR tag, PCHAR fmt, ...) +{ + static log4cplus::LogLevel picLevelToLog4cplusLevel[] = {log4cplus::TRACE_LOG_LEVEL, log4cplus::TRACE_LOG_LEVEL, log4cplus::DEBUG_LOG_LEVEL, + log4cplus::INFO_LOG_LEVEL, log4cplus::WARN_LOG_LEVEL, log4cplus::ERROR_LOG_LEVEL, + log4cplus::FATAL_LOG_LEVEL}; UNUSED_PARAM(tag); va_list valist; log4cplus::LogLevel logLevel = log4cplus::TRACE_LOG_LEVEL; @@ -307,106 +273,63 @@ VOID DefaultCallbackProvider::logPrintHandler(UINT32 level, PCHAR tag, PCHAR fmt // Modified _snpbuf.print_va_list(va_list) to accept va_list instead of _snpbuf.print(arg...) LOG4CPLUS_SUPPRESS_DOWHILE_WARNING() do { - log4cplus::Logger const & _l - = log4cplus::detail::macros_get_logger (logger); - if (_l.isEnabledFor (logLevel)) { - LOG4CPLUS_MACRO_INSTANTIATE_SNPRINTF_BUF (_snpbuf); - log4cplus::tchar const * _logEvent; - _snpbuf.print_va_list (_logEvent, fmt, valist); - log4cplus::detail::macro_forced_log (_l, - logLevel, _logEvent, - __FILE__, __LINE__, LOG4CPLUS_MACRO_FUNCTION ()); + log4cplus::Logger const& _l = log4cplus::detail::macros_get_logger(logger); + if (_l.isEnabledFor(logLevel)) { + LOG4CPLUS_MACRO_INSTANTIATE_SNPRINTF_BUF(_snpbuf); + log4cplus::tchar const* _logEvent; + _snpbuf.print_va_list(_logEvent, fmt, valist); + log4cplus::detail::macro_forced_log(_l, logLevel, _logEvent, __FILE__, __LINE__, LOG4CPLUS_MACRO_FUNCTION()); } - } while(0); + } while (0); LOG4CPLUS_RESTORE_DOWHILE_WARNING() va_end(valist); } -DefaultCallbackProvider::DefaultCallbackProvider( - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credentials_provider, - const string& region, - const string& 
control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - bool is_caching_endpoint, - std::chrono::duration caching_update_period) : DefaultCallbackProvider ( - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credentials_provider), - region, - control_plane_uri, - user_agent_name, - custom_user_agent, - cert_path, - is_caching_endpoint, - caching_update_period.count() * HUNDREDS_OF_NANOS_IN_A_SECOND) { -} - -DefaultCallbackProvider::DefaultCallbackProvider( - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credentials_provider, - const string& region, - const string& control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - API_CALL_CACHE_TYPE api_call_caching, - std::chrono::duration caching_update_period) : DefaultCallbackProvider ( - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credentials_provider), - region, - control_plane_uri, - user_agent_name, - custom_user_agent, - cert_path, - api_call_caching, - caching_update_period.count() * HUNDREDS_OF_NANOS_IN_A_SECOND) { -} - -DefaultCallbackProvider::DefaultCallbackProvider( - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credentials_provider, - const string& region, - const string& control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - bool is_caching_endpoint, - uint64_t caching_update_period) : DefaultCallbackProvider ( - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credentials_provider), - region, - control_plane_uri, - user_agent_name, - custom_user_agent, - cert_path, - is_caching_endpoint ? 
API_CALL_CACHE_TYPE_ENDPOINT_ONLY : API_CALL_CACHE_TYPE_NONE, - caching_update_period) { -} - -DefaultCallbackProvider::DefaultCallbackProvider( - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credentials_provider, - const string& region, - const string& control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - API_CALL_CACHE_TYPE api_call_caching, - uint64_t caching_update_period) - : region_(region), - service_(std::string(KINESIS_VIDEO_SERVICE_NAME)), - control_plane_uri_(control_plane_uri), - cert_path_(cert_path) { +DefaultCallbackProvider::DefaultCallbackProvider(unique_ptr client_callback_provider, + unique_ptr stream_callback_provider, + unique_ptr credentials_provider, const string& region, + const string& control_plane_uri, const std::string& user_agent_name, + const std::string& custom_user_agent, const std::string& cert_path, bool is_caching_endpoint, + std::chrono::duration caching_update_period) + : DefaultCallbackProvider(std::move(client_callback_provider), std::move(stream_callback_provider), std::move(credentials_provider), region, + control_plane_uri, user_agent_name, custom_user_agent, cert_path, is_caching_endpoint, + caching_update_period.count() * HUNDREDS_OF_NANOS_IN_A_SECOND) +{ +} + +DefaultCallbackProvider::DefaultCallbackProvider(unique_ptr client_callback_provider, + unique_ptr stream_callback_provider, + unique_ptr credentials_provider, const string& region, + const string& control_plane_uri, const std::string& user_agent_name, + const std::string& custom_user_agent, const std::string& cert_path, + API_CALL_CACHE_TYPE api_call_caching, std::chrono::duration caching_update_period) + : DefaultCallbackProvider(std::move(client_callback_provider), std::move(stream_callback_provider), std::move(credentials_provider), region, + control_plane_uri, user_agent_name, custom_user_agent, cert_path, api_call_caching, + 
caching_update_period.count() * HUNDREDS_OF_NANOS_IN_A_SECOND) +{ +} + +DefaultCallbackProvider::DefaultCallbackProvider(unique_ptr client_callback_provider, + unique_ptr stream_callback_provider, + unique_ptr credentials_provider, const string& region, + const string& control_plane_uri, const std::string& user_agent_name, + const std::string& custom_user_agent, const std::string& cert_path, bool is_caching_endpoint, + uint64_t caching_update_period) + : DefaultCallbackProvider(std::move(client_callback_provider), std::move(stream_callback_provider), std::move(credentials_provider), region, + control_plane_uri, user_agent_name, custom_user_agent, cert_path, + is_caching_endpoint ? API_CALL_CACHE_TYPE_ENDPOINT_ONLY : API_CALL_CACHE_TYPE_NONE, caching_update_period) +{ +} + +DefaultCallbackProvider::DefaultCallbackProvider(unique_ptr client_callback_provider, + unique_ptr stream_callback_provider, + unique_ptr credentials_provider, const string& region, + const string& control_plane_uri, const std::string& user_agent_name, + const std::string& custom_user_agent, const std::string& cert_path, + API_CALL_CACHE_TYPE api_call_caching, uint64_t caching_update_period) + : region_(region), service_(std::string(KINESIS_VIDEO_SERVICE_NAME)), control_plane_uri_(control_plane_uri), cert_path_(cert_path) +{ STATUS retStatus = STATUS_SUCCESS; client_callback_provider_ = std::move(client_callback_provider); stream_callback_provider_ = std::move(stream_callback_provider); @@ -416,11 +339,7 @@ DefaultCallbackProvider::DefaultCallbackProvider( if (control_plane_uri_.empty()) { // Create a fully qualified URI - control_plane_uri_ = CONTROL_PLANE_URI_PREFIX - + std::string(KINESIS_VIDEO_SERVICE_NAME) - + "." - + region_ - + CONTROL_PLANE_URI_POSTFIX; + control_plane_uri_ = CONTROL_PLANE_URI_PREFIX + std::string(KINESIS_VIDEO_SERVICE_NAME) + "." 
+ region_ + CONTROL_PLANE_URI_POSTFIX; // If region is in CN, add CN region uri postfix if (region_.rfind("cn-", 0) == 0) { control_plane_uri_ += ".cn"; @@ -430,16 +349,10 @@ DefaultCallbackProvider::DefaultCallbackProvider( getStreamCallbacks(); getProducerCallbacks(); getPlatformCallbacks(); - if (STATUS_FAILED(retStatus = createAbstractDefaultCallbacksProvider( - DEFAULT_CALLBACK_CHAIN_COUNT, - api_call_caching, - caching_update_period, - STRING_TO_PCHAR(region), - STRING_TO_PCHAR(control_plane_uri), - STRING_TO_PCHAR(cert_path), - STRING_TO_PCHAR (user_agent_name), - STRING_TO_PCHAR(custom_user_agent_), - &client_callbacks_))) { + if (STATUS_FAILED(retStatus = createAbstractDefaultCallbacksProvider(DEFAULT_CALLBACK_CHAIN_COUNT, api_call_caching, caching_update_period, + STRING_TO_PCHAR(region), STRING_TO_PCHAR(control_plane_uri), + STRING_TO_PCHAR(cert_path), STRING_TO_PCHAR(user_agent_name), + STRING_TO_PCHAR(custom_user_agent_), &client_callbacks_))) { std::stringstream status_strstrm; status_strstrm << std::hex << retStatus; LOG_AND_THROW("Unable to create default callback provider. 
Error status: 0x" + status_strstrm.str()); @@ -451,14 +364,16 @@ DefaultCallbackProvider::DefaultCallbackProvider( createContinuousRetryStreamCallbacks(client_callbacks_, &pContinuoutsRetryStreamCallbacks); } -DefaultCallbackProvider::~DefaultCallbackProvider() { +DefaultCallbackProvider::~DefaultCallbackProvider() +{ freeCallbacksProvider(&client_callbacks_); } -StreamCallbacks DefaultCallbackProvider::getStreamCallbacks() { +StreamCallbacks DefaultCallbackProvider::getStreamCallbacks() +{ MEMSET(&stream_callbacks_, 0, SIZEOF(stream_callbacks_)); stream_callbacks_.customData = reinterpret_cast(this); - stream_callbacks_.version = STREAM_CALLBACKS_CURRENT_VERSION; // from kinesis video cproducer include + stream_callbacks_.version = STREAM_CALLBACKS_CURRENT_VERSION; // from kinesis video cproducer include stream_callbacks_.streamReadyFn = getStreamReadyCallback(); stream_callbacks_.streamClosedFn = getStreamClosedCallback(); stream_callbacks_.streamLatencyPressureFn = getStreamLatencyPressureCallback(); @@ -474,129 +389,158 @@ StreamCallbacks DefaultCallbackProvider::getStreamCallbacks() { return stream_callbacks_; } -ProducerCallbacks DefaultCallbackProvider::getProducerCallbacks() { +ProducerCallbacks DefaultCallbackProvider::getProducerCallbacks() +{ MEMSET(&producer_callbacks_, 0, SIZEOF(producer_callbacks_)); producer_callbacks_.customData = reinterpret_cast(this); - producer_callbacks_.version = PRODUCER_CALLBACKS_CURRENT_VERSION; // from kinesis video cproducer include + producer_callbacks_.version = PRODUCER_CALLBACKS_CURRENT_VERSION; // from kinesis video cproducer include producer_callbacks_.storageOverflowPressureFn = getStorageOverflowPressureCallback(); producer_callbacks_.clientReadyFn = getClientReadyCallback(); producer_callbacks_.clientShutdownFn = getClientShutdownCallback(); return producer_callbacks_; } -PlatformCallbacks DefaultCallbackProvider::getPlatformCallbacks() { +PlatformCallbacks DefaultCallbackProvider::getPlatformCallbacks() +{ 
MEMSET(&platform_callbacks_, 0, SIZEOF(platform_callbacks_)); platform_callbacks_.customData = reinterpret_cast(this); - platform_callbacks_.version = PLATFORM_CALLBACKS_CURRENT_VERSION; // from kinesis video cproducer include + platform_callbacks_.version = PLATFORM_CALLBACKS_CURRENT_VERSION; // from kinesis video cproducer include platform_callbacks_.logPrintFn = getLogPrintCallback(); return platform_callbacks_; } -DefaultCallbackProvider::callback_t DefaultCallbackProvider::getCallbacks() { +DefaultCallbackProvider::callback_t DefaultCallbackProvider::getCallbacks() +{ return *client_callbacks_; } -GetStreamingTokenFunc DefaultCallbackProvider::getStreamingTokenCallback() { +GetStreamingTokenFunc DefaultCallbackProvider::getStreamingTokenCallback() +{ return auth_callbacks_.getStreamingTokenFn; } -GetSecurityTokenFunc DefaultCallbackProvider::getSecurityTokenCallback() { +GetSecurityTokenFunc DefaultCallbackProvider::getSecurityTokenCallback() +{ return auth_callbacks_.getSecurityTokenFn; } -DeviceCertToTokenFunc DefaultCallbackProvider::getDeviceCertToTokenCallback() { +DeviceCertToTokenFunc DefaultCallbackProvider::getDeviceCertToTokenCallback() +{ return auth_callbacks_.deviceCertToTokenFn; } -GetDeviceCertificateFunc DefaultCallbackProvider::getDeviceCertificateCallback() { +GetDeviceCertificateFunc DefaultCallbackProvider::getDeviceCertificateCallback() +{ return auth_callbacks_.getDeviceCertificateFn; } -GetDeviceFingerprintFunc DefaultCallbackProvider::getDeviceFingerprintCallback() { - return auth_callbacks_.getDeviceFingerprintFn; +GetDeviceFingerprintFunc DefaultCallbackProvider::getDeviceFingerprintCallback() +{ + return auth_callbacks_.getDeviceFingerprintFn; } -GetCurrentTimeFunc DefaultCallbackProvider::getCurrentTimeCallback() { +GetCurrentTimeFunc DefaultCallbackProvider::getCurrentTimeCallback() +{ return getCurrentTimeHandler; } -DroppedFragmentReportFunc DefaultCallbackProvider::getDroppedFragmentReportCallback() { +DroppedFragmentReportFunc 
DefaultCallbackProvider::getDroppedFragmentReportCallback() +{ return droppedFragmentReportHandler; } -BufferDurationOverflowPressureFunc DefaultCallbackProvider::getBufferDurationOverflowPressureCallback(){ +BufferDurationOverflowPressureFunc DefaultCallbackProvider::getBufferDurationOverflowPressureCallback() +{ return bufferDurationOverflowPressureHandler; } -StreamReadyFunc DefaultCallbackProvider::getStreamReadyCallback() { +StreamReadyFunc DefaultCallbackProvider::getStreamReadyCallback() +{ return streamReadyHandler; } -StreamClosedFunc DefaultCallbackProvider::getStreamClosedCallback() { +StreamClosedFunc DefaultCallbackProvider::getStreamClosedCallback() +{ return streamClosedHandler; } -FragmentAckReceivedFunc DefaultCallbackProvider::getFragmentAckReceivedCallback() { +FragmentAckReceivedFunc DefaultCallbackProvider::getFragmentAckReceivedCallback() +{ return fragmentAckReceivedHandler; } -StreamUnderflowReportFunc DefaultCallbackProvider::getStreamUnderflowReportCallback() { +StreamUnderflowReportFunc DefaultCallbackProvider::getStreamUnderflowReportCallback() +{ return streamUnderflowReportHandler; } -StorageOverflowPressureFunc DefaultCallbackProvider::getStorageOverflowPressureCallback() { +StorageOverflowPressureFunc DefaultCallbackProvider::getStorageOverflowPressureCallback() +{ return storageOverflowPressureHandler; } -StreamLatencyPressureFunc DefaultCallbackProvider::getStreamLatencyPressureCallback() { +StreamLatencyPressureFunc DefaultCallbackProvider::getStreamLatencyPressureCallback() +{ return streamLatencyPressureHandler; } -DroppedFrameReportFunc DefaultCallbackProvider::getDroppedFrameReportCallback() { +DroppedFrameReportFunc DefaultCallbackProvider::getDroppedFrameReportCallback() +{ return droppedFrameReportHandler; } -StreamErrorReportFunc DefaultCallbackProvider::getStreamErrorReportCallback() { +StreamErrorReportFunc DefaultCallbackProvider::getStreamErrorReportCallback() +{ return streamErrorHandler; } -ClientReadyFunc 
DefaultCallbackProvider::getClientReadyCallback() { +ClientReadyFunc DefaultCallbackProvider::getClientReadyCallback() +{ return clientReadyHandler; } -CreateDeviceFunc DefaultCallbackProvider::getCreateDeviceCallback() { +CreateDeviceFunc DefaultCallbackProvider::getCreateDeviceCallback() +{ return createDeviceHandler; } -StreamDataAvailableFunc DefaultCallbackProvider::getStreamDataAvailableCallback() { +StreamDataAvailableFunc DefaultCallbackProvider::getStreamDataAvailableCallback() +{ return streamDataAvailableHandler; } -StreamConnectionStaleFunc DefaultCallbackProvider::getStreamConnectionStaleCallback() { +StreamConnectionStaleFunc DefaultCallbackProvider::getStreamConnectionStaleCallback() +{ return streamConnectionStaleHandler; } -CreateStreamFunc DefaultCallbackProvider::getCreateStreamCallback() { +CreateStreamFunc DefaultCallbackProvider::getCreateStreamCallback() +{ return nullptr; } -DescribeStreamFunc DefaultCallbackProvider::getDescribeStreamCallback() { +DescribeStreamFunc DefaultCallbackProvider::getDescribeStreamCallback() +{ return nullptr; } -GetStreamingEndpointFunc DefaultCallbackProvider::getStreamingEndpointCallback() { +GetStreamingEndpointFunc DefaultCallbackProvider::getStreamingEndpointCallback() +{ return nullptr; } -PutStreamFunc DefaultCallbackProvider::getPutStreamCallback() { +PutStreamFunc DefaultCallbackProvider::getPutStreamCallback() +{ return nullptr; } -TagResourceFunc DefaultCallbackProvider::getTagResourceCallback() { +TagResourceFunc DefaultCallbackProvider::getTagResourceCallback() +{ return nullptr; } -LogPrintFunc DefaultCallbackProvider::getLogPrintCallback() { +LogPrintFunc DefaultCallbackProvider::getLogPrintCallback() +{ return logPrintHandler; } diff --git a/src/DefaultCallbackProvider.h b/src/DefaultCallbackProvider.h index c5216258..a4e57180 100644 --- a/src/DefaultCallbackProvider.h +++ b/src/DefaultCallbackProvider.h @@ -19,58 +19,44 @@ #include #include -namespace com { namespace amazonaws { namespace kinesis 
{ namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { class DefaultCallbackProvider : public CallbackProvider { -public: + public: using callback_t = ClientCallbacks; - explicit DefaultCallbackProvider( - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credentials_provider, - const std::string ®ion = DEFAULT_AWS_REGION, - const std::string &control_plane_uri = EMPTY_STRING, - const std::string &user_agent_name = EMPTY_STRING, - const std::string &custom_user_agent = EMPTY_STRING, - const std::string &cert_path = EMPTY_STRING, - bool is_caching_endpoint = false, - uint64_t caching_update_period = DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD); - - explicit DefaultCallbackProvider( - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credentials_provider = (std::unique_ptr) new EmptyCredentialProvider(), - const std::string ®ion = DEFAULT_AWS_REGION, - const std::string &control_plane_uri = EMPTY_STRING, - const std::string &user_agent_name = EMPTY_STRING, - const std::string &custom_user_agent = EMPTY_STRING, - const std::string &cert_path = EMPTY_STRING, - bool is_caching_endpoint = false, - std::chrono::duration caching_update_period = std::chrono::seconds(DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD / HUNDREDS_OF_NANOS_IN_A_SECOND)); + explicit DefaultCallbackProvider(std::unique_ptr client_callback_provider, + std::unique_ptr stream_callback_provider, + std::unique_ptr credentials_provider, const std::string& region = DEFAULT_AWS_REGION, + const std::string& control_plane_uri = EMPTY_STRING, const std::string& user_agent_name = EMPTY_STRING, + const std::string& custom_user_agent = EMPTY_STRING, const std::string& cert_path = EMPTY_STRING, + bool is_caching_endpoint = false, uint64_t caching_update_period = DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD); explicit DefaultCallbackProvider( - std::unique_ptr client_callback_provider, - 
std::unique_ptr stream_callback_provider, - std::unique_ptr credentials_provider = (std::unique_ptr) new EmptyCredentialProvider(), - const std::string ®ion = DEFAULT_AWS_REGION, - const std::string &control_plane_uri = EMPTY_STRING, - const std::string &user_agent_name = EMPTY_STRING, - const std::string &custom_user_agent = EMPTY_STRING, - const std::string &cert_path = EMPTY_STRING, - API_CALL_CACHE_TYPE api_call_caching = API_CALL_CACHE_TYPE_ALL, - std::chrono::duration caching_update_period = std::chrono::seconds(DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD / HUNDREDS_OF_NANOS_IN_A_SECOND)); + std::unique_ptr client_callback_provider, std::unique_ptr stream_callback_provider, + std::unique_ptr credentials_provider = (std::unique_ptr) new EmptyCredentialProvider(), + const std::string& region = DEFAULT_AWS_REGION, const std::string& control_plane_uri = EMPTY_STRING, + const std::string& user_agent_name = EMPTY_STRING, const std::string& custom_user_agent = EMPTY_STRING, + const std::string& cert_path = EMPTY_STRING, bool is_caching_endpoint = false, + std::chrono::duration caching_update_period = std::chrono::seconds(DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD / + HUNDREDS_OF_NANOS_IN_A_SECOND)); explicit DefaultCallbackProvider( - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credentials_provider, - const std::string ®ion, - const std::string &control_plane_uri, - const std::string &user_agent_name, - const std::string &custom_user_agent, - const std::string &cert_path, - API_CALL_CACHE_TYPE api_call_caching, - uint64_t caching_update_period); + std::unique_ptr client_callback_provider, std::unique_ptr stream_callback_provider, + std::unique_ptr credentials_provider = (std::unique_ptr) new EmptyCredentialProvider(), + const std::string& region = DEFAULT_AWS_REGION, const std::string& control_plane_uri = EMPTY_STRING, + const std::string& user_agent_name = EMPTY_STRING, const std::string& custom_user_agent = 
EMPTY_STRING, + const std::string& cert_path = EMPTY_STRING, API_CALL_CACHE_TYPE api_call_caching = API_CALL_CACHE_TYPE_ALL, + std::chrono::duration caching_update_period = std::chrono::seconds(DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD / + HUNDREDS_OF_NANOS_IN_A_SECOND)); + + explicit DefaultCallbackProvider(std::unique_ptr client_callback_provider, + std::unique_ptr stream_callback_provider, + std::unique_ptr credentials_provider, const std::string& region, + const std::string& control_plane_uri, const std::string& user_agent_name, const std::string& custom_user_agent, + const std::string& cert_path, API_CALL_CACHE_TYPE api_call_caching, uint64_t caching_update_period); virtual ~DefaultCallbackProvider(); @@ -254,9 +240,7 @@ class DefaultCallbackProvider : public CallbackProvider { * * @return Status of the callback */ - static STATUS streamLatencyPressureHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 buffer_duration); + static STATUS streamLatencyPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 buffer_duration); /** * Reports a dropped frame for the stream. @@ -267,9 +251,7 @@ class DefaultCallbackProvider : public CallbackProvider { * * @return Status of the callback */ - static STATUS droppedFrameReportHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 timecode); + static STATUS droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 timecode); /** * Reports temporal buffer pressure. @@ -280,9 +262,7 @@ class DefaultCallbackProvider : public CallbackProvider { * * @return Status of the callback */ - static STATUS bufferDurationOverflowPressureHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 remaining_duration); + static STATUS bufferDurationOverflowPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 remaining_duration); /** * Reports a dropped fragment for the stream. 
@@ -293,9 +273,7 @@ class DefaultCallbackProvider : public CallbackProvider { * * @return Status of the callback */ - static STATUS droppedFragmentReportHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 timecode); + static STATUS droppedFragmentReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 timecode); /** * Reports stream staleness as the last buffering ack is greater than @@ -307,9 +285,7 @@ class DefaultCallbackProvider : public CallbackProvider { * * @return Status of the callback */ - static STATUS streamConnectionStaleHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 last_ack_duration); + static STATUS streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 last_ack_duration); /** * Reports a ready state for the stream. @@ -330,11 +306,8 @@ class DefaultCallbackProvider : public CallbackProvider { * @param 4 PFragmentAck - The constructed fragment ack. * * @return Status of the callback - */ - static STATUS fragmentAckReceivedHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, - PFragmentAck fragment_ack); + */ + static STATUS fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, PFragmentAck fragment_ack); /** * Creates/updates the device in the cloud. Currently, no-op. @@ -345,8 +318,7 @@ class DefaultCallbackProvider : public CallbackProvider { * @param service_call_ctx service call context passed from Kinesis Video PIC * @return Status of the callback */ - static STATUS - createDeviceHandler(UINT64 custom_data, PCHAR device_name, PServiceCallContext service_call_ctx); + static STATUS createDeviceHandler(UINT64 custom_data, PCHAR device_name, PServiceCallContext service_call_ctx); /** * Handles stream fragment errors. 
@@ -358,8 +330,8 @@ class DefaultCallbackProvider : public CallbackProvider { * @param STATUS status code of the failure * @return Status of the callback */ - static STATUS - streamErrorHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 fragment_timecode, STATUS status); + static STATUS streamErrorHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 fragment_timecode, + STATUS status); /** * Gets triggered when data becomes available @@ -372,12 +344,8 @@ class DefaultCallbackProvider : public CallbackProvider { * @param size_available_in_bytes * @return */ - static STATUS streamDataAvailableHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - PCHAR stream_name, - UPLOAD_HANDLE stream_upload_handle, - UINT64 duration_available, - UINT64 size_available); + static STATUS streamDataAvailableHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, PCHAR stream_name, UPLOAD_HANDLE stream_upload_handle, + UINT64 duration_available, UINT64 size_available); /** * Gets triggered the closing of the stream @@ -387,9 +355,7 @@ class DefaultCallbackProvider : public CallbackProvider { * @param stream_upload_handle opaque handle to the current stream upload from the client. 
* @return */ - static STATUS streamClosedHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE stream_upload_handle); + static STATUS streamClosedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE stream_upload_handle); /** * Use log4cplus to print the logs @@ -400,7 +366,7 @@ class DefaultCallbackProvider : public CallbackProvider { */ static VOID logPrintHandler(UINT32 level, PCHAR tag, PCHAR fmt, ...); -protected: + protected: StreamCallbacks getStreamCallbacks(); ProducerCallbacks getProducerCallbacks(); PlatformCallbacks getPlatformCallbacks(); @@ -428,17 +394,17 @@ class DefaultCallbackProvider : public CallbackProvider { /** * Stores the credentials provider */ - std::unique_ptr credentials_provider_; + std::unique_ptr credentials_provider_; /** * Stores the client level callbacks */ - std::unique_ptr client_callback_provider_; + std::unique_ptr client_callback_provider_; /** * Stores the stream level API */ - std::unique_ptr stream_callback_provider_; + std::unique_ptr stream_callback_provider_; /** * Stores all callbacks from PIC diff --git a/src/DefaultDeviceInfoProvider.cpp b/src/DefaultDeviceInfoProvider.cpp index 0163711d..e8736b83 100644 --- a/src/DefaultDeviceInfoProvider.cpp +++ b/src/DefaultDeviceInfoProvider.cpp @@ -5,7 +5,10 @@ #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { LOGGER_TAG("com.amazonaws.kinesis.video"); @@ -15,23 +18,23 @@ using std::string; * Default storage size is 256MB which will be suitable for * the default 3 minutes buffer at 10MBps + some room for defragmentation. 
*/ -#define DEFAULT_STORAGE_SIZE 256 * 1024 * 1024 +#define DEFAULT_STORAGE_SIZE 256 * 1024 * 1024 /** * Default max stream count */ -#define DEFAULT_MAX_STREAM_COUNT 16 +#define DEFAULT_MAX_STREAM_COUNT 16 -DefaultDeviceInfoProvider::DefaultDeviceInfoProvider(const std::string &custom_useragent, const std::string &cert_path) - : custom_useragent_(custom_useragent), - cert_path_(cert_path){ +DefaultDeviceInfoProvider::DefaultDeviceInfoProvider(const std::string& custom_useragent, const std::string& cert_path) + : custom_useragent_(custom_useragent), cert_path_(cert_path) +{ memset(&device_info_, 0, sizeof(device_info_)); device_info_.version = DEVICE_INFO_CURRENT_VERSION; device_info_.clientInfo.version = CLIENT_INFO_CURRENT_VERSION; // Set the device name - const string &device_id = "Kinesis_Video_Device"; - size_t bytes_written = device_id.copy(reinterpret_cast(&(device_info_.name)), device_id.size(), 0); + const string& device_id = "Kinesis_Video_Device"; + size_t bytes_written = device_id.copy(reinterpret_cast(&(device_info_.name)), device_id.size(), 0); // Null terminate the array. 
device_info_.name[bytes_written] = '\0'; @@ -78,21 +81,22 @@ DefaultDeviceInfoProvider::DefaultDeviceInfoProvider(const std::string &custom_u device_info_.clientInfo.loggerLogLevel = logLevel; } -DeviceInfoProvider::device_info_t DefaultDeviceInfoProvider::getDeviceInfo() { +DeviceInfoProvider::device_info_t DefaultDeviceInfoProvider::getDeviceInfo() +{ return device_info_; } -const string DefaultDeviceInfoProvider::getCustomUserAgent() { +const string DefaultDeviceInfoProvider::getCustomUserAgent() +{ return custom_useragent_; } -const string DefaultDeviceInfoProvider::getCertPath() { +const string DefaultDeviceInfoProvider::getCertPath() +{ return cert_path_; } - } // namespace video } // namespace kinesis } // namespace amazonaws } // namespace com - diff --git a/src/DefaultDeviceInfoProvider.h b/src/DefaultDeviceInfoProvider.h index 8db160e7..1e8c8af6 100644 --- a/src/DefaultDeviceInfoProvider.h +++ b/src/DefaultDeviceInfoProvider.h @@ -5,16 +5,19 @@ #include "DeviceInfoProvider.h" #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { class DefaultDeviceInfoProvider : public DeviceInfoProvider { -public: - DefaultDeviceInfoProvider(const std::string &custom_useragent = "", const std::string &cert_path = ""); + public: + DefaultDeviceInfoProvider(const std::string& custom_useragent = "", const std::string& cert_path = ""); device_info_t getDeviceInfo() override; const std::string getCustomUserAgent() override; const std::string getCertPath() override; -protected: + protected: DeviceInfo device_info_; const std::string cert_path_; const std::string custom_useragent_; @@ -24,4 +27,3 @@ class DefaultDeviceInfoProvider : public DeviceInfoProvider { } // namespace kinesis } // namespace amazonaws } // namespace com - diff --git a/src/DeviceInfoProvider.h b/src/DeviceInfoProvider.h index ef312650..6e934de5 100644 --- a/src/DeviceInfoProvider.h +++ 
b/src/DeviceInfoProvider.h @@ -3,13 +3,16 @@ #include "com/amazonaws/kinesis/video/client/Include.h" #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** -* Interface for the client implementation which will provide the Kinesis Video DeviceInfo struct. -*/ + * Interface for the client implementation which will provide the Kinesis Video DeviceInfo struct. + */ class DeviceInfoProvider { -public: + public: using device_info_t = DeviceInfo; /** @@ -30,19 +33,22 @@ class DeviceInfoProvider { * Return user's custom user agent string which will be appended to the default user agent string * */ - virtual const std::string getCustomUserAgent() { + virtual const std::string getCustomUserAgent() + { return ""; } - virtual const std::string getCertPath() { + virtual const std::string getCertPath() + { return ""; } - virtual ~DeviceInfoProvider() {} + virtual ~DeviceInfoProvider() + { + } }; } // namespace video } // namespace kinesis } // namespace amazonaws } // namespace com - diff --git a/src/GetTime.cpp b/src/GetTime.cpp index 4bcf74a2..ebc51a1c 100644 --- a/src/GetTime.cpp +++ b/src/GetTime.cpp @@ -1,10 +1,14 @@ #include "GetTime.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { -std::chrono::time_point systemCurrentTime() { - return std::chrono::system_clock::now(); - // if you local time has 10 minutes drift - //return std::chrono::system_clock::now() + std::chrono::microseconds(std::chrono::minutes(10)); +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { +std::chrono::time_point systemCurrentTime() +{ + return std::chrono::system_clock::now(); + // if you local time has 10 minutes drift + // return std::chrono::system_clock::now() + std::chrono::microseconds(std::chrono::minutes(10)); } } // namespace video diff --git a/src/GetTime.h b/src/GetTime.h index a4310770..124ee395 100644 --- 
a/src/GetTime.h +++ b/src/GetTime.h @@ -5,13 +5,15 @@ #pragma once -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { std::chrono::time_point systemCurrentTime(); - } // namespace video } // namespace kinesis } // namespace amazonaws } // namespace com -#endif /* __TIME_DEFINITIONS__ */ +#endif /* __TIME_DEFINITIONS__ */ diff --git a/src/JNI/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.cpp b/src/JNI/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.cpp index 621673a9..33a321b5 100644 --- a/src/JNI/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.cpp +++ b/src/JNI/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.cpp @@ -1,7 +1,7 @@ /** * Implementation of Kinesis Video Producer client wrapper */ -#define LOG_CLASS "KinesisVideoClientWrapper" +#define LOG_CLASS "KinesisVideoClientWrapper" #define MAX_LOG_MESSAGE_LENGTH 1024 * 10 #include "com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.h" @@ -11,10 +11,7 @@ JavaVM* KinesisVideoClientWrapper::mJvm = NULL; jobject KinesisVideoClientWrapper::mGlobalJniObjRef = NULL; jmethodID KinesisVideoClientWrapper::mLogPrintMethodId = NULL; - -KinesisVideoClientWrapper::KinesisVideoClientWrapper(JNIEnv* env, - jobject thiz, - jobject deviceInfo): mClientHandle(INVALID_CLIENT_HANDLE_VALUE) +KinesisVideoClientWrapper::KinesisVideoClientWrapper(JNIEnv* env, jobject thiz, jobject deviceInfo) : mClientHandle(INVALID_CLIENT_HANDLE_VALUE) { UINT32 retStatus; @@ -55,11 +52,10 @@ KinesisVideoClientWrapper::KinesisVideoClientWrapper(JNIEnv* env, KinesisVideoClientWrapper::~KinesisVideoClientWrapper() { STATUS retStatus = STATUS_SUCCESS; - if (IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (IS_VALID_CLIENT_HANDLE(mClientHandle)) { if (STATUS_FAILED(retStatus = freeKinesisVideoClient(&mClientHandle))) { DLOGE("Failed to free the producer client 
object"); - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); throwNativeException(env, EXCEPTION_NAME, "Failed to free the producer client object.", retStatus); return; @@ -83,19 +79,17 @@ jobject KinesisVideoClientWrapper::getGlobalRef() void KinesisVideoClientWrapper::stopKinesisVideoStreams() { STATUS retStatus = STATUS_SUCCESS; - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::stopKinesisVideoStreams(mClientHandle))) - { + if (STATUS_FAILED(retStatus = ::stopKinesisVideoStreams(mClientHandle))) { DLOGE("Failed to stop the streams with status code 0x%08x", retStatus); - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); throwNativeException(env, EXCEPTION_NAME, "Failed to stop the streams.", retStatus); return; @@ -105,25 +99,22 @@ void KinesisVideoClientWrapper::stopKinesisVideoStreams() void KinesisVideoClientWrapper::stopKinesisVideoStream(jlong streamHandle) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::stopKinesisVideoStream(streamHandle))) - { + if (STATUS_FAILED(retStatus = 
::stopKinesisVideoStream(streamHandle))) { DLOGE("Failed to stop kinesis video stream with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to stop kinesis video stream.", retStatus); return; @@ -134,25 +125,22 @@ void KinesisVideoClientWrapper::freeKinesisVideoStream(jlong streamHandle) { STATUS retStatus = STATUS_SUCCESS; STREAM_HANDLE handle = (STREAM_HANDLE) streamHandle; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::freeKinesisVideoStream(&handle))) - { + if (STATUS_FAILED(retStatus = ::freeKinesisVideoStream(&handle))) { DLOGE("Failed to free kinesis video stream with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to free kinesis video stream.", retStatus); return; @@ -162,18 +150,16 @@ void KinesisVideoClientWrapper::freeKinesisVideoStream(jlong streamHandle) void KinesisVideoClientWrapper::getKinesisVideoMetrics(jobject kinesisVideoMetrics) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (NULL == kinesisVideoMetrics) - { + if (NULL == kinesisVideoMetrics) { DLOGE("KinesisVideoMetrics object is 
null"); throwNativeException(env, EXCEPTION_NAME, "KinesisVideoMetrics object is null.", STATUS_NULL_ARG); return; @@ -182,16 +168,15 @@ void KinesisVideoClientWrapper::getKinesisVideoMetrics(jobject kinesisVideoMetri ClientMetrics metrics; metrics.version = CLIENT_METRICS_CURRENT_VERSION; - if (STATUS_FAILED(retStatus = ::getKinesisVideoMetrics(mClientHandle, &metrics))) - { + if (STATUS_FAILED(retStatus = ::getKinesisVideoMetrics(mClientHandle, &metrics))) { DLOGE("Failed to get KinesisVideoMetrics with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to get KinesisVideoMetrics.", retStatus); return; } - //get the class + // get the class jclass metricsClass = env->GetObjectClass(kinesisVideoMetrics); - if (metricsClass == NULL){ + if (metricsClass == NULL) { DLOGE("Failed to get metrics class object"); throwNativeException(env, EXCEPTION_NAME, "Failed to get metrics class object.", STATUS_INVALID_OPERATION); return; @@ -199,46 +184,36 @@ void KinesisVideoClientWrapper::getKinesisVideoMetrics(jobject kinesisVideoMetri // Set the Java object jmethodID setterMethodId = env->GetMethodID(metricsClass, "setMetrics", "(JJJJJJ)V"); - if (setterMethodId == NULL) - { + if (setterMethodId == NULL) { DLOGE("Failed to get the setter method id."); throwNativeException(env, EXCEPTION_NAME, "Failed to get setter method id.", STATUS_INVALID_OPERATION); return; } // call the setter method - env->CallVoidMethod(kinesisVideoMetrics, - setterMethodId, - metrics.contentStoreSize, - metrics.contentStoreAllocatedSize, - metrics.contentStoreAvailableSize, - metrics.totalContentViewsSize, - metrics.totalFrameRate, - metrics.totalTransferRate); + env->CallVoidMethod(kinesisVideoMetrics, setterMethodId, metrics.contentStoreSize, metrics.contentStoreAllocatedSize, + metrics.contentStoreAvailableSize, metrics.totalContentViewsSize, metrics.totalFrameRate, metrics.totalTransferRate); } void KinesisVideoClientWrapper::getKinesisVideoStreamMetrics(jlong 
streamHandle, jobject kinesisVideoStreamMetrics) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { - DLOGE("Invalid stream handle 0x%016" PRIx64 , (UINT64) streamHandle); + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { + DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); return; } - if (NULL == kinesisVideoStreamMetrics) - { + if (NULL == kinesisVideoStreamMetrics) { DLOGE("KinesisVideoStreamMetrics object is null"); throwNativeException(env, EXCEPTION_NAME, "KinesisVideoStreamMetrics object is null.", STATUS_NULL_ARG); return; @@ -247,16 +222,15 @@ void KinesisVideoClientWrapper::getKinesisVideoStreamMetrics(jlong streamHandle, StreamMetrics metrics; metrics.version = STREAM_METRICS_CURRENT_VERSION; - if (STATUS_FAILED(retStatus = ::getKinesisVideoStreamMetrics(streamHandle, &metrics))) - { + if (STATUS_FAILED(retStatus = ::getKinesisVideoStreamMetrics(streamHandle, &metrics))) { DLOGE("Failed to get StreamMetrics with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to get StreamMetrics.", retStatus); return; } - //get the class + // get the class jclass metricsClass = env->GetObjectClass(kinesisVideoStreamMetrics); - if (metricsClass == NULL){ + if (metricsClass == NULL) { DLOGE("Failed to get metrics class object"); throwNativeException(env, EXCEPTION_NAME, "Failed to get metrics class object.", STATUS_INVALID_OPERATION); return; @@ -264,22 +238,15 @@ void KinesisVideoClientWrapper::getKinesisVideoStreamMetrics(jlong streamHandle, // Set the Java object 
jmethodID setterMethodId = env->GetMethodID(metricsClass, "setMetrics", "(JJJJDJ)V"); - if (setterMethodId == NULL) - { + if (setterMethodId == NULL) { DLOGE("Failed to get the setter method id."); throwNativeException(env, EXCEPTION_NAME, "Failed to get setter method id.", STATUS_INVALID_OPERATION); return; } // call the setter method - env->CallVoidMethod(kinesisVideoStreamMetrics, - setterMethodId, - metrics.overallViewSize, - metrics.currentViewSize, - metrics.overallViewDuration, - metrics.currentViewDuration, - metrics.currentFrameRate, - metrics.currentTransferRate); + env->CallVoidMethod(kinesisVideoStreamMetrics, setterMethodId, metrics.overallViewSize, metrics.currentViewSize, metrics.overallViewDuration, + metrics.currentViewDuration, metrics.currentFrameRate, metrics.currentTransferRate); } STREAM_HANDLE KinesisVideoClientWrapper::createKinesisVideoStream(jobject streamInfo) @@ -287,12 +254,11 @@ STREAM_HANDLE KinesisVideoClientWrapper::createKinesisVideoStream(jobject stream STATUS retStatus = STATUS_SUCCESS; STREAM_HANDLE streamHandle = INVALID_STREAM_HANDLE_VALUE; UINT32 i; - JNIEnv *env; + JNIEnv* env; StreamInfo kinesisVideoStreamInfo; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); goto CleanUp; @@ -300,15 +266,13 @@ STREAM_HANDLE KinesisVideoClientWrapper::createKinesisVideoStream(jobject stream // Convert the StreamInfo object MEMSET(&kinesisVideoStreamInfo, 0x00, SIZEOF(kinesisVideoStreamInfo)); - if (!setStreamInfo(env, streamInfo, &kinesisVideoStreamInfo)) - { + if (!setStreamInfo(env, streamInfo, &kinesisVideoStreamInfo)) { DLOGE("Failed converting stream info object."); throwNativeException(env, EXCEPTION_NAME, "Failed converting stream info object.", STATUS_INVALID_OPERATION); goto CleanUp; } - if 
(STATUS_FAILED(retStatus = ::createKinesisVideoStream(mClientHandle, &kinesisVideoStreamInfo, &streamHandle))) - { + if (STATUS_FAILED(retStatus = ::createKinesisVideoStream(mClientHandle, &kinesisVideoStreamInfo, &streamHandle))) { DLOGE("Failed to create a stream with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to create a stream.", retStatus); goto CleanUp; @@ -349,25 +313,22 @@ SyncMutex& KinesisVideoClientWrapper::getSyncLock() void KinesisVideoClientWrapper::putKinesisVideoFrame(jlong streamHandle, jobject kinesisVideoFrame) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); return; } - if (kinesisVideoFrame == NULL) - { + if (kinesisVideoFrame == NULL) { DLOGE("Invalid kinesis video frame."); throwNativeException(env, EXCEPTION_NAME, "Kinesis video frame is null.", STATUS_INVALID_OPERATION); return; @@ -375,8 +336,7 @@ void KinesisVideoClientWrapper::putKinesisVideoFrame(jlong streamHandle, jobject // Convert the KinesisVideoFrame object Frame frame; - if (!setFrame(env, kinesisVideoFrame, &frame)) - { + if (!setFrame(env, kinesisVideoFrame, &frame)) { DLOGE("Failed converting frame object."); throwNativeException(env, EXCEPTION_NAME, "Failed converting frame object.", STATUS_INVALID_OPERATION); return; @@ -403,8 +363,7 @@ void KinesisVideoClientWrapper::putKinesisVideoFrame(jlong streamHandle, jobject } } - if (STATUS_FAILED(retStatus = 
::putKinesisVideoFrame(streamHandle, &frame))) - { + if (STATUS_FAILED(retStatus = ::putKinesisVideoFrame(streamHandle, &frame))) { DLOGE("Failed to put a frame with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to put a frame into the stream.", retStatus); return; @@ -414,25 +373,22 @@ void KinesisVideoClientWrapper::putKinesisVideoFrame(jlong streamHandle, jobject void KinesisVideoClientWrapper::putKinesisVideoFragmentMetadata(jlong streamHandle, jstring metadataName, jstring metadataValue, jboolean persistent) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); return; } - if (metadataName == NULL || metadataValue == NULL) - { + if (metadataName == NULL || metadataValue == NULL) { DLOGE("metadataName or metadataValue is NULL"); throwNativeException(env, EXCEPTION_NAME, "metadataName or metadataValue is NULL.", STATUS_INVALID_OPERATION); return; @@ -442,28 +398,25 @@ void KinesisVideoClientWrapper::putKinesisVideoFragmentMetadata(jlong streamHand PCHAR pMetadataNameStr = (PCHAR) env->GetStringUTFChars(metadataName, NULL); PCHAR pMetadataValueStr = (PCHAR) env->GetStringUTFChars(metadataValue, NULL); - // Call the API retStatus = ::putKinesisVideoFragmentMetadata(streamHandle, pMetadataNameStr, pMetadataValueStr, persistent == JNI_TRUE); - // Release the string env->ReleaseStringUTFChars(metadataName, pMetadataNameStr); env->ReleaseStringUTFChars(metadataValue, 
pMetadataValueStr); - if (STATUS_FAILED(retStatus)) - { + if (STATUS_FAILED(retStatus)) { DLOGE("Failed to put a metadata with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to put a metadata into the stream.", retStatus); return; } - } -void KinesisVideoClientWrapper::getKinesisVideoStreamData(jlong streamHandle, jlong uploadHandle, jobject dataBuffer, jint offset, jint length, jobject readResult) +void KinesisVideoClientWrapper::getKinesisVideoStreamData(jlong streamHandle, jlong uploadHandle, jobject dataBuffer, jint offset, jint length, + jobject readResult) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); UINT32 filledSize = 0, bufferSize = 0; PBYTE pBuffer = NULL; @@ -471,49 +424,43 @@ void KinesisVideoClientWrapper::getKinesisVideoStreamData(jlong streamHandle, jl jclass readResultClass; jmethodID setterMethodId; - if (NULL == readResult) - { + if (NULL == readResult) { DLOGE("NULL ReadResult object"); throwNativeException(env, EXCEPTION_NAME, "NULL ReadResult object is passsed.", STATUS_NULL_ARG); goto CleanUp; } - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); goto CleanUp; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); goto CleanUp; } - if (dataBuffer == NULL) - { + if (dataBuffer == NULL) { DLOGE("Invalid buffer object."); throwNativeException(env, EXCEPTION_NAME, "Invalid buffer object.", STATUS_INVALID_OPERATION); goto CleanUp; } // Convert the buffer of stream data to get - if (!setStreamDataBuffer(env, dataBuffer, offset, &pBuffer)) - { + if 
(!setStreamDataBuffer(env, dataBuffer, offset, &pBuffer)) { DLOGE("Failed converting kinesis video stream data buffer object."); throwNativeException(env, EXCEPTION_NAME, "Failed converting kinesis video stream data buffer object.", STATUS_INVALID_OPERATION); goto CleanUp; } retStatus = ::getKinesisVideoStreamData(streamHandle, uploadHandle, pBuffer, (UINT32) length, &filledSize); - if (STATUS_SUCCESS != retStatus && STATUS_AWAITING_PERSISTED_ACK != retStatus - && STATUS_UPLOAD_HANDLE_ABORTED != retStatus - && STATUS_NO_MORE_DATA_AVAILABLE != retStatus && STATUS_END_OF_STREAM != retStatus) - { + if (STATUS_SUCCESS != retStatus && STATUS_AWAITING_PERSISTED_ACK != retStatus && STATUS_UPLOAD_HANDLE_ABORTED != retStatus && + STATUS_NO_MORE_DATA_AVAILABLE != retStatus && STATUS_END_OF_STREAM != retStatus) { char errMessage[256]; - SNPRINTF(errMessage, 256, "Failed to get data from the stream 0x%016" PRIx64 " with uploadHandle %" PRIu64 , (UINT64) streamHandle, (UINT64) uploadHandle); + SNPRINTF(errMessage, 256, "Failed to get data from the stream 0x%016" PRIx64 " with uploadHandle %" PRIu64, (UINT64) streamHandle, + (UINT64) uploadHandle); DLOGE("Failed to get data from the stream with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, errMessage, retStatus); goto CleanUp; @@ -525,7 +472,7 @@ void KinesisVideoClientWrapper::getKinesisVideoStreamData(jlong streamHandle, jl // Get the class readResultClass = env->GetObjectClass(readResult); - if (readResultClass == NULL){ + if (readResultClass == NULL) { DLOGE("Failed to get ReadResult class object"); throwNativeException(env, EXCEPTION_NAME, "Failed to get ReadResult class object.", STATUS_INVALID_OPERATION); goto CleanUp; @@ -533,23 +480,18 @@ void KinesisVideoClientWrapper::getKinesisVideoStreamData(jlong streamHandle, jl // Get the Java method id setterMethodId = env->GetMethodID(readResultClass, "setReadResult", "(IZ)V"); - if (setterMethodId == NULL) - { + if (setterMethodId == NULL) { 
DLOGE("Failed to get the setter method id."); throwNativeException(env, EXCEPTION_NAME, "Failed to get setter method id.", STATUS_INVALID_OPERATION); goto CleanUp; } // Call the setter method - env->CallVoidMethod(readResult, - setterMethodId, - filledSize, - isEos); + env->CallVoidMethod(readResult, setterMethodId, filledSize, isEos); CleanUp: - if (!releaseStreamDataBuffer(env, dataBuffer, offset, pBuffer)) - { + if (!releaseStreamDataBuffer(env, dataBuffer, offset, pBuffer)) { DLOGE("Failed releasing kinesis video stream data buffer object."); throwNativeException(env, EXCEPTION_NAME, "Failed releasing kinesis video stream data buffer object.", STATUS_INVALID_OPERATION); } @@ -558,41 +500,36 @@ void KinesisVideoClientWrapper::getKinesisVideoStreamData(jlong streamHandle, jl void KinesisVideoClientWrapper::kinesisVideoStreamFragmentAck(jlong streamHandle, jlong uploadHandle, jobject fragmentAck) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); FragmentAck ack; - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); return; } - if (fragmentAck == NULL) - { + if (fragmentAck == NULL) { DLOGE("Invalid fragment ack"); throwNativeException(env, EXCEPTION_NAME, "Invalid fragment ack.", STATUS_INVALID_OPERATION); return; } // Convert the KinesisVideoFrame object - if (!setFragmentAck(env, fragmentAck, &ack)) - { + if (!setFragmentAck(env, fragmentAck, &ack)) { DLOGE("Failed converting frame object."); throwNativeException(env, EXCEPTION_NAME, "Failed 
converting fragment ack object.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::kinesisVideoStreamFragmentAck(streamHandle, uploadHandle, &ack))) - { + if (STATUS_FAILED(retStatus = ::kinesisVideoStreamFragmentAck(streamHandle, uploadHandle, &ack))) { DLOGE("Failed to report a fragment ack with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to report a fragment ack.", retStatus); return; @@ -602,25 +539,22 @@ void KinesisVideoClientWrapper::kinesisVideoStreamFragmentAck(jlong streamHandle void KinesisVideoClientWrapper::kinesisVideoStreamParseFragmentAck(jlong streamHandle, jlong uploadHandle, jstring ack) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); return; } - if (ack == NULL) - { + if (ack == NULL) { DLOGE("Invalid ack"); throwNativeException(env, EXCEPTION_NAME, "Invalid ack.", STATUS_INVALID_OPERATION); return; @@ -635,8 +569,7 @@ void KinesisVideoClientWrapper::kinesisVideoStreamParseFragmentAck(jlong streamH // Release the string env->ReleaseStringUTFChars(ack, pAckStr); - if (STATUS_FAILED(retStatus)) - { + if (STATUS_FAILED(retStatus)) { DLOGE("Failed to parse a fragment ack with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to parse a fragment ack.", retStatus); return; @@ -646,21 +579,19 @@ void KinesisVideoClientWrapper::kinesisVideoStreamParseFragmentAck(jlong streamH void 
KinesisVideoClientWrapper::streamFormatChanged(jlong streamHandle, jobject codecPrivateData, jlong trackId) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); UINT32 bufferSize = 0; PBYTE pBuffer = NULL; BOOL releaseBuffer = FALSE; - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); goto CleanUp; } - if (!IS_VALID_STREAM_HANDLE(streamHandle)) - { + if (!IS_VALID_STREAM_HANDLE(streamHandle)) { DLOGE("Invalid stream handle 0x%016" PRIx64, (UINT64) streamHandle); throwNativeException(env, EXCEPTION_NAME, "Invalid stream handle.", STATUS_INVALID_OPERATION); goto CleanUp; @@ -669,8 +600,7 @@ void KinesisVideoClientWrapper::streamFormatChanged(jlong streamHandle, jobject // Get the codec private data byte buffer - null object has a special semmantic of clearing the CPD if (codecPrivateData != NULL) { bufferSize = (UINT32) env->GetArrayLength((jbyteArray) codecPrivateData); - if (NULL == (pBuffer = (PBYTE) env->GetByteArrayElements((jbyteArray) codecPrivateData, NULL))) - { + if (NULL == (pBuffer = (PBYTE) env->GetByteArrayElements((jbyteArray) codecPrivateData, NULL))) { DLOGE("Failed getting byte buffer from the java array."); throwNativeException(env, EXCEPTION_NAME, "Failed getting byte buffer from the java array.", STATUS_INVALID_OPERATION); goto CleanUp; @@ -683,8 +613,7 @@ void KinesisVideoClientWrapper::streamFormatChanged(jlong streamHandle, jobject bufferSize = 0; } - if (STATUS_FAILED(retStatus = ::kinesisVideoStreamFormatChanged(streamHandle, bufferSize, pBuffer, trackId))) - { + if (STATUS_FAILED(retStatus = ::kinesisVideoStreamFormatChanged(streamHandle, bufferSize, pBuffer, trackId))) { DLOGE("Failed to set the stream format with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, 
"Failed to set the stream format.", retStatus); goto CleanUp; @@ -692,8 +621,7 @@ void KinesisVideoClientWrapper::streamFormatChanged(jlong streamHandle, jobject CleanUp: - if (releaseBuffer) - { + if (releaseBuffer) { env->ReleaseByteArrayElements((jbyteArray) codecPrivateData, (jbyte*) pBuffer, JNI_ABORT); } } @@ -701,11 +629,10 @@ void KinesisVideoClientWrapper::streamFormatChanged(jlong streamHandle, jobject void KinesisVideoClientWrapper::describeStreamResult(jlong streamHandle, jint httpStatusCode, jobject streamDescription) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; @@ -716,8 +643,7 @@ void KinesisVideoClientWrapper::describeStreamResult(jlong streamHandle, jint ht if (NULL != streamDescription) { if (!setStreamDescription(env, streamDescription, &streamDesc)) { DLOGE("Failed converting stream description object."); - throwNativeException(env, EXCEPTION_NAME, "Failed converting stream description object.", - STATUS_INVALID_OPERATION); + throwNativeException(env, EXCEPTION_NAME, "Failed converting stream description object.", STATUS_INVALID_OPERATION); return; } @@ -725,8 +651,7 @@ void KinesisVideoClientWrapper::describeStreamResult(jlong streamHandle, jint ht pStreamDesc = &streamDesc; } - if (STATUS_FAILED(retStatus = ::describeStreamResultEvent(streamHandle, (SERVICE_CALL_RESULT) httpStatusCode, pStreamDesc))) - { + if (STATUS_FAILED(retStatus = ::describeStreamResultEvent(streamHandle, (SERVICE_CALL_RESULT) httpStatusCode, pStreamDesc))) { DLOGE("Failed to describe stream result event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to describe stream result event.", retStatus); return; @@ -736,18 
+661,16 @@ void KinesisVideoClientWrapper::describeStreamResult(jlong streamHandle, jint ht void KinesisVideoClientWrapper::kinesisVideoStreamTerminated(jlong streamHandle, jlong uploadHandle, jint httpStatusCode) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::kinesisVideoStreamTerminated(streamHandle, uploadHandle, (SERVICE_CALL_RESULT) httpStatusCode))) - { + if (STATUS_FAILED(retStatus = ::kinesisVideoStreamTerminated(streamHandle, uploadHandle, (SERVICE_CALL_RESULT) httpStatusCode))) { DLOGE("Failed to submit stream terminated event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to submit stream terminated event.", retStatus); return; @@ -757,12 +680,11 @@ void KinesisVideoClientWrapper::kinesisVideoStreamTerminated(jlong streamHandle, void KinesisVideoClientWrapper::createStreamResult(jlong streamHandle, jint httpStatusCode, jstring streamArn) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; PCHAR pStreamArn = NULL; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; @@ -779,8 +701,7 @@ void KinesisVideoClientWrapper::createStreamResult(jlong streamHandle, jint http env->ReleaseStringUTFChars(streamArn, pStreamArn); } - if (STATUS_FAILED(retStatus)) - { + if (STATUS_FAILED(retStatus)) { DLOGE("Failed to create stream result event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed 
to create stream result event.", retStatus); return; @@ -790,18 +711,16 @@ void KinesisVideoClientWrapper::createStreamResult(jlong streamHandle, jint http void KinesisVideoClientWrapper::putStreamResult(jlong streamHandle, jint httpStatusCode, jlong clientStreamHandle) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::putStreamResultEvent(streamHandle, (SERVICE_CALL_RESULT) httpStatusCode, (UINT64) clientStreamHandle))) - { + if (STATUS_FAILED(retStatus = ::putStreamResultEvent(streamHandle, (SERVICE_CALL_RESULT) httpStatusCode, (UINT64) clientStreamHandle))) { DLOGE("Failed to put stream result event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to put stream result event.", retStatus); return; @@ -811,18 +730,16 @@ void KinesisVideoClientWrapper::putStreamResult(jlong streamHandle, jint httpSta void KinesisVideoClientWrapper::tagResourceResult(jlong customData, jint httpStatusCode) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::tagResourceResultEvent(customData, (SERVICE_CALL_RESULT) httpStatusCode))) - { + if (STATUS_FAILED(retStatus = ::tagResourceResultEvent(customData, (SERVICE_CALL_RESULT) httpStatusCode))) { DLOGE("Failed on tag resource result event with status code 0x%08x", retStatus); throwNativeException(env, 
EXCEPTION_NAME, "Failed on tag resource result event.", retStatus); return; @@ -832,40 +749,37 @@ void KinesisVideoClientWrapper::tagResourceResult(jlong customData, jint httpSta void KinesisVideoClientWrapper::getStreamingEndpointResult(jlong streamHandle, jint httpStatusCode, jstring streamingEndpoint) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; } CHAR pEndpoint[MAX_URI_CHAR_LEN + 1]; - if (!setStreamingEndpoint(env, streamingEndpoint, pEndpoint)) - { + if (!setStreamingEndpoint(env, streamingEndpoint, pEndpoint)) { DLOGE("Failed converting streaming endpoint object."); throwNativeException(env, EXCEPTION_NAME, "Failed converting streaming endpoint object.", STATUS_INVALID_OPERATION); return; } - if (STATUS_FAILED(retStatus = ::getStreamingEndpointResultEvent(streamHandle, (SERVICE_CALL_RESULT) httpStatusCode, pEndpoint))) - { + if (STATUS_FAILED(retStatus = ::getStreamingEndpointResultEvent(streamHandle, (SERVICE_CALL_RESULT) httpStatusCode, pEndpoint))) { DLOGE("Failed to get streaming endpoint result event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to get streaming endpoint result event.", retStatus); return; } } -void KinesisVideoClientWrapper::getStreamingTokenResult(jlong streamHandle, jint httpStatusCode, jbyteArray streamingToken, jint tokenSize, jlong expiration) +void KinesisVideoClientWrapper::getStreamingTokenResult(jlong streamHandle, jint httpStatusCode, jbyteArray streamingToken, jint tokenSize, + jlong expiration) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if 
(!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; @@ -878,14 +792,10 @@ void KinesisVideoClientWrapper::getStreamingTokenResult(jlong streamHandle, jint } BYTE pToken[MAX_AUTH_LEN]; - env->GetByteArrayRegion(streamingToken, 0, tokenSize, (jbyte *) pToken); - - if (STATUS_FAILED(retStatus = ::getStreamingTokenResultEvent(streamHandle, - (SERVICE_CALL_RESULT) httpStatusCode, - pToken, - (UINT32) tokenSize, - (UINT64) expiration))) - { + env->GetByteArrayRegion(streamingToken, 0, tokenSize, (jbyte*) pToken); + + if (STATUS_FAILED(retStatus = ::getStreamingTokenResultEvent(streamHandle, (SERVICE_CALL_RESULT) httpStatusCode, pToken, (UINT32) tokenSize, + (UINT64) expiration))) { DLOGE("Failed to get streaming token result event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to get streaming token result event.", retStatus); return; @@ -895,12 +805,11 @@ void KinesisVideoClientWrapper::getStreamingTokenResult(jlong streamHandle, jint void KinesisVideoClientWrapper::createDeviceResult(jlong clientHandle, jint httpStatusCode, jstring deviceArn) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; PCHAR pDeviceArn = NULL; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; @@ -917,8 +826,7 @@ void KinesisVideoClientWrapper::createDeviceResult(jlong clientHandle, jint http env->ReleaseStringUTFChars(deviceArn, pDeviceArn); } - if (STATUS_FAILED(retStatus)) - { + if (STATUS_FAILED(retStatus)) { DLOGE("Failed to create device result event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed to create device 
result event.", retStatus); return; @@ -928,11 +836,10 @@ void KinesisVideoClientWrapper::createDeviceResult(jlong clientHandle, jint http void KinesisVideoClientWrapper::deviceCertToTokenResult(jlong clientHandle, jint httpStatusCode, jbyteArray token, jint tokenSize, jlong expiration) { STATUS retStatus = STATUS_SUCCESS; - JNIEnv *env; + JNIEnv* env; mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); - if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) - { + if (!IS_VALID_CLIENT_HANDLE(mClientHandle)) { DLOGE("Invalid client object"); throwNativeException(env, EXCEPTION_NAME, "Invalid call after the client is freed.", STATUS_INVALID_OPERATION); return; @@ -945,14 +852,10 @@ void KinesisVideoClientWrapper::deviceCertToTokenResult(jlong clientHandle, jint } BYTE pToken[MAX_AUTH_LEN]; - env->GetByteArrayRegion(token, 0, tokenSize, (jbyte *) pToken); - - if (STATUS_FAILED(retStatus = ::deviceCertToTokenResultEvent(clientHandle, - (SERVICE_CALL_RESULT) httpStatusCode, - pToken, - (UINT32) tokenSize, - (UINT64) expiration))) - { + env->GetByteArrayRegion(token, 0, tokenSize, (jbyte*) pToken); + + if (STATUS_FAILED(retStatus = ::deviceCertToTokenResultEvent(clientHandle, (SERVICE_CALL_RESULT) httpStatusCode, pToken, (UINT32) tokenSize, + (UINT64) expiration))) { DLOGE("Failed the deviceCertToToken result event with status code 0x%08x", retStatus); throwNativeException(env, EXCEPTION_NAME, "Failed the deviceCertToToken result event.", retStatus); return; @@ -1069,7 +972,8 @@ BOOL KinesisVideoClientWrapper::setCallbacks(JNIEnv* env, jobject thiz) return FALSE; } - mFragmentAckReceivedMethodId = env->GetMethodID(thizCls, "fragmentAckReceived", "(JJLcom/amazonaws/kinesisvideo/producer/KinesisVideoFragmentAck;)V"); + mFragmentAckReceivedMethodId = + env->GetMethodID(thizCls, "fragmentAckReceived", "(JJLcom/amazonaws/kinesisvideo/producer/KinesisVideoFragmentAck;)V"); if (mFragmentAckReceivedMethodId == NULL) { DLOGE("Couldn't find method id fragmentAckReceived"); return FALSE; @@ 
-1117,7 +1021,8 @@ BOOL KinesisVideoClientWrapper::setCallbacks(JNIEnv* env, jobject thiz) return FALSE; } - mCreateStreamMethodId = env->GetMethodID(thizCls, "createStream", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;JJJ[BIJ)I"); + mCreateStreamMethodId = + env->GetMethodID(thizCls, "createStream", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;JJJ[BIJ)I"); if (mCreateStreamMethodId == NULL) { DLOGE("Couldn't find method id createStream"); return FALSE; @@ -1201,7 +1106,7 @@ UINT64 KinesisVideoClientWrapper::getCurrentTimeFunc(UINT64 customData) } // The precision needs to be on a 100th nanosecond resolution - return (UINT64)nowTime.tv_sec * HUNDREDS_OF_NANOS_IN_A_SECOND + (UINT64)nowTime.tv_usec * HUNDREDS_OF_NANOS_IN_A_MICROSECOND; + return (UINT64) nowTime.tv_sec * HUNDREDS_OF_NANOS_IN_A_SECOND + (UINT64) nowTime.tv_usec * HUNDREDS_OF_NANOS_IN_A_MICROSECOND; #endif } @@ -1289,7 +1194,7 @@ STATUS KinesisVideoClientWrapper::getDeviceCertificateFunc(UINT64 customData, PB { DLOGS("TID 0x%016" PRIx64 " getDeviceCertificateFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL && ppCert != NULL && pSize != NULL && pExpiration != NULL); return pWrapper->getAuthInfo(pWrapper->mGetDeviceCertificateMethodId, ppCert, pSize, pExpiration); @@ -1299,7 +1204,7 @@ STATUS KinesisVideoClientWrapper::getSecurityTokenFunc(UINT64 customData, PBYTE* { DLOGS("TID 0x%016" PRIx64 " getSecurityTokenFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL && ppToken != NULL && pSize != NULL && pExpiration != NULL); return pWrapper->getAuthInfo(pWrapper->mGetSecurityTokenMethodId, ppToken, pSize, pExpiration); @@ -1309,11 +1214,11 @@ STATUS 
KinesisVideoClientWrapper::getDeviceFingerprintFunc(UINT64 customData, PC { DLOGS("TID 0x%016" PRIx64 " getDeviceFingerprintFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL && ppFingerprint != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstr = NULL; @@ -1335,7 +1240,7 @@ STATUS KinesisVideoClientWrapper::getDeviceFingerprintFunc(UINT64 customData, PC if (jstr != NULL) { // Extract the bits from the byte buffer bufferPtr = env->GetStringChars(jstr, NULL); - strLen = (UINT32)STRLEN((PCHAR) bufferPtr); + strLen = (UINT32) STRLEN((PCHAR) bufferPtr); if (strLen >= MAX_AUTH_LEN) { retStatus = STATUS_INVALID_ARG; goto CleanUp; @@ -1374,11 +1279,11 @@ STATUS KinesisVideoClientWrapper::streamUnderflowReportFunc(UINT64 customData, S { DLOGS("TID 0x%016" PRIx64 " streamUnderflowReportFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1408,11 +1313,11 @@ STATUS KinesisVideoClientWrapper::storageOverflowPressureFunc(UINT64 customData, { DLOGS("TID 0x%016" PRIx64 " storageOverflowPressureFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1442,11 +1347,11 @@ STATUS KinesisVideoClientWrapper::streamLatencyPressureFunc(UINT64 customData, S { DLOGS("TID 0x%016" PRIx64 " streamLatencyPressureFunc called.", GETTID()); - 
KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1476,11 +1381,11 @@ STATUS KinesisVideoClientWrapper::streamConnectionStaleFunc(UINT64 customData, S { DLOGS("TID 0x%016" PRIx64 " streamConnectionStaleFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1506,16 +1411,16 @@ STATUS KinesisVideoClientWrapper::streamConnectionStaleFunc(UINT64 customData, S return retStatus; } -STATUS KinesisVideoClientWrapper::fragmentAckReceivedFunc(UINT64 customData, STREAM_HANDLE streamHandle, - UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck) +STATUS KinesisVideoClientWrapper::fragmentAckReceivedFunc(UINT64 customData, STREAM_HANDLE streamHandle, UPLOAD_HANDLE upload_handle, + PFragmentAck pFragmentAck) { DLOGS("TID 0x%016" PRIx64 " fragmentAckReceivedFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrSequenceNum = NULL; @@ -1543,11 +1448,7 @@ STATUS KinesisVideoClientWrapper::fragmentAckReceivedFunc(UINT64 customData, STR CHK(jstrSequenceNum != NULL, STATUS_NOT_ENOUGH_MEMORY); // Create a new tag object - ack = env->NewObject(ackClass, - methodId, - (jint) pFragmentAck->ackType, - (jlong) pFragmentAck->timestamp, - jstrSequenceNum, + ack = env->NewObject(ackClass, methodId, (jint) 
pFragmentAck->ackType, (jlong) pFragmentAck->timestamp, jstrSequenceNum, (jint) pFragmentAck->result); CHK(ack != NULL, STATUS_NOT_ENOUGH_MEMORY); @@ -1569,11 +1470,11 @@ STATUS KinesisVideoClientWrapper::droppedFrameReportFunc(UINT64 customData, STRE { DLOGS("TID 0x%016" PRIx64 " droppedFrameReportFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1599,14 +1500,15 @@ STATUS KinesisVideoClientWrapper::droppedFrameReportFunc(UINT64 customData, STRE return retStatus; } -STATUS KinesisVideoClientWrapper::bufferDurationOverflowPressureFunc(UINT64 customData, STREAM_HANDLE streamHandle, UINT64 remainingDuration){ +STATUS KinesisVideoClientWrapper::bufferDurationOverflowPressureFunc(UINT64 customData, STREAM_HANDLE streamHandle, UINT64 remainingDuration) +{ DLOGS("TID 0x%016" PRIx64 " bufferDurationOverflowPressureFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1636,11 +1538,11 @@ STATUS KinesisVideoClientWrapper::droppedFragmentReportFunc(UINT64 customData, S { DLOGS("TID 0x%016" PRIx64 " droppedFragmentReportFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1666,16 +1568,16 @@ STATUS KinesisVideoClientWrapper::droppedFragmentReportFunc(UINT64 customData, S return retStatus; 
} -STATUS KinesisVideoClientWrapper::streamErrorReportFunc(UINT64 customData, STREAM_HANDLE streamHandle, - UPLOAD_HANDLE upload_handle, UINT64 fragmentTimecode, STATUS statusCode) +STATUS KinesisVideoClientWrapper::streamErrorReportFunc(UINT64 customData, STREAM_HANDLE streamHandle, UPLOAD_HANDLE upload_handle, + UINT64 fragmentTimecode, STATUS statusCode) { DLOGS("TID 0x%016" PRIx64 " streamErrorReportFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1688,8 +1590,7 @@ STATUS KinesisVideoClientWrapper::streamErrorReportFunc(UINT64 customData, STREA } // Call the Java func - env->CallVoidMethod(pWrapper->mGlobalJniObjRef, pWrapper->mStreamErrorReportMethodId, streamHandle, upload_handle, - fragmentTimecode, statusCode); + env->CallVoidMethod(pWrapper->mGlobalJniObjRef, pWrapper->mStreamErrorReportMethodId, streamHandle, upload_handle, fragmentTimecode, statusCode); CHK_JVM_EXCEPTION(env); CleanUp: @@ -1706,11 +1607,11 @@ STATUS KinesisVideoClientWrapper::streamReadyFunc(UINT64 customData, STREAM_HAND { DLOGS("TID 0x%016" PRIx64 " streamReadyFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1740,11 +1641,11 @@ STATUS KinesisVideoClientWrapper::streamClosedFunc(UINT64 customData, STREAM_HAN { DLOGS("TID 0x%016" PRIx64 " streamClosedFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the 
ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1770,15 +1671,16 @@ STATUS KinesisVideoClientWrapper::streamClosedFunc(UINT64 customData, STREAM_HAN return retStatus; } -STATUS KinesisVideoClientWrapper::streamDataAvailableFunc(UINT64 customData, STREAM_HANDLE streamHandle, PCHAR streamName, UINT64 uploadHandle, UINT64 duration, UINT64 availableSize) +STATUS KinesisVideoClientWrapper::streamDataAvailableFunc(UINT64 customData, STREAM_HANDLE streamHandle, PCHAR streamName, UINT64 uploadHandle, + UINT64 duration, UINT64 availableSize) { DLOGS("TID 0x%016" PRIx64 " streamDataAvailableFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -1790,7 +1692,8 @@ STATUS KinesisVideoClientWrapper::streamDataAvailableFunc(UINT64 customData, STR detached = TRUE; } - env->CallVoidMethod(pWrapper->mGlobalJniObjRef, pWrapper->mStreamDataAvailableMethodId, streamHandle, NULL, uploadHandle, duration, availableSize); + env->CallVoidMethod(pWrapper->mGlobalJniObjRef, pWrapper->mStreamDataAvailableMethodId, streamHandle, NULL, uploadHandle, duration, + availableSize); CHK_JVM_EXCEPTION(env); CleanUp: @@ -1803,21 +1706,16 @@ STATUS KinesisVideoClientWrapper::streamDataAvailableFunc(UINT64 customData, STR return retStatus; } -STATUS KinesisVideoClientWrapper::createStreamFunc(UINT64 customData, - PCHAR deviceName, - PCHAR streamName, - PCHAR contentType, - PCHAR kmsKeyId, - UINT64 retention, - PServiceCallContext pCallbackContext) +STATUS KinesisVideoClientWrapper::createStreamFunc(UINT64 customData, PCHAR deviceName, PCHAR streamName, PCHAR contentType, PCHAR kmsKeyId, + UINT64 retention, PServiceCallContext pCallbackContext) { DLOGS("TID 0x%016" PRIx64 " 
createStreamFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrDeviceName = NULL, jstrStreamName = NULL, jstrContentType = NULL, jstrKmsKeyId = NULL; @@ -1840,34 +1738,18 @@ STATUS KinesisVideoClientWrapper::createStreamFunc(UINT64 customData, } authByteArray = env->NewByteArray(pCallbackContext->pAuthInfo->size); - if (jstrContentType == NULL || - jstrDeviceName == NULL || - jstrStreamName == NULL || - authByteArray == NULL) { + if (jstrContentType == NULL || jstrDeviceName == NULL || jstrStreamName == NULL || authByteArray == NULL) { retStatus = STATUS_NOT_ENOUGH_MEMORY; goto CleanUp; } // Copy the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mCreateStreamMethodId, - jstrDeviceName, - jstrStreamName, - jstrContentType, - jstrKmsKeyId, - retention, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mCreateStreamMethodId, jstrDeviceName, jstrStreamName, jstrContentType, + jstrKmsKeyId, retention, pCallbackContext->callAfter, pCallbackContext->timeout, authByteArray, + pCallbackContext->pAuthInfo->type, pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -1901,17 +1783,15 @@ STATUS KinesisVideoClientWrapper::createStreamFunc(UINT64 customData, return retStatus; } -STATUS 
KinesisVideoClientWrapper::describeStreamFunc(UINT64 customData, - PCHAR streamName, - PServiceCallContext pCallbackContext) +STATUS KinesisVideoClientWrapper::describeStreamFunc(UINT64 customData, PCHAR streamName, PServiceCallContext pCallbackContext) { DLOGS("TID 0x%016" PRIx64 " describeStreamFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrStreamName = NULL; @@ -1928,28 +1808,17 @@ STATUS KinesisVideoClientWrapper::describeStreamFunc(UINT64 customData, // Call the Java func jstrStreamName = env->NewStringUTF(streamName); authByteArray = env->NewByteArray(pCallbackContext->pAuthInfo->size); - if (jstrStreamName == NULL || - authByteArray == NULL) { + if (jstrStreamName == NULL || authByteArray == NULL) { retStatus = STATUS_NOT_ENOUGH_MEMORY; goto CleanUp; } // Copy the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mDescribeStreamMethodId, - jstrStreamName, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mDescribeStreamMethodId, jstrStreamName, pCallbackContext->callAfter, + pCallbackContext->timeout, authByteArray, pCallbackContext->pAuthInfo->type, pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -1971,18 +1840,15 @@ STATUS 
KinesisVideoClientWrapper::describeStreamFunc(UINT64 customData, return retStatus; } -STATUS KinesisVideoClientWrapper::getStreamingEndpointFunc(UINT64 customData, - PCHAR streamName, - PCHAR apiName, - PServiceCallContext pCallbackContext) +STATUS KinesisVideoClientWrapper::getStreamingEndpointFunc(UINT64 customData, PCHAR streamName, PCHAR apiName, PServiceCallContext pCallbackContext) { DLOGS("TID 0x%016" PRIx64 " getStreamingEndpointFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrStreamName = NULL; @@ -2001,30 +1867,18 @@ STATUS KinesisVideoClientWrapper::getStreamingEndpointFunc(UINT64 customData, jstrStreamName = env->NewStringUTF(streamName); jstrApiName = env->NewStringUTF(apiName); authByteArray = env->NewByteArray(pCallbackContext->pAuthInfo->size); - if (jstrStreamName == NULL || - jstrApiName == NULL || - authByteArray == NULL) { + if (jstrStreamName == NULL || jstrApiName == NULL || authByteArray == NULL) { retStatus = STATUS_NOT_ENOUGH_MEMORY; goto CleanUp; } // Copy the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mGetStreamingEndpointMethodId, - jstrStreamName, - jstrApiName, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mGetStreamingEndpointMethodId, 
jstrStreamName, jstrApiName, + pCallbackContext->callAfter, pCallbackContext->timeout, authByteArray, pCallbackContext->pAuthInfo->type, + pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -2046,18 +1900,16 @@ STATUS KinesisVideoClientWrapper::getStreamingEndpointFunc(UINT64 customData, return retStatus; } -STATUS KinesisVideoClientWrapper::getStreamingTokenFunc(UINT64 customData, - PCHAR streamName, - STREAM_ACCESS_MODE accessMode, - PServiceCallContext pCallbackContext) +STATUS KinesisVideoClientWrapper::getStreamingTokenFunc(UINT64 customData, PCHAR streamName, STREAM_ACCESS_MODE accessMode, + PServiceCallContext pCallbackContext) { DLOGS("TID 0x%016" PRIx64 " getStreamingTokenFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrStreamName = NULL; @@ -2074,28 +1926,17 @@ STATUS KinesisVideoClientWrapper::getStreamingTokenFunc(UINT64 customData, // Call the Java func jstrStreamName = env->NewStringUTF(streamName); authByteArray = env->NewByteArray(pCallbackContext->pAuthInfo->size); - if (jstrStreamName == NULL || - authByteArray == NULL) { + if (jstrStreamName == NULL || authByteArray == NULL) { retStatus = STATUS_NOT_ENOUGH_MEMORY; goto CleanUp; } // Copy the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mGetStreamingTokenMethodId, - jstrStreamName, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - 
pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mGetStreamingTokenMethodId, jstrStreamName, pCallbackContext->callAfter, + pCallbackContext->timeout, authByteArray, pCallbackContext->pAuthInfo->type, pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -2117,22 +1958,17 @@ STATUS KinesisVideoClientWrapper::getStreamingTokenFunc(UINT64 customData, return retStatus; } -STATUS KinesisVideoClientWrapper::putStreamFunc(UINT64 customData, - PCHAR streamName, - PCHAR containerType, - UINT64 streamStartTime, - BOOL absoluteFragmentTimestamp, - BOOL ackRequired, - PCHAR streamingEndpoint, - PServiceCallContext pCallbackContext) +STATUS KinesisVideoClientWrapper::putStreamFunc(UINT64 customData, PCHAR streamName, PCHAR containerType, UINT64 streamStartTime, + BOOL absoluteFragmentTimestamp, BOOL ackRequired, PCHAR streamingEndpoint, + PServiceCallContext pCallbackContext) { DLOGS("TID 0x%016" PRIx64 " putStreamFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrStreamName = NULL, jstrContainerType = NULL, jstrStreamingEndpoint = NULL; @@ -2151,35 +1987,18 @@ STATUS KinesisVideoClientWrapper::putStreamFunc(UINT64 customData, jstrContainerType = env->NewStringUTF(containerType); jstrStreamingEndpoint = env->NewStringUTF(streamingEndpoint); authByteArray = env->NewByteArray(pCallbackContext->pAuthInfo->size); - if (jstrStreamName == NULL || - jstrContainerType == NULL || - jstrStreamingEndpoint == NULL || - authByteArray == NULL) { + if (jstrStreamName == NULL || jstrContainerType == NULL || jstrStreamingEndpoint == NULL || authByteArray == NULL) { retStatus = STATUS_NOT_ENOUGH_MEMORY; goto CleanUp; } // Copy 
the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mPutStreamMethodId, - jstrStreamName, - jstrContainerType, - streamStartTime, - absoluteFragmentTimestamp == TRUE, - ackRequired == TRUE, - jstrStreamingEndpoint, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mPutStreamMethodId, jstrStreamName, jstrContainerType, streamStartTime, + absoluteFragmentTimestamp == TRUE, ackRequired == TRUE, jstrStreamingEndpoint, pCallbackContext->callAfter, + pCallbackContext->timeout, authByteArray, pCallbackContext->pAuthInfo->type, pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -2205,13 +2024,10 @@ STATUS KinesisVideoClientWrapper::putStreamFunc(UINT64 customData, return retStatus; } -STATUS KinesisVideoClientWrapper::tagResourceFunc(UINT64 customData, - PCHAR streamArn, - UINT32 tagCount, - PTag tags, - PServiceCallContext pCallbackContext) +STATUS KinesisVideoClientWrapper::tagResourceFunc(UINT64 customData, PCHAR streamArn, UINT32 tagCount, PTag tags, + PServiceCallContext pCallbackContext) { - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrStreamArn = NULL, jstrTagName = NULL, jstrTagValue = NULL; @@ -2225,7 +2041,7 @@ STATUS KinesisVideoClientWrapper::tagResourceFunc(UINT64 customData, DLOGS("TID 0x%016" PRIx64 " tagResourceFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper 
!= NULL); // Early return if no tags @@ -2281,22 +2097,11 @@ STATUS KinesisVideoClientWrapper::tagResourceFunc(UINT64 customData, } // Copy the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mTagResourceMethodId, - jstrStreamArn, - tagArray, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mTagResourceMethodId, jstrStreamArn, tagArray, pCallbackContext->callAfter, + pCallbackContext->timeout, authByteArray, pCallbackContext->pAuthInfo->type, pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -2325,7 +2130,7 @@ STATUS KinesisVideoClientWrapper::tagResourceFunc(UINT64 customData, STATUS KinesisVideoClientWrapper::getAuthInfo(jmethodID methodId, PBYTE* ppCert, PUINT32 pSize, PUINT64 pExpiration) { // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -2341,7 +2146,7 @@ STATUS KinesisVideoClientWrapper::getAuthInfo(jmethodID methodId, PBYTE* ppCert, jmethodID authExpirationMethodId = NULL; // Store this pointer so we can run the common macros - KinesisVideoClientWrapper *pWrapper = this; + KinesisVideoClientWrapper* pWrapper = this; INT32 envState = mJvm->GetEnv((PVOID*) &env, JNI_VERSION_1_6); if (envState == JNI_EDETACHED) { @@ -2440,11 +2245,11 @@ STATUS KinesisVideoClientWrapper::clientReadyFunc(UINT64 customData, CLIENT_HAND { DLOGS("TID 0x%016" PRIx64 " clientReadyFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + 
KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Get the ENV from the JavaVM - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; @@ -2472,7 +2277,7 @@ STATUS KinesisVideoClientWrapper::clientReadyFunc(UINT64 customData, CLIENT_HAND STATUS KinesisVideoClientWrapper::createDeviceFunc(UINT64 customData, PCHAR deviceName, PServiceCallContext pCallbackContext) { - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrDeviceName = NULL; @@ -2481,7 +2286,7 @@ STATUS KinesisVideoClientWrapper::createDeviceFunc(UINT64 customData, PCHAR devi DLOGS("TID 0x%016" PRIx64 " createDeviceFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Device name should be valid @@ -2508,21 +2313,11 @@ STATUS KinesisVideoClientWrapper::createDeviceFunc(UINT64 customData, PCHAR devi } // Copy the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mCreateDeviceMethodId, - jstrDeviceName, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mCreateDeviceMethodId, jstrDeviceName, pCallbackContext->callAfter, + pCallbackContext->timeout, authByteArray, pCallbackContext->pAuthInfo->type, pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -2542,7 +2337,7 @@ STATUS KinesisVideoClientWrapper::createDeviceFunc(UINT64 
customData, PCHAR devi STATUS KinesisVideoClientWrapper::deviceCertToTokenFunc(UINT64 customData, PCHAR deviceName, PServiceCallContext pCallbackContext) { - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrDeviceName = NULL; @@ -2551,7 +2346,7 @@ STATUS KinesisVideoClientWrapper::deviceCertToTokenFunc(UINT64 customData, PCHAR DLOGS("TID 0x%016" PRIx64 " deviceCertToTokenFunc called.", GETTID()); - KinesisVideoClientWrapper *pWrapper = FROM_WRAPPER_HANDLE(customData); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(customData); CHECK(pWrapper != NULL); // Device name should be valid @@ -2578,21 +2373,11 @@ STATUS KinesisVideoClientWrapper::deviceCertToTokenFunc(UINT64 customData, PCHAR } // Copy the bits into the managed array - env->SetByteArrayRegion(authByteArray, - 0, - pCallbackContext->pAuthInfo->size, - (const jbyte*) pCallbackContext->pAuthInfo->data); + env->SetByteArrayRegion(authByteArray, 0, pCallbackContext->pAuthInfo->size, (const jbyte*) pCallbackContext->pAuthInfo->data); // Invoke the callback - retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, - pWrapper->mDeviceCertToTokenMethodId, - jstrDeviceName, - pCallbackContext->callAfter, - pCallbackContext->timeout, - authByteArray, - pCallbackContext->pAuthInfo->type, - pCallbackContext->customData - ); + retStatus = env->CallIntMethod(pWrapper->mGlobalJniObjRef, pWrapper->mDeviceCertToTokenMethodId, jstrDeviceName, pCallbackContext->callAfter, + pCallbackContext->timeout, authByteArray, pCallbackContext->pAuthInfo->type, pCallbackContext->customData); CHK_JVM_EXCEPTION(env); @@ -2613,16 +2398,20 @@ STATUS KinesisVideoClientWrapper::deviceCertToTokenFunc(UINT64 customData, PCHAR AUTH_INFO_TYPE KinesisVideoClientWrapper::authInfoTypeFromInt(UINT32 authInfoType) { switch (authInfoType) { - case 1: return AUTH_INFO_TYPE_CERT; - case 2: return AUTH_INFO_TYPE_STS; - case 3: return AUTH_INFO_NONE; - default: return AUTH_INFO_UNDEFINED; + 
case 1: + return AUTH_INFO_TYPE_CERT; + case 2: + return AUTH_INFO_TYPE_STS; + case 3: + return AUTH_INFO_NONE; + default: + return AUTH_INFO_UNDEFINED; } } VOID KinesisVideoClientWrapper::logPrintFunc(UINT32 level, PCHAR tag, PCHAR fmt, ...) { - JNIEnv *env; + JNIEnv* env; BOOL detached = FALSE; STATUS retStatus = STATUS_SUCCESS; jstring jstrTag = NULL, jstrFmt = NULL, jstrBuffer = NULL; @@ -2637,7 +2426,7 @@ VOID KinesisVideoClientWrapper::logPrintFunc(UINT32 level, PCHAR tag, PCHAR fmt, } detached = TRUE; } - + va_list list; va_start(list, fmt); vsnprintf(buffer, MAX_LOG_MESSAGE_LENGTH, fmt, list); @@ -2659,15 +2448,19 @@ VOID KinesisVideoClientWrapper::logPrintFunc(UINT32 level, PCHAR tag, PCHAR fmt, /* Sample logs from PIC as displayed by log4j2 in Java Producer SDK - 2021-12-10 10:01:53,874 [main] TRACE c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] KinesisVideoProducerJNI - Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream(): Enter - 2021-12-10 10:01:53,875 [main] INFO c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] KinesisVideoProducerJNI - Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream(): Creating Kinesis Video stream. - 2021-12-10 10:01:53,875 [main] INFO c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] KinesisVideoClient - createKinesisVideoStream(): Creating Kinesis Video Stream. 
- 2021-12-10 10:01:53,875 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Kinesis Video Stream Info + 2021-12-10 10:01:53,874 [main] TRACE c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] KinesisVideoProducerJNI - + Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream(): Enter 2021-12-10 10:01:53,875 + [main] INFO c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] KinesisVideoProducerJNI - + Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream(): Creating Kinesis Video stream. + 2021-12-10 10:01:53,875 [main] INFO c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] KinesisVideoClient - createKinesisVideoStream(): Creating + Kinesis Video Stream. 2021-12-10 10:01:53,875 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Kinesis Video + Stream Info 2021-12-10 10:01:53,875 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Kinesis Video Stream Info - 2021-12-10 10:01:53,875 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Stream name: NewStreamJava12 - 2021-12-10 10:01:53,875 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Streaming type: STREAMING_TYPE_REALTIME - 2021-12-10 10:01:53,876 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Content type: video/h264 + 2021-12-10 10:01:53,875 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Stream name: NewStreamJava12 + 2021-12-10 10:01:53,875 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Streaming type: + STREAMING_TYPE_REALTIME 2021-12-10 10:01:53,876 [main] DEBUG c.a.k.j.c.KinesisVideoJavaClientFactory - [PIC] Stream - logStreamInfo(): Content + type: video/h264 */ CleanUp: @@ -2687,5 +2480,5 @@ VOID 
KinesisVideoClientWrapper::logPrintFunc(UINT32 level, PCHAR tag, PCHAR fmt, // Detach the thread if we have attached it to JVM if (detached) { mJvm->DetachCurrentThread(); - } + } } \ No newline at end of file diff --git a/src/JNI/com/amazonaws/kinesis/video/producer/jni/NativeProducerInterface.cpp b/src/JNI/com/amazonaws/kinesis/video/producer/jni/NativeProducerInterface.cpp index 5dce7ecf..8402e0ae 100644 --- a/src/JNI/com/amazonaws/kinesis/video/producer/jni/NativeProducerInterface.cpp +++ b/src/JNI/com/amazonaws/kinesis/video/producer/jni/NativeProducerInterface.cpp @@ -20,445 +20,475 @@ #ifdef __cplusplus extern "C" { #endif - /** - * Returns a hardcoded library version string. - */ - PUBLIC_API jstring JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeLibraryVersion(JNIEnv* env, jobject thiz) - { - return env->NewStringUTF(NATIVE_LIBRARY_VERSION); - } - - /** - * Returns a string representing the date and time when this code was compiled. - */ - PUBLIC_API jstring JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeCodeCompileTime(JNIEnv* env, jobject thiz) - { - return env->NewStringUTF(__DATE__ " " __TIME__); - } - - /** - * Releases the kinesis video client object. All operations will fail from this moment on - */ - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoClient(JNIEnv* env, jobject thiz, jlong handle) - { - ENTER(); - - - DLOGI("Freeing Kinesis Video client."); - CHECK(env != NULL && thiz != NULL); - - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - // Cache the globalRef for later deletion - jobject globalRef = pWrapper->getGlobalRef(); +/** + * Returns a hardcoded library version string. 
+ */ +PUBLIC_API jstring JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeLibraryVersion(JNIEnv* env, + jobject thiz) +{ + return env->NewStringUTF(NATIVE_LIBRARY_VERSION); +} - // Free the existing engine - delete pWrapper; +/** + * Returns a string representing the date and time when this code was compiled. + */ +PUBLIC_API jstring JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeCodeCompileTime(JNIEnv* env, + jobject thiz) +{ + return env->NewStringUTF(__DATE__ " " __TIME__); +} - // Free the global reference - if (globalRef != NULL) { - env->DeleteGlobalRef(globalRef); - } +/** + * Releases the kinesis video client object. All operations will fail from this moment on + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoClient(JNIEnv* env, + jobject thiz, + jlong handle) +{ + ENTER(); + + DLOGI("Freeing Kinesis Video client."); + CHECK(env != NULL && thiz != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + // Cache the globalRef for later deletion + jobject globalRef = pWrapper->getGlobalRef(); + + // Free the existing engine + delete pWrapper; + + // Free the global reference + if (globalRef != NULL) { + env->DeleteGlobalRef(globalRef); } - - LEAVE(); } - /** - * Creates and initializes the kinesis video client - */ - PUBLIC_API jlong JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoClient(JNIEnv* env, jobject thiz, jobject deviceInfo) - { - ENTER(); - - KinesisVideoClientWrapper* pWrapper = NULL; - jlong retValue = (jlong) NULL; + LEAVE(); +} - DLOGI("Creating Kinesis Video client."); - CHECK(env != NULL && thiz != NULL); - - if (deviceInfo == NULL) { - DLOGE("DeviceInfo is NULL."); - throwNativeException(env, EXCEPTION_NAME, "DeviceInfo is NULL.", STATUS_NULL_ARG); 
- goto CleanUp; - } - - // Create the wrapper engine - pWrapper = new KinesisVideoClientWrapper(env, thiz, deviceInfo); +/** + * Creates and initializes the kinesis video client + */ +PUBLIC_API jlong JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoClient( + JNIEnv* env, jobject thiz, jobject deviceInfo) +{ + ENTER(); - // Returning the pointer as a handle - retValue = (jlong) TO_WRAPPER_HANDLE(pWrapper); + KinesisVideoClientWrapper* pWrapper = NULL; + jlong retValue = (jlong) NULL; -CleanUp: + DLOGI("Creating Kinesis Video client."); + CHECK(env != NULL && thiz != NULL); - LEAVE(); - return retValue; + if (deviceInfo == NULL) { + DLOGE("DeviceInfo is NULL."); + throwNativeException(env, EXCEPTION_NAME, "DeviceInfo is NULL.", STATUS_NULL_ARG); + goto CleanUp; } - /** - * Stops KinesisVideo streams - */ - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStreams(JNIEnv* env, jobject thiz, jlong handle) - { - ENTER(); + // Create the wrapper engine + pWrapper = new KinesisVideoClientWrapper(env, thiz, deviceInfo); - DLOGI("Stopping Kinesis Video streams."); - CHECK(env != NULL && thiz != NULL); + // Returning the pointer as a handle + retValue = (jlong) TO_WRAPPER_HANDLE(pWrapper); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->stopKinesisVideoStreams(); - } - - LEAVE(); - } - - /** - * Stops a KinesisVideo stream - */ - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStream(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle) - { - ENTER(); - - DLOGI("Stopping Kinesis Video stream."); - CHECK(env != NULL && thiz != NULL); +CleanUp: - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - 
SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->stopKinesisVideoStream(streamHandle); - } + LEAVE(); + return retValue; +} - LEAVE(); +/** + * Stops KinesisVideo streams + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStreams(JNIEnv* env, + jobject thiz, + jlong handle) +{ + ENTER(); + + DLOGI("Stopping Kinesis Video streams."); + CHECK(env != NULL && thiz != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->stopKinesisVideoStreams(); } - /** - * Frees a KinesisVideo stream. - */ - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoStream(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle) - { - ENTER(); - - DLOGI("Stopping Kinesis Video stream."); - CHECK(env != NULL && thiz != NULL); - - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->freeKinesisVideoStream(streamHandle); - } + LEAVE(); +} - LEAVE(); +/** + * Stops a KinesisVideo stream + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStream(JNIEnv* env, + jobject thiz, + jlong handle, + jlong streamHandle) +{ + ENTER(); + + DLOGI("Stopping Kinesis Video stream."); + CHECK(env != NULL && thiz != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->stopKinesisVideoStream(streamHandle); } - /** - * Extracts the KinesisVideo client object metrics - */ - PUBLIC_API void JNICALL 
Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoMetrics(JNIEnv* env, jobject thiz, jlong handle, jobject kinesisVideoMetrics) - { - ENTERS(); + LEAVE(); +} - DLOGS("Getting Kinesis Video metrics."); - CHECK(env != NULL && thiz != NULL); - - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->getKinesisVideoMetrics(kinesisVideoMetrics); - } - - LEAVES(); +/** + * Frees a KinesisVideo stream. + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoStream(JNIEnv* env, + jobject thiz, + jlong handle, + jlong streamHandle) +{ + ENTER(); + + DLOGI("Stopping Kinesis Video stream."); + CHECK(env != NULL && thiz != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->freeKinesisVideoStream(streamHandle); } - /** - * Extracts the KinesisVideo client object metrics - */ - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamMetrics(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jobject kinesisVideoStreamMetrics) - { - ENTERS(); + LEAVE(); +} - DLOGS("Getting Kinesis Video stream metrics."); - CHECK(env != NULL && thiz != NULL); +/** + * Extracts the KinesisVideo client object metrics + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoMetrics( + JNIEnv* env, jobject thiz, jlong handle, jobject kinesisVideoMetrics) +{ + ENTERS(); + + DLOGS("Getting Kinesis Video metrics."); + CHECK(env != NULL && thiz != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock 
l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->getKinesisVideoMetrics(kinesisVideoMetrics); + } - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->getKinesisVideoStreamMetrics(streamHandle, kinesisVideoStreamMetrics); - } + LEAVES(); +} - LEAVES(); +/** + * Extracts the KinesisVideo client object metrics + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamMetrics( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jobject kinesisVideoStreamMetrics) +{ + ENTERS(); + + DLOGS("Getting Kinesis Video stream metrics."); + CHECK(env != NULL && thiz != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->getKinesisVideoStreamMetrics(streamHandle, kinesisVideoStreamMetrics); } - /** - * Creates and initializes the kinesis video client - */ - PUBLIC_API jlong JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream(JNIEnv* env, jobject thiz, jlong handle, jobject streamInfo) - { - ENTER(); - jlong streamHandle = INVALID_STREAM_HANDLE_VALUE; - - DLOGI("Creating Kinesis Video stream."); - CHECK(env != NULL && thiz != NULL && streamInfo != NULL); - - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - streamHandle = pWrapper->createKinesisVideoStream(streamInfo); - } - - LEAVE(); + LEAVES(); +} - return streamHandle; +/** + * Creates and initializes the kinesis video client + */ +PUBLIC_API jlong JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream( + JNIEnv* env, jobject thiz, jlong handle, jobject 
streamInfo) +{ + ENTER(); + jlong streamHandle = INVALID_STREAM_HANDLE_VALUE; + + DLOGI("Creating Kinesis Video stream."); + CHECK(env != NULL && thiz != NULL && streamInfo != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + streamHandle = pWrapper->createKinesisVideoStream(streamInfo); } - /** - * Puts a frame in to the frame buffer - */ - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFrame(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jobject kinesisVideoFrame) - { - ENTERS(); + LEAVE(); - DLOGS("Putting Kinesis Video frame for stream 0x%016" PRIx64 ".", streamHandle); - CHECK(env != NULL && thiz != NULL && kinesisVideoFrame != NULL); + return streamHandle; +} - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->putKinesisVideoFrame(streamHandle, kinesisVideoFrame); - } - - LEAVES(); +/** + * Puts a frame in to the frame buffer + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFrame( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jobject kinesisVideoFrame) +{ + ENTERS(); + + DLOGS("Putting Kinesis Video frame for stream 0x%016" PRIx64 ".", streamHandle); + CHECK(env != NULL && thiz != NULL && kinesisVideoFrame != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->putKinesisVideoFrame(streamHandle, kinesisVideoFrame); } - /** - * Puts a metadata in to the frame buffer - */ - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFragmentMetadata(JNIEnv* 
env, jobject thiz, jlong handle, jlong streamHandle, jstring metadataName, jstring metadataValue, jboolean persistent) - { - ENTERS(); - - DLOGS("Putting Kinesis Video metadata for stream 0x%016" PRIx64 ".", streamHandle); - CHECK(env != NULL && thiz != NULL); + LEAVES(); +} - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->putKinesisVideoFragmentMetadata(streamHandle, metadataName, metadataValue, persistent); - } - - LEAVES(); +/** + * Puts a metadata in to the frame buffer + */ +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFragmentMetadata( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jstring metadataName, jstring metadataValue, jboolean persistent) +{ + ENTERS(); + + DLOGS("Putting Kinesis Video metadata for stream 0x%016" PRIx64 ".", streamHandle); + CHECK(env != NULL && thiz != NULL); + + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->putKinesisVideoFragmentMetadata(streamHandle, metadataName, metadataValue, persistent); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFragmentAck(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jobject fragmentAck) - { - ENTERS(); + LEAVES(); +} - DLOGS("Reporting Kinesis Video fragment ack for stream 0x%016" PRIx64 ".", streamHandle); - CHECK(env != NULL && thiz != NULL && fragmentAck != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFragmentAck( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jobject fragmentAck) +{ + ENTERS(); - KinesisVideoClientWrapper* 
pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->kinesisVideoStreamFragmentAck(streamHandle, uploadHandle, fragmentAck); - } + DLOGS("Reporting Kinesis Video fragment ack for stream 0x%016" PRIx64 ".", streamHandle); + CHECK(env != NULL && thiz != NULL && fragmentAck != NULL); - LEAVES(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->kinesisVideoStreamFragmentAck(streamHandle, uploadHandle, fragmentAck); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamParseFragmentAck(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jstring ack) - { - ENTERS(); + LEAVES(); +} - DLOGS("Parsing Kinesis Video fragment ack for stream 0x%016" PRIx64 ".", streamHandle); - CHECK(env != NULL && thiz != NULL && ack != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamParseFragmentAck( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jstring ack) +{ + ENTERS(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->kinesisVideoStreamParseFragmentAck(streamHandle, uploadHandle, ack); - } + DLOGS("Parsing Kinesis Video fragment ack for stream 0x%016" PRIx64 ".", streamHandle); + CHECK(env != NULL && thiz != NULL && ack != NULL); - LEAVES(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->kinesisVideoStreamParseFragmentAck(streamHandle, uploadHandle, ack); } - PUBLIC_API void JNICALL 
Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_describeStreamResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jobject streamDescription) - { - ENTER(); + LEAVES(); +} - DLOGI("Describe stream event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_describeStreamResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jobject streamDescription) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->describeStreamResult(streamHandle, httpStatusCode, streamDescription); - } + DLOGI("Describe stream event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->describeStreamResult(streamHandle, httpStatusCode, streamDescription); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingEndpointResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jstring streamingEndpoint) - { - ENTER(); + LEAVE(); +} - DLOGI("get streaming endpoint event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingEndpointResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jstring streamingEndpoint) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if 
(pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->getStreamingEndpointResult(streamHandle, httpStatusCode, streamingEndpoint); - } + DLOGI("get streaming endpoint event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->getStreamingEndpointResult(streamHandle, httpStatusCode, streamingEndpoint); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingTokenResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jobject streamingToken, jint tokenSize, jlong tokenExpiration) - { - ENTER(); + LEAVE(); +} - DLOGI("get streaming token event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingTokenResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jobject streamingToken, jint tokenSize, jlong tokenExpiration) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->getStreamingTokenResult(streamHandle, httpStatusCode, (jbyteArray) streamingToken, tokenSize, tokenExpiration); - } + DLOGI("get streaming token event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->getStreamingTokenResult(streamHandle, httpStatusCode, (jbyteArray) streamingToken, tokenSize, 
tokenExpiration); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createStreamResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jstring streamArn) - { - ENTERS(); + LEAVE(); +} - DLOGI("create stream event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createStreamResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jstring streamArn) +{ + ENTERS(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->createStreamResult(streamHandle, httpStatusCode, streamArn); - } + DLOGI("create stream event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVES(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->createStreamResult(streamHandle, httpStatusCode, streamArn); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putStreamResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jlong clientStreamHandle) - { - ENTER(); + LEAVES(); +} - DLOGI("put stream event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putStreamResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jint httpStatusCode, jlong clientStreamHandle) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - 
SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->putStreamResult(streamHandle, httpStatusCode, clientStreamHandle); - } + DLOGI("put stream event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->putStreamResult(streamHandle, httpStatusCode, clientStreamHandle); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_tagResourceResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong customData, jint httpStatusCode) - { - ENTER(); + LEAVE(); +} - DLOGI("tag resource event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_tagResourceResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong customData, jint httpStatusCode) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->tagResourceResult(customData, httpStatusCode); - } + DLOGI("tag resource event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->tagResourceResult(customData, httpStatusCode); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamData(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jobject dataBuffer, jint offset, jint length, jobject readResult) - { - ENTERS(); - jint retStatus = STATUS_SUCCESS; + 
LEAVE(); +} - DLOGS("get kinesis video stream data event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamData( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jobject dataBuffer, jint offset, jint length, jobject readResult) +{ + ENTERS(); + jint retStatus = STATUS_SUCCESS; - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->getKinesisVideoStreamData(streamHandle, uploadHandle, dataBuffer, offset, length, readResult); - } + DLOGS("get kinesis video stream data event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVES(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->getKinesisVideoStreamData(streamHandle, uploadHandle, dataBuffer, offset, length, readResult); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFormatChanged(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jobject codecPrivateData, jlong trackId) - { - ENTER(); + LEAVES(); +} - DLOGI("stream format changed event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFormatChanged( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jobject codecPrivateData, jlong trackId) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - 
pWrapper->streamFormatChanged(streamHandle, codecPrivateData, trackId); - } + DLOGI("stream format changed event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->streamFormatChanged(streamHandle, codecPrivateData, trackId); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createDeviceResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong deviceHandle, jint httpStatusCode, jstring deviceArn) - { - ENTER(); + LEAVE(); +} - DLOGI("create device event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createDeviceResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong deviceHandle, jint httpStatusCode, jstring deviceArn) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->createDeviceResult(deviceHandle, httpStatusCode, deviceArn); - } + DLOGI("create device event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->createDeviceResult(deviceHandle, httpStatusCode, deviceArn); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_deviceCertToTokenResultEvent(JNIEnv* env, jobject thiz, jlong handle, jlong deviceHandle, jint httpStatusCode, jobject token, jint tokenSize, jlong tokenExpiration) - { - ENTER(); + LEAVE(); +} - DLOGI("device cert 
to token event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_deviceCertToTokenResultEvent( + JNIEnv* env, jobject thiz, jlong handle, jlong deviceHandle, jint httpStatusCode, jobject token, jint tokenSize, jlong tokenExpiration) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->deviceCertToTokenResult(deviceHandle, httpStatusCode, (jbyteArray) token, tokenSize, tokenExpiration); - } + DLOGI("device cert to token event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->deviceCertToTokenResult(deviceHandle, httpStatusCode, (jbyteArray) token, tokenSize, tokenExpiration); } - PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamTerminated(JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jint httpStatusCode) - { - ENTER(); + LEAVE(); +} - DLOGI("Stream terminated event for handle 0x%016" PRIx64 ".", (UINT64) handle); - CHECK(env != NULL && thiz != NULL); +PUBLIC_API void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamTerminated( + JNIEnv* env, jobject thiz, jlong handle, jlong streamHandle, jlong uploadHandle, jint httpStatusCode) +{ + ENTER(); - KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); - if (pWrapper != NULL) { - SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); - pWrapper->kinesisVideoStreamTerminated(streamHandle, uploadHandle, httpStatusCode); - } + DLOGI("Stream 
terminated event for handle 0x%016" PRIx64 ".", (UINT64) handle); + CHECK(env != NULL && thiz != NULL); - LEAVE(); + KinesisVideoClientWrapper* pWrapper = FROM_WRAPPER_HANDLE(handle); + if (pWrapper != NULL) { + SyncMutex::Autolock l(pWrapper->getSyncLock(), __FUNCTION__); + pWrapper->kinesisVideoStreamTerminated(streamHandle, uploadHandle, httpStatusCode); } + LEAVE(); +} + #ifdef __cplusplus } // End extern "C" #endif diff --git a/src/JNI/com/amazonaws/kinesis/video/producer/jni/Parameters.cpp b/src/JNI/com/amazonaws/kinesis/video/producer/jni/Parameters.cpp index 515122e5..30b2e3d7 100644 --- a/src/JNI/com/amazonaws/kinesis/video/producer/jni/Parameters.cpp +++ b/src/JNI/com/amazonaws/kinesis/video/producer/jni/Parameters.cpp @@ -5,11 +5,11 @@ #include "com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.h" -BOOL setDeviceInfo(JNIEnv *env, jobject deviceInfo, PDeviceInfo pDeviceInfo) +BOOL setDeviceInfo(JNIEnv* env, jobject deviceInfo, PDeviceInfo pDeviceInfo) { STATUS retStatus = STATUS_SUCCESS; jmethodID methodId = NULL; - const char *retChars; + const char* retChars; CHECK(env != NULL && deviceInfo != NULL && pDeviceInfo != NULL); @@ -154,10 +154,11 @@ BOOL setDeviceInfo(JNIEnv *env, jobject deviceInfo, PDeviceInfo pDeviceInfo) return STATUS_FAILED(retStatus) ? FALSE : TRUE; } -BOOL setClientInfo(JNIEnv *env, jobject clientInfo, PClientInfo pClientInfo) { +BOOL setClientInfo(JNIEnv* env, jobject clientInfo, PClientInfo pClientInfo) +{ STATUS retStatus = STATUS_SUCCESS; jmethodID methodId = NULL; - const char *retChars; + const char* retChars; CHECK(env != NULL && clientInfo != NULL && pClientInfo != NULL); @@ -253,11 +254,10 @@ BOOL setClientInfo(JNIEnv *env, jobject clientInfo, PClientInfo pClientInfo) { return STATUS_FAILED(retStatus) ? 
FALSE : TRUE; } - -BOOL setTags(JNIEnv *env, jobjectArray tagArray, PTag* ppTags, PUINT32 pTagCount) +BOOL setTags(JNIEnv* env, jobjectArray tagArray, PTag* ppTags, PUINT32 pTagCount) { STATUS retStatus = STATUS_SUCCESS; - const char *retChars; + const char* retChars; jclass tagClass = NULL; jobject tagObj = NULL; jstring retString = NULL; @@ -277,11 +277,12 @@ BOOL setTags(JNIEnv *env, jobjectArray tagArray, PTag* ppTags, PUINT32 pTagCount // Allocate enough memory. // NOTE: We need to add two NULL terminators for tag name and tag value - CHK(NULL != (pTags = (PTag) MEMCALLOC(tagCount, SIZEOF(Tag) + (MAX_TAG_NAME_LEN + MAX_TAG_VALUE_LEN + 2) * SIZEOF(CHAR))), STATUS_NOT_ENOUGH_MEMORY); + CHK(NULL != (pTags = (PTag) MEMCALLOC(tagCount, SIZEOF(Tag) + (MAX_TAG_NAME_LEN + MAX_TAG_VALUE_LEN + 2) * SIZEOF(CHAR))), + STATUS_NOT_ENOUGH_MEMORY); // Iterate over and set the values. NOTE: the actual storage for the strings will follow the array pCurPtr = (PCHAR) (pTags + tagCount); - for (;i < tagCount; i++) { + for (; i < tagCount; i++) { CHK(NULL != (tagObj = env->GetObjectArrayElement(tagArray, (jsize) i)), STATUS_INVALID_ARG); CHK_JVM_EXCEPTION(env); @@ -355,7 +356,7 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) jbyte* bufferPtr = NULL; jsize arrayLen = 0; UINT32 trackInfoCount = 0; - const char *retChars; + const char* retChars; CHECK(env != NULL && streamInfo != NULL && pStreamInfo != NULL); @@ -461,7 +462,6 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) CHK_JVM_EXCEPTION(env); } - methodId = env->GetMethodID(cls, "getMaxLatency", "()J"); if (methodId == NULL) { DLOGW("Couldn't find method id getMaxLatency"); @@ -581,7 +581,7 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) if (methodId == NULL) { DLOGW("Couldn't find method id getTrackInfoVersion"); } else { - for(UINT32 i = 0; i < trackInfoCount; ++i) { + for (UINT32 i = 0; i < trackInfoCount; ++i) { 
pStreamInfo->streamCaps.trackInfoList[i].version = (UINT64) env->CallIntMethod(streamInfo, methodId, i); CHK_JVM_EXCEPTION(env); } @@ -591,7 +591,7 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) if (methodId == NULL) { DLOGW("Couldn't find method id getTrackName"); } else { - for(UINT32 i = 0; i < trackInfoCount; ++i) { + for (UINT32 i = 0; i < trackInfoCount; ++i) { jstring retString = (jstring) env->CallObjectMethod(streamInfo, methodId, i); CHK_JVM_EXCEPTION(env); @@ -610,7 +610,7 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) if (methodId == NULL) { DLOGW("Couldn't find method id getCodecId"); } else { - for(UINT32 i = 0; i < trackInfoCount; ++i) { + for (UINT32 i = 0; i < trackInfoCount; ++i) { jstring retString = (jstring) env->CallObjectMethod(streamInfo, methodId, i); CHK_JVM_EXCEPTION(env); @@ -629,7 +629,7 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) if (methodId == NULL) { DLOGW("Couldn't find method id getCodecPrivateData"); } else { - for(UINT32 i = 0; i < trackInfoCount; ++i) { + for (UINT32 i = 0; i < trackInfoCount; ++i) { byteArray = (jbyteArray) env->CallObjectMethod(streamInfo, methodId, i); CHK_JVM_EXCEPTION(env); @@ -659,7 +659,7 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) if (methodId == NULL) { DLOGW("Couldn't find method id getTrackInfoType"); } else { - for(UINT32 i = 0; i < trackInfoCount; ++i) { + for (UINT32 i = 0; i < trackInfoCount; ++i) { pStreamInfo->streamCaps.trackInfoList[i].trackType = (MKV_TRACK_INFO_TYPE) env->CallIntMethod(streamInfo, methodId, i); CHK_JVM_EXCEPTION(env); } @@ -669,7 +669,7 @@ BOOL setStreamInfo(JNIEnv* env, jobject streamInfo, PStreamInfo pStreamInfo) if (methodId == NULL) { DLOGW("Couldn't find method id getTrackId"); } else { - for(UINT32 i = 0; i < trackInfoCount; ++i) { + for (UINT32 i = 0; i < trackInfoCount; ++i) { pStreamInfo->streamCaps.trackInfoList[i].trackId = 
(UINT64) env->CallLongMethod(streamInfo, methodId, i); CHK_JVM_EXCEPTION(env); } @@ -920,7 +920,7 @@ BOOL setStreamDescription(JNIEnv* env, jobject streamDescription, PStreamDescrip { STATUS retStatus = STATUS_SUCCESS; jmethodID methodId = NULL; - const char *retChars; + const char* retChars; CHECK(env != NULL && streamDescription != NULL && pStreamDesc != NULL); @@ -1074,7 +1074,7 @@ BOOL setStreamingEndpoint(JNIEnv* env, jstring streamingEndpoint, PCHAR pEndpoin { CHECK(env != NULL && streamingEndpoint != NULL && pEndpoint != NULL); - const char *endpointChars = env->GetStringUTFChars(streamingEndpoint, NULL); + const char* endpointChars = env->GetStringUTFChars(streamingEndpoint, NULL); STRNCPY(pEndpoint, endpointChars, MAX_URI_CHAR_LEN + 1); pEndpoint[MAX_URI_CHAR_LEN] = '\0'; env->ReleaseStringUTFChars(streamingEndpoint, endpointChars); diff --git a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/JNICommon.h b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/JNICommon.h index ce6cf70b..3d254d96 100644 --- a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/JNICommon.h +++ b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/JNICommon.h @@ -5,15 +5,14 @@ #ifndef __JNICOMMON_H__ #define __JNICOMMON_H__ -#include // Basic native API +#include // Basic native API #include #define EXCEPTION_NAME "com/amazonaws/kinesisvideo/producer/ProducerException" inline void throwNativeException(JNIEnv* env, const char* name, const char* msg, STATUS status) { - if (env->ExceptionCheck()) - { + if (env->ExceptionCheck()) { env->ExceptionClear(); // Discard pending exception (should never happen) DLOGW("Had to clear a pending exception found when throwing \"%s\" (code 0x%x)", msg, status); } @@ -34,8 +33,7 @@ inline void throwNativeException(JNIEnv* env, const char* name, const char* msg, const jobject exception = env->NewObject(exceptionClass, constructor, msgString, intStatus); CHECK(exception != NULL); - if (env->Throw(jthrowable(exception)) != 
JNI_OK) - { + if (env->Throw(jthrowable(exception)) != JNI_OK) { DLOGE("Failed throwing %s: %s (status 0x%x)", name, msg, status); } diff --git a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.h b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.h index cbe27bfe..47353bbe 100644 --- a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.h +++ b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/KinesisVideoClientWrapper.h @@ -6,55 +6,54 @@ #pragma once -#define HAVE_PTHREADS 1 // Makes threads.h use pthreads +#define HAVE_PTHREADS 1 // Makes threads.h use pthreads #include "SyncMutex.h" #include "TimedSemaphore.h" #include "JNICommon.h" #include "Parameters.h" -#define TO_WRAPPER_HANDLE(p) ((jlong) (p)) -#define FROM_WRAPPER_HANDLE(h) ((KinesisVideoClientWrapper*) (h)) +#define TO_WRAPPER_HANDLE(p) ((jlong) (p)) +#define FROM_WRAPPER_HANDLE(h) ((KinesisVideoClientWrapper*) (h)) #define JNI_VER JNI_VERSION_1_6 -#define CHK_JVM_EXCEPTION(env) \ - do { \ - /* If there was an exception we need */ \ - if (env->ExceptionCheck() == JNI_TRUE) { \ - /* Print out the message to the stderr */ \ - env->ExceptionDescribe(); \ - /* Clear the exception */ \ - env->ExceptionClear(); \ - /* Terminate the process as we didn't expect any exceptions */ \ - DLOGE("JVM threw an unexpected exception."); \ - retStatus = STATUS_INVALID_OPERATION; \ - goto CleanUp; \ - } \ +#define CHK_JVM_EXCEPTION(env) \ + do { \ + /* If there was an exception we need */ \ + if (env->ExceptionCheck() == JNI_TRUE) { \ + /* Print out the message to the stderr */ \ + env->ExceptionDescribe(); \ + /* Clear the exception */ \ + env->ExceptionClear(); \ + /* Terminate the process as we didn't expect any exceptions */ \ + DLOGE("JVM threw an unexpected exception."); \ + retStatus = STATUS_INVALID_OPERATION; \ + goto CleanUp; \ + } \ } while (FALSE) #ifdef ANDROID_BUILD -#define 
ATTACH_CURRENT_THREAD_TO_JVM(env) \ - do { \ - if (pWrapper->mJvm->AttachCurrentThread(&env, NULL) != 0) { \ - DLOGE("Fail to attache to JVM!");\ - return STATUS_INVALID_OPERATION; \ - } \ +#define ATTACH_CURRENT_THREAD_TO_JVM(env) \ + do { \ + if (pWrapper->mJvm->AttachCurrentThread(&env, NULL) != 0) { \ + DLOGE("Fail to attache to JVM!"); \ + return STATUS_INVALID_OPERATION; \ + } \ } while (FALSE) #else -#define ATTACH_CURRENT_THREAD_TO_JVM(env) \ - do { \ - if (pWrapper->mJvm->AttachCurrentThread((PVOID*) &env, NULL) != 0) { \ - DLOGE("Fail to attache to JVM!");\ - return STATUS_INVALID_OPERATION; \ - } \ +#define ATTACH_CURRENT_THREAD_TO_JVM(env) \ + do { \ + if (pWrapper->mJvm->AttachCurrentThread((PVOID*) &env, NULL) != 0) { \ + DLOGE("Fail to attache to JVM!"); \ + return STATUS_INVALID_OPERATION; \ + } \ } while (FALSE) #endif -class KinesisVideoClientWrapper -{ +class KinesisVideoClientWrapper { CLIENT_HANDLE mClientHandle; - static JavaVM *mJvm; // scope revised to static to make it accessible from static function- logPrintFunc + static JavaVM* mJvm; // scope revised to static to make it accessible from static function- logPrintFunc static jobject mGlobalJniObjRef; // scope revised to static to make it accessible from static function- logPrintFunc ClientCallbacks mClientCallbacks; DeviceInfo mDeviceInfo; @@ -124,55 +123,23 @@ class KinesisVideoClientWrapper static STATUS broadcastConditionVariableFunc(UINT64, CVAR); static STATUS waitConditionVariableFunc(UINT64, CVAR, MUTEX, UINT64); static VOID freeConditionVariableFunc(UINT64, CVAR); - static STATUS createStreamFunc(UINT64, - PCHAR, - PCHAR, - PCHAR, - PCHAR, - UINT64, - PServiceCallContext); - static STATUS describeStreamFunc(UINT64, - PCHAR, - PServiceCallContext); - static STATUS getStreamingEndpointFunc(UINT64, - PCHAR, - PCHAR, - PServiceCallContext); - static STATUS getStreamingTokenFunc(UINT64, - PCHAR, - STREAM_ACCESS_MODE, - PServiceCallContext); - static STATUS putStreamFunc(UINT64, - PCHAR, 
- PCHAR, - UINT64, - BOOL, - BOOL, - PCHAR, - PServiceCallContext); - - static STATUS tagResourceFunc(UINT64, - PCHAR, - UINT32, - PTag, - PServiceCallContext); - - static STATUS clientReadyFunc(UINT64, - CLIENT_HANDLE); - - static STATUS createDeviceFunc(UINT64, - PCHAR, - PServiceCallContext); - - static STATUS deviceCertToTokenFunc(UINT64, - PCHAR, - PServiceCallContext); + static STATUS createStreamFunc(UINT64, PCHAR, PCHAR, PCHAR, PCHAR, UINT64, PServiceCallContext); + static STATUS describeStreamFunc(UINT64, PCHAR, PServiceCallContext); + static STATUS getStreamingEndpointFunc(UINT64, PCHAR, PCHAR, PServiceCallContext); + static STATUS getStreamingTokenFunc(UINT64, PCHAR, STREAM_ACCESS_MODE, PServiceCallContext); + static STATUS putStreamFunc(UINT64, PCHAR, PCHAR, UINT64, BOOL, BOOL, PCHAR, PServiceCallContext); + + static STATUS tagResourceFunc(UINT64, PCHAR, UINT32, PTag, PServiceCallContext); + + static STATUS clientReadyFunc(UINT64, CLIENT_HANDLE); + + static STATUS createDeviceFunc(UINT64, PCHAR, PServiceCallContext); + + static STATUS deviceCertToTokenFunc(UINT64, PCHAR, PServiceCallContext); static VOID logPrintFunc(UINT32, PCHAR, PCHAR, ...); -public: - KinesisVideoClientWrapper(JNIEnv* env, - jobject thiz, - jobject deviceInfo); + public: + KinesisVideoClientWrapper(JNIEnv* env, jobject thiz, jobject deviceInfo); ~KinesisVideoClientWrapper(); SyncMutex& getSyncLock(); @@ -199,7 +166,8 @@ class KinesisVideoClientWrapper void deviceCertToTokenResult(jlong clientHandle, jint httpStatusCode, jbyteArray token, jint tokenSize, jlong expiration); void kinesisVideoStreamFragmentAck(jlong streamHandle, jlong uploadHandle, jobject fragmentAck); void kinesisVideoStreamParseFragmentAck(jlong streamHandle, jlong uploadHandle, jstring ack); -private: + + private: BOOL setCallbacks(JNIEnv* env, jobject thiz); }; diff --git a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/Parameters.h b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/Parameters.h 
index 119c6626..c48e5977 100644 --- a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/Parameters.h +++ b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/Parameters.h @@ -15,6 +15,6 @@ BOOL setStreamDescription(JNIEnv* env, jobject streamDescription, PStreamDescrip BOOL setStreamingEndpoint(JNIEnv* env, jstring streamingEndpoint, PCHAR pEndpoint); BOOL setStreamDataBuffer(JNIEnv* env, jobject dataBuffer, UINT32 offset, PBYTE* ppBuffer); BOOL releaseStreamDataBuffer(JNIEnv* env, jobject dataBuffer, UINT32 offset, PBYTE pBuffer); -BOOL setTags(JNIEnv *env, jobjectArray tagArray, PTag* ppTags, PUINT32 pTagCount); +BOOL setTags(JNIEnv* env, jobjectArray tagArray, PTag* ppTags, PUINT32 pTagCount); VOID releaseTags(PTag tags); #endif // __KINESIS_VIDEO_PARAMETERS_CONVERSION_H__ diff --git a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/SyncMutex.h b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/SyncMutex.h index 2dd0a394..e925a283 100644 --- a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/SyncMutex.h +++ b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/SyncMutex.h @@ -11,8 +11,7 @@ static const size_t gMaxMutexDescriptionSize = 100; static const char* const gDefaultMutexDescription = "mutex"; -class SyncMutex -{ +class SyncMutex { // Logging behavior char mMutexDescription[gMaxMutexDescriptionSize]; bool mLogsEnabled; @@ -21,11 +20,10 @@ class SyncMutex MUTEX mMutex; CVAR mCondition; - SyncMutex(const SyncMutex&); // Prevent copies + SyncMutex(const SyncMutex&); // Prevent copies SyncMutex& operator=(const SyncMutex&); // Prevent assignment -public: - + public: SyncMutex() { initialize(); @@ -80,8 +78,7 @@ class SyncMutex // Acquire the mutex. void lock(const char* function) { - if (mLogsEnabled) - { + if (mLogsEnabled) { DLOGI("%s: locking %s", function, mMutexDescription); } @@ -91,8 +88,7 @@ class SyncMutex // Release the mutex. 
void unlock(const char* function) { - if (mLogsEnabled) - { + if (mLogsEnabled) { DLOGI("%s: unlocking %s", function, mMutexDescription); } @@ -105,8 +101,7 @@ class SyncMutex MUTEX_LOCK(mMutex); UINT64 before = 0; - if (mLogsEnabled) - { + if (mLogsEnabled) { DLOGI("%s: waiting on %s", function, mMutexDescription); UINT64 before = GETTIME(); } @@ -114,8 +109,7 @@ class SyncMutex int status = CVAR_WAIT(mCondition, mMutex, INFINITE_TIME_VALUE); CHECK_EXT(status == 0, "pthread_cond_wait() returned Unix errno %d", status); - if (mLogsEnabled) - { + if (mLogsEnabled) { UINT64 after = GETTIME(); UINT64 elapsed_ms = (after - before) / HUNDREDS_OF_NANOS_IN_A_MILLISECOND; DLOGI("%s: waited %ldms for %s", function, elapsed_ms, mMutexDescription); @@ -127,8 +121,7 @@ class SyncMutex // Signal the condition variable, allowing all blocked threads to continue. void notifyAll(const char* function) { - if (mLogsEnabled) - { + if (mLogsEnabled) { DLOGI("%s: signalling %s", function, mMutexDescription); } @@ -142,16 +135,23 @@ class SyncMutex * exited (goto, return, etc). 
*/ class Autolock { - SyncMutex& mLock; const char* mFunction; - public: - Autolock(SyncMutex& mutex, const char* function) : mLock(mutex), mFunction(function) {mLock.lock(function);} - Autolock(SyncMutex* mutex, const char* function) : mLock(*mutex), mFunction(function) {mLock.lock(function);} - ~Autolock() {mLock.unlock(mFunction);} + public: + Autolock(SyncMutex& mutex, const char* function) : mLock(mutex), mFunction(function) + { + mLock.lock(function); + } + Autolock(SyncMutex* mutex, const char* function) : mLock(*mutex), mFunction(function) + { + mLock.lock(function); + } + ~Autolock() + { + mLock.unlock(mFunction); + } }; - }; #endif // __SYNC_MUTEX_H__ diff --git a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/TimedSemaphore.h b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/TimedSemaphore.h index dfda7e97..d25a4182 100644 --- a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/TimedSemaphore.h +++ b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/TimedSemaphore.h @@ -6,16 +6,14 @@ #ifndef __TIMED_SEMAPHORE_H__ #define __TIMED_SEMAPHORE_H__ -class TimedSemaphore -{ - TimedSemaphore(const TimedSemaphore&); // Prevent copy construction +class TimedSemaphore { + TimedSemaphore(const TimedSemaphore&); // Prevent copy construction TimedSemaphore& operator=(const TimedSemaphore&); // Prevent assignment MUTEX mMutex; CVAR mCond; UINT32 mCount; -public: - + public: TimedSemaphore() : mCount(0) { mMutex = MUTEX_CREATE(FALSE); @@ -35,10 +33,8 @@ class TimedSemaphore { MUTEX_LOCK(mMutex); - while (mCount <= 0) - { - if (STATUS_FAILED(CVAR_WAIT(mCond, mMutex, INFINITE_TIME_VALUE))) - { + while (mCount <= 0) { + if (STATUS_FAILED(CVAR_WAIT(mCond, mMutex, INFINITE_TIME_VALUE))) { CRASH("Fatal error in semaphore wait"); break; } @@ -57,8 +53,7 @@ class TimedSemaphore MUTEX_LOCK(mMutex); bool sem = (mCount > 0); - if (sem) - { + if (sem) { mCount--; } @@ -78,20 +73,17 @@ class TimedSemaphore // Create default units duration UINT64 
duration = relTimeOutMs * HUNDREDS_OF_NANOS_IN_A_MILLISECOND; - while (mCount == 0) - { + while (mCount == 0) { STATUS status = CVAR_WAIT(mCond, mMutex, duration); - if (status == STATUS_OPERATION_TIMED_OUT) - { + if (status == STATUS_OPERATION_TIMED_OUT) { retVal = false; break; } // Any other failure is fatal - if (STATUS_FAILED(status)) - { + if (STATUS_FAILED(status)) { CRASH("Fatal error in timed semaphore wait"); - + // Unreachable - just to keep some static code analysis tools happy break; } diff --git a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni.h b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni.h index f4b8120a..42cae5aa 100644 --- a/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni.h +++ b/src/JNI/include/com/amazonaws/kinesis/video/producer/jni/com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni.h @@ -15,200 +15,209 @@ extern "C" { * Method: getNativeLibraryVersion * Signature: ()Ljava/lang/String; */ -JNIEXPORT jstring JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeLibraryVersion - (JNIEnv *, jobject); +JNIEXPORT jstring JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeLibraryVersion(JNIEnv*, + jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: getNativeCodeCompileTime * Signature: ()Ljava/lang/String; */ -JNIEXPORT jstring JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeCodeCompileTime - (JNIEnv *, jobject); +JNIEXPORT jstring JNICALL 
Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getNativeCodeCompileTime(JNIEnv*, + jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: createKinesisVideoClient * Signature: (Lcom/amazonaws/kinesisvideo/producer/DeviceInfo;)J */ -JNIEXPORT jlong JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoClient - (JNIEnv *, jobject, jobject); +JNIEXPORT jlong JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoClient(JNIEnv*, jobject, + jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: freeKinesisVideoClient * Signature: (J)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoClient - (JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoClient(JNIEnv*, jobject, + jlong); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: stopKinesisVideoStreams * Signature: (J)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStreams - (JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStreams(JNIEnv*, jobject, + jlong); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: stopKinesisVideoStream * Signature: (JJ)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStream -(JNIEnv *, jobject, jlong, jlong); +JNIEXPORT void JNICALL 
Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_stopKinesisVideoStream(JNIEnv*, jobject, + jlong, jlong); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: freeKinesisVideoStream * Signature: (JJ)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoStream - (JNIEnv *, jobject, jlong, jlong); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_freeKinesisVideoStream(JNIEnv*, jobject, + jlong, jlong); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: getKinesisVideoMetrics * Signature: (JLcom/amazonaws/kinesisvideo/internal/producer/KinesisVideoMetrics;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoMetrics -(JNIEnv *, jobject, jlong, jobject); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoMetrics(JNIEnv*, jobject, + jlong, jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: getKinesisVideoStreamMetrics * Signature: (JJLcom/amazonaws/kinesisvideo/internal/producer/KinesisVideoStreamMetrics;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamMetrics -(JNIEnv *, jobject, jlong, jlong, jobject); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamMetrics(JNIEnv*, + jobject, + jlong, jlong, + jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: createKinesisVideoStream * Signature: (JLcom/amazonaws/kinesisvideo/producer/StreamInfo;)J */ -JNIEXPORT jlong JNICALL 
Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream - (JNIEnv *, jobject, jlong, jobject); +JNIEXPORT jlong JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createKinesisVideoStream(JNIEnv*, jobject, + jlong, jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: putKinesisVideoFrame * Signature: (JJLcom/amazonaws/kinesisvideo/producer/KinesisVideoFrame;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFrame - (JNIEnv *, jobject, jlong, jlong, jobject); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFrame(JNIEnv*, jobject, + jlong, jlong, + jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: putKinesisVideoFragmentMetadata * Signature: (JJLjava/lang/String;Ljava/lang/String;Z)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFragmentMetadata - (JNIEnv*, jobject, jlong, jlong, jstring, jstring, jboolean); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putKinesisVideoFragmentMetadata( + JNIEnv*, jobject, jlong, jlong, jstring, jstring, jboolean); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: kinesisVideoStreamFragmentAck * Signature: (JJJLcom/amazonaws/kinesisvideo/producer/KinesisVideoFragmentAck;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFragmentAck - (JNIEnv *, jobject, jlong, jlong, jlong, jobject); +JNIEXPORT void JNICALL 
Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFragmentAck( + JNIEnv*, jobject, jlong, jlong, jlong, jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: kinesisVideoStreamParseFragmentAck * Signature: (JJJLjava/lang/String;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamParseFragmentAck -(JNIEnv *, jobject, jlong, jlong, jlong, jstring); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamParseFragmentAck( + JNIEnv*, jobject, jlong, jlong, jlong, jstring); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: describeStreamResultEvent * Signature: (JJILcom/amazonaws/kinesisvideo/producer/StreamDescription;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_describeStreamResultEvent - (JNIEnv *, jobject, jlong, jlong, jint, jobject); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_describeStreamResultEvent(JNIEnv*, jobject, + jlong, jlong, + jint, jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: getStreamingEndpointResultEvent * Signature: (JJILjava/lang/String;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingEndpointResultEvent - (JNIEnv *, jobject, jlong, jlong, jint, jstring); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingEndpointResultEvent( + JNIEnv*, jobject, jlong, jlong, jint, jstring); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: getStreamingTokenResultEvent * 
Signature: (JJI[BIJ)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingTokenResultEvent - (JNIEnv *, jobject, jlong, jlong, jint, jobject, jint, jlong); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getStreamingTokenResultEvent( + JNIEnv*, jobject, jlong, jlong, jint, jobject, jint, jlong); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: createStreamResultEvent * Signature: (JJILjava/lang/String;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createStreamResultEvent - (JNIEnv *, jobject, jlong, jlong, jint, jstring); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createStreamResultEvent(JNIEnv*, jobject, + jlong, jlong, jint, + jstring); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: putStreamResultEvent * Signature: (JJIJ)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putStreamResultEvent - (JNIEnv *, jobject, jlong, jlong, jint, jlong); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_putStreamResultEvent(JNIEnv*, jobject, + jlong, jlong, jint, + jlong); /* -* Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni -* Method: tagResourceResultEvent -* Signature: (JJI)V -*/ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_tagResourceResultEvent - (JNIEnv *, jobject, jlong, jlong, jint); + * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni + * Method: tagResourceResultEvent + * Signature: (JJI)V + */ +JNIEXPORT void JNICALL 
Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_tagResourceResultEvent(JNIEnv*, jobject, + jlong, jlong, jint); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: getKinesisVideoStreamData * Signature: (JJJ[BIILcom/amazonaws/kinesisvideo/internal/producer/ReadResult;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamData - (JNIEnv *, jobject, jlong, jlong, jlong, jobject, jint, jint, jobject); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_getKinesisVideoStreamData( + JNIEnv*, jobject, jlong, jlong, jlong, jobject, jint, jint, jobject); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: kinesisVideoStreamFormatChanged * Signature: (JJ[BJ)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFormatChanged - (JNIEnv *, jobject, jlong, jlong, jobject, jlong); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamFormatChanged( + JNIEnv*, jobject, jlong, jlong, jobject, jlong); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: createDeviceResultEvent * Signature: (JJILjava/lang/String;)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createDeviceResultEvent - (JNIEnv *, jobject, jlong, jlong, jint, jstring); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_createDeviceResultEvent(JNIEnv*, jobject, + jlong, jlong, jint, + jstring); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: deviceCertToTokenResultEvent * Signature: 
(JJI[BIJ)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_deviceCertToTokenResultEvent - (JNIEnv *, jobject, jlong, jlong, jint, jobject, jint, jlong); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_deviceCertToTokenResultEvent( + JNIEnv*, jobject, jlong, jlong, jint, jobject, jint, jlong); /* * Class: com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni * Method: kinesisVideoStreamTerminated * Signature: (JJJI)V */ -JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamTerminated -(JNIEnv *, jobject, jlong, jlong, jlong, jint); +JNIEXPORT void JNICALL Java_com_amazonaws_kinesisvideo_internal_producer_jni_NativeKinesisVideoProducerJni_kinesisVideoStreamTerminated(JNIEnv*, + jobject, + jlong, jlong, + jlong, jint); #ifdef __cplusplus } diff --git a/src/KinesisVideoProducer.cpp b/src/KinesisVideoProducer.cpp index 69c57236..490ad375 100644 --- a/src/KinesisVideoProducer.cpp +++ b/src/KinesisVideoProducer.cpp @@ -1,7 +1,10 @@ #include "KinesisVideoProducer.h" #include "Logger.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { LOGGER_TAG("com.amazonaws.kinesis.video"); @@ -9,33 +12,22 @@ using std::shared_ptr; using std::stringstream; using std::unique_ptr; -unique_ptr KinesisVideoProducer::create( - unique_ptr device_info_provider, - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credential_provider, - const std::string ®ion, - const std::string &control_plane_uri, - const std::string &user_agent_name) { - - unique_ptr callback_provider(new DefaultCallbackProvider(std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credential_provider), - region, - control_plane_uri, 
- user_agent_name, - device_info_provider->getCustomUserAgent(), - EMPTY_STRING, - false, - DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD)); +unique_ptr KinesisVideoProducer::create(unique_ptr device_info_provider, + unique_ptr client_callback_provider, + unique_ptr stream_callback_provider, + unique_ptr credential_provider, const std::string& region, + const std::string& control_plane_uri, const std::string& user_agent_name) +{ + unique_ptr callback_provider(new DefaultCallbackProvider( + std::move(client_callback_provider), std::move(stream_callback_provider), std::move(credential_provider), region, control_plane_uri, + user_agent_name, device_info_provider->getCustomUserAgent(), EMPTY_STRING, false, DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD)); return KinesisVideoProducer::create(std::move(device_info_provider), std::move(callback_provider)); } -unique_ptr KinesisVideoProducer::create( - unique_ptr device_info_provider, - unique_ptr callback_provider) { - +unique_ptr KinesisVideoProducer::create(unique_ptr device_info_provider, + unique_ptr callback_provider) +{ CLIENT_HANDLE client_handle; DeviceInfo device_info = device_info_provider->getDeviceInfo(); @@ -57,60 +49,39 @@ unique_ptr KinesisVideoProducer::create( return kinesis_video_producer; } -unique_ptr KinesisVideoProducer::createSync( - unique_ptr device_info_provider, - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credential_provider, - const std::string ®ion, - const std::string &control_plane_uri, - const std::string &user_agent_name, - bool is_caching_endpoint, - uint64_t caching_update_period) { - - unique_ptr callback_provider(new DefaultCallbackProvider(std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credential_provider), - region, - control_plane_uri, - user_agent_name, - device_info_provider->getCustomUserAgent(), - device_info_provider->getCertPath(), - is_caching_endpoint, - caching_update_period)); +unique_ptr 
KinesisVideoProducer::createSync(unique_ptr device_info_provider, + unique_ptr client_callback_provider, + unique_ptr stream_callback_provider, + unique_ptr credential_provider, const std::string& region, + const std::string& control_plane_uri, const std::string& user_agent_name, + bool is_caching_endpoint, uint64_t caching_update_period) +{ + unique_ptr callback_provider( + new DefaultCallbackProvider(std::move(client_callback_provider), std::move(stream_callback_provider), std::move(credential_provider), region, + control_plane_uri, user_agent_name, device_info_provider->getCustomUserAgent(), + device_info_provider->getCertPath(), is_caching_endpoint, caching_update_period)); return KinesisVideoProducer::createSync(std::move(device_info_provider), std::move(callback_provider)); } -unique_ptr KinesisVideoProducer::createSync( - unique_ptr device_info_provider, - unique_ptr client_callback_provider, - unique_ptr stream_callback_provider, - unique_ptr credential_provider, - API_CALL_CACHE_TYPE api_call_caching, - const std::string ®ion, - const std::string &control_plane_uri, - const std::string &user_agent_name, - uint64_t caching_update_period) { - - unique_ptr callback_provider(new DefaultCallbackProvider(std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credential_provider), - region, - control_plane_uri, - user_agent_name, - device_info_provider->getCustomUserAgent(), - device_info_provider->getCertPath(), - api_call_caching, - caching_update_period)); +unique_ptr KinesisVideoProducer::createSync(unique_ptr device_info_provider, + unique_ptr client_callback_provider, + unique_ptr stream_callback_provider, + unique_ptr credential_provider, + API_CALL_CACHE_TYPE api_call_caching, const std::string& region, + const std::string& control_plane_uri, const std::string& user_agent_name, + uint64_t caching_update_period) +{ + unique_ptr callback_provider(new DefaultCallbackProvider( + std::move(client_callback_provider), 
std::move(stream_callback_provider), std::move(credential_provider), region, control_plane_uri, + user_agent_name, device_info_provider->getCustomUserAgent(), device_info_provider->getCertPath(), api_call_caching, caching_update_period)); return KinesisVideoProducer::createSync(std::move(device_info_provider), std::move(callback_provider)); } -unique_ptr KinesisVideoProducer::createSync( - unique_ptr device_info_provider, - unique_ptr callback_provider) { - +unique_ptr KinesisVideoProducer::createSync(unique_ptr device_info_provider, + unique_ptr callback_provider) +{ CLIENT_HANDLE client_handle; DeviceInfo device_info = device_info_provider->getDeviceInfo(); @@ -132,21 +103,22 @@ unique_ptr KinesisVideoProducer::createSync( return kinesis_video_producer; } -shared_ptr KinesisVideoProducer::createStream(unique_ptr stream_definition) { +shared_ptr KinesisVideoProducer::createStream(unique_ptr stream_definition) +{ assert(stream_definition.get()); if (stream_definition->getTrackCount() > MAX_SUPPORTED_TRACK_COUNT_PER_STREAM) { LOG_AND_THROW("Exceeded maximum track count: " + std::to_string(MAX_SUPPORTED_TRACK_COUNT_PER_STREAM)); } StreamInfo stream_info = stream_definition->getStreamInfo(); - std::shared_ptr kinesis_video_stream(new KinesisVideoStream(*this, stream_definition->getStreamName()), KinesisVideoStream::videoStreamDeleter); + std::shared_ptr kinesis_video_stream(new KinesisVideoStream(*this, stream_definition->getStreamName()), + KinesisVideoStream::videoStreamDeleter); STATUS status = createKinesisVideoStream(client_handle_, &stream_info, kinesis_video_stream->getStreamHandle()); if (STATUS_FAILED(status)) { stringstream status_strstrm; status_strstrm << std::hex << status; - LOG_AND_THROW(" Unable to create Kinesis Video stream. " + stream_definition->getStreamName() + - " Error status: 0x" + status_strstrm.str()); + LOG_AND_THROW(" Unable to create Kinesis Video stream. 
" + stream_definition->getStreamName() + " Error status: 0x" + status_strstrm.str()); } // Add to the map @@ -155,22 +127,22 @@ shared_ptr KinesisVideoProducer::createStream(unique_ptr KinesisVideoProducer::createStreamSync(unique_ptr stream_definition) { - +shared_ptr KinesisVideoProducer::createStreamSync(unique_ptr stream_definition) +{ assert(stream_definition.get()); if (stream_definition->getTrackCount() > MAX_SUPPORTED_TRACK_COUNT_PER_STREAM) { LOG_AND_THROW("Exceeded maximum track count: " + std::to_string(MAX_SUPPORTED_TRACK_COUNT_PER_STREAM)); } StreamInfo stream_info = stream_definition->getStreamInfo(); - std::shared_ptr kinesis_video_stream(new KinesisVideoStream(*this, stream_definition->getStreamName()), KinesisVideoStream::videoStreamDeleter); + std::shared_ptr kinesis_video_stream(new KinesisVideoStream(*this, stream_definition->getStreamName()), + KinesisVideoStream::videoStreamDeleter); STATUS status = createKinesisVideoStreamSync(client_handle_, &stream_info, kinesis_video_stream->getStreamHandle()); if (STATUS_FAILED(status)) { stringstream status_strstrm; status_strstrm << std::hex << status; - LOG_AND_THROW(" Unable to create Kinesis Video stream. " + stream_definition->getStreamName() + - " Error status: 0x" + status_strstrm.str()); + LOG_AND_THROW(" Unable to create Kinesis Video stream. 
" + stream_definition->getStreamName() + " Error status: 0x" + status_strstrm.str()); } // Add to the map @@ -179,7 +151,8 @@ shared_ptr KinesisVideoProducer::createStreamSync(unique_ptr return kinesis_video_stream; } -void KinesisVideoProducer::freeStream(std::shared_ptr kinesis_video_stream) { +void KinesisVideoProducer::freeStream(std::shared_ptr kinesis_video_stream) +{ if (nullptr == kinesis_video_stream) { LOG_AND_THROW("Kinesis Video stream can't be null"); } @@ -197,7 +170,8 @@ void KinesisVideoProducer::freeStream(std::shared_ptr kinesi active_streams_.remove(stream_handle); } -void KinesisVideoProducer::freeStreams() { +void KinesisVideoProducer::freeStreams() +{ { std::lock_guard lock(free_client_mutex_); auto num_streams = active_streams_.getMap().size(); @@ -207,15 +181,15 @@ void KinesisVideoProducer::freeStreams() { try { freeStream(stream); LOG_INFO("Completed freeing stream " << stream->stream_name_); - } catch (std::runtime_error &err) { + } catch (std::runtime_error& err) { LOG_ERROR("Failed to free stream " << stream->stream_name_ << ". 
Error: " << err.what()); } - } } } -KinesisVideoProducer::~KinesisVideoProducer() { +KinesisVideoProducer::~KinesisVideoProducer() +{ // Free the streams freeStreams(); @@ -224,7 +198,8 @@ KinesisVideoProducer::~KinesisVideoProducer() { LOG_INFO("Completed freeing client"); } -KinesisVideoProducerMetrics KinesisVideoProducer::getMetrics() const { +KinesisVideoProducerMetrics KinesisVideoProducer::getMetrics() const +{ STATUS status = ::getKinesisVideoMetrics(client_handle_, (PClientMetrics) client_metrics_.getRawMetrics()); LOG_AND_THROW_IF(STATUS_FAILED(status), "Failed to get producer client metrics with: " << status); return client_metrics_; diff --git a/src/KinesisVideoProducer.h b/src/KinesisVideoProducer.h index da10236f..54d3b4f2 100644 --- a/src/KinesisVideoProducer.h +++ b/src/KinesisVideoProducer.h @@ -23,7 +23,10 @@ #include "com/amazonaws/kinesis/video/client/Include.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** * Client ready timeout duration. @@ -40,64 +43,56 @@ namespace com { namespace amazonaws { namespace kinesis { namespace video { * We will add extra 10 milliseconds to account for thread scheduling to ensure the callback is complete. */ #define CLIENT_STREAM_CLOSED_CALLBACK_AWAIT_TIME_MILLIS (10 + TIMEOUT_AFTER_STREAM_STOPPED + TIMEOUT_WAIT_FOR_CURL_BUFFER) -#define CONTROL_PLANE_URI_ENV_VAR ((PCHAR) "CONTROL_PLANE_URI") +#define CONTROL_PLANE_URI_ENV_VAR ((PCHAR) "CONTROL_PLANE_URI") /** -* Kinesis Video client interface for real time streaming. The structure of this class is that each instance of type -* is a singleton where T is the implementation of the DeviceInfoProvider interface and U is the implementation of the -* CallbackProvider interface. The reason for using the singleton is that Kinesis Video client has logic that benefits -* throughput and network congestion avoidance by sharing state across multiple streams. 
We balance this benefit with -* flexibility in the implementation of the application logic that provide the device information and Kinesis Video -* user-implemented application callbacks. The division by type allows for easy migrations to new callback -* implementations, such as different network thread implementations or different handling of latency pressure by the -* application without perturbing the existing system. -* -* Example Usage: -* @code: -* auto client(KinesisVideoClient::getInstance()); -* @endcode -* -*/ + * Kinesis Video client interface for real time streaming. The structure of this class is that each instance of type + * is a singleton where T is the implementation of the DeviceInfoProvider interface and U is the implementation of the + * CallbackProvider interface. The reason for using the singleton is that Kinesis Video client has logic that benefits + * throughput and network congestion avoidance by sharing state across multiple streams. We balance this benefit with + * flexibility in the implementation of the application logic that provide the device information and Kinesis Video + * user-implemented application callbacks. The division by type allows for easy migrations to new callback + * implementations, such as different network thread implementations or different handling of latency pressure by the + * application without perturbing the existing system. 
+ * + * Example Usage: + * @code: + * auto client(KinesisVideoClient::getInstance()); + * @endcode + * + */ class KinesisVideoStream; class KinesisVideoProducer { -public: - static std::unique_ptr create( - std::unique_ptr device_info_provider, - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credential_provider, - const std::string ®ion = DEFAULT_AWS_REGION, - const std::string &control_plane_uri = "", - const std::string &user_agent_name = DEFAULT_USER_AGENT_NAME); - - static std::unique_ptr create( - std::unique_ptr device_info_provider, - std::unique_ptr callback_provider); - - static std::unique_ptr createSync( - std::unique_ptr device_info_provider, - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credential_provider, - const std::string ®ion = DEFAULT_AWS_REGION, - const std::string &control_plane_uri = "", - const std::string &user_agent_name = DEFAULT_USER_AGENT_NAME, - bool is_caching_endpoint = false, - uint64_t caching_update_period = DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD); - - static std::unique_ptr createSync( - std::unique_ptr device_info_provider, - std::unique_ptr client_callback_provider, - std::unique_ptr stream_callback_provider, - std::unique_ptr credential_provider, - API_CALL_CACHE_TYPE api_call_caching = API_CALL_CACHE_TYPE_ALL, - const std::string ®ion = DEFAULT_AWS_REGION, - const std::string &control_plane_uri = "", - const std::string &user_agent_name = DEFAULT_USER_AGENT_NAME, - uint64_t caching_update_period = DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD); - - static std::unique_ptr createSync( - std::unique_ptr device_info_provider, - std::unique_ptr callback_provider); + public: + static std::unique_ptr create(std::unique_ptr device_info_provider, + std::unique_ptr client_callback_provider, + std::unique_ptr stream_callback_provider, + std::unique_ptr credential_provider, + const std::string& region = DEFAULT_AWS_REGION, const 
std::string& control_plane_uri = "", + const std::string& user_agent_name = DEFAULT_USER_AGENT_NAME); + + static std::unique_ptr create(std::unique_ptr device_info_provider, + std::unique_ptr callback_provider); + + static std::unique_ptr createSync(std::unique_ptr device_info_provider, + std::unique_ptr client_callback_provider, + std::unique_ptr stream_callback_provider, + std::unique_ptr credential_provider, + const std::string& region = DEFAULT_AWS_REGION, const std::string& control_plane_uri = "", + const std::string& user_agent_name = DEFAULT_USER_AGENT_NAME, + bool is_caching_endpoint = false, + uint64_t caching_update_period = DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD); + + static std::unique_ptr createSync(std::unique_ptr device_info_provider, + std::unique_ptr client_callback_provider, + std::unique_ptr stream_callback_provider, + std::unique_ptr credential_provider, + API_CALL_CACHE_TYPE api_call_caching = API_CALL_CACHE_TYPE_ALL, + const std::string& region = DEFAULT_AWS_REGION, const std::string& control_plane_uri = "", + const std::string& user_agent_name = DEFAULT_USER_AGENT_NAME, + uint64_t caching_update_period = DEFAULT_ENDPOINT_CACHE_UPDATE_PERIOD); + + static std::unique_ptr createSync(std::unique_ptr device_info_provider, + std::unique_ptr callback_provider); virtual ~KinesisVideoProducer(); @@ -144,16 +139,17 @@ class KinesisVideoProducer { /** * Returns the raw client handle */ - CLIENT_HANDLE getClientHandle() const { + CLIENT_HANDLE getClientHandle() const + { return client_handle_; } -protected: - + protected: /** * Frees the resources in the underlying Kinesis Video client. */ - void freeKinesisVideoClient() { + void freeKinesisVideoClient() + { if (nullptr != callback_provider_) { callback_provider_->shutdown(); } @@ -164,7 +160,8 @@ class KinesisVideoProducer { /** * Initializes an empty class. The real initialization happens through the static functions. 
*/ - KinesisVideoProducer() : client_handle_(INVALID_CLIENT_HANDLE_VALUE) { + KinesisVideoProducer() : client_handle_(INVALID_CLIENT_HANDLE_VALUE) + { } /** diff --git a/src/KinesisVideoProducerMetrics.h b/src/KinesisVideoProducerMetrics.h index 57824971..f1528b6c 100644 --- a/src/KinesisVideoProducerMetrics.h +++ b/src/KinesisVideoProducerMetrics.h @@ -4,19 +4,21 @@ #include "com/amazonaws/kinesis/video/client/Include.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** -* Wraps around the client metrics class -*/ + * Wraps around the client metrics class + */ class KinesisVideoProducerMetrics { - -public: - + public: /** * Default constructor */ - KinesisVideoProducerMetrics() { + KinesisVideoProducerMetrics() + { memset(&client_metrics_, 0x00, sizeof(::ClientMetrics)); client_metrics_.version = CLIENT_METRICS_CURRENT_VERSION; } @@ -24,57 +26,65 @@ class KinesisVideoProducerMetrics { /** * Returns the overall content store size in bytes */ - uint64_t getContentStoreSizeSize() const { + uint64_t getContentStoreSizeSize() const + { return client_metrics_.contentStoreSize; } /** * Returns the content store available size in bytes */ - uint64_t getContentStoreAvailableSize() const { + uint64_t getContentStoreAvailableSize() const + { return client_metrics_.contentStoreAvailableSize; } /** * Returns the content store allocated size in bytes */ - uint64_t getContentStoreAllocatedSize() const { + uint64_t getContentStoreAllocatedSize() const + { return client_metrics_.contentStoreAllocatedSize; } /** * Returns the size in bytes allocated for all content view object for all streams */ - uint64_t getTotalContentViewsSize() const { + uint64_t getTotalContentViewsSize() const + { return client_metrics_.totalContentViewsSize; } /** * Returns the observed frame rate in frames per second for all streams */ - uint64_t getTotalFrameRate() const { + uint64_t 
getTotalFrameRate() const + { return client_metrics_.totalFrameRate; } /** * Returns the elementary frame rate in frames per second for all streams */ - uint64_t getTotalElementaryFrameRate() const { + uint64_t getTotalElementaryFrameRate() const + { return client_metrics_.totalElementaryFrameRate; } /** * Returns the observed transfer rate in bytes per second for all streams */ - uint64_t getTotalTransferRate() const { + uint64_t getTotalTransferRate() const + { return client_metrics_.totalTransferRate; } - const ::ClientMetrics* getRawMetrics() const { + const ::ClientMetrics* getRawMetrics() const + { return &client_metrics_; } -private: + private: /** * Underlying metrics object */ diff --git a/src/KinesisVideoStream.cpp b/src/KinesisVideoStream.cpp index b7da5eec..7a38f253 100644 --- a/src/KinesisVideoStream.cpp +++ b/src/KinesisVideoStream.cpp @@ -2,15 +2,17 @@ #include "KinesisVideoStream.h" #include "KinesisVideoStreamMetrics.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { LOGGER_TAG("com.amazonaws.kinesis.video"); KinesisVideoStream::KinesisVideoStream(const KinesisVideoProducer& kinesis_video_producer, const std::string stream_name) - : stream_handle_(INVALID_STREAM_HANDLE_VALUE), - stream_name_(stream_name), - kinesis_video_producer_(kinesis_video_producer), - debug_dump_frame_info_(false) { + : stream_handle_(INVALID_STREAM_HANDLE_VALUE), stream_name_(stream_name), kinesis_video_producer_(kinesis_video_producer), + debug_dump_frame_info_(false) +{ LOG_INFO("Creating Kinesis Video Stream " << stream_name_); // the handle is NULL to start. We will set it later once Kinesis Video PIC gives us a stream handle. 
@@ -19,10 +21,11 @@ KinesisVideoStream::KinesisVideoStream(const KinesisVideoProducer& kinesis_video } } -bool KinesisVideoStream::putFrame(KinesisVideoFrame& frame) const { +bool KinesisVideoStream::putFrame(KinesisVideoFrame& frame) const +{ if (debug_dump_frame_info_) { - LOG_DEBUG("[" << this->stream_name_ << "] pts: " << frame.presentationTs << ", dts: " << frame.decodingTs << ", duration: " << frame.duration << ", size: " << frame.size << ", trackId: " << frame.trackId - << ", isKey: " << CHECK_FRAME_FLAG_KEY_FRAME(frame.flags)); + LOG_DEBUG("[" << this->stream_name_ << "] pts: " << frame.presentationTs << ", dts: " << frame.decodingTs << ", duration: " << frame.duration + << ", size: " << frame.size << ", trackId: " << frame.trackId << ", isKey: " << CHECK_FRAME_FLAG_KEY_FRAME(frame.flags)); } assert(0 != stream_handle_); @@ -43,20 +46,18 @@ bool KinesisVideoStream::putFrame(KinesisVideoFrame& frame) const { auto transfer_rate = 8 * stream_metrics.getCurrentTransferRate(); LOG_DEBUG("Kinesis Video client and stream metrics for " - << this->stream_name_ - << "\n\t>> Overall storage byte size: " << client_metrics.getContentStoreSizeSize() - << "\n\t>> Available storage byte size: " << client_metrics.getContentStoreAvailableSize() - << "\n\t>> Allocated storage byte size: " << client_metrics.getContentStoreAllocatedSize() - << "\n\t>> Total view allocation byte size: " << client_metrics.getTotalContentViewsSize() - << "\n\t>> Total streams elementary frame rate (fps): " << client_metrics.getTotalElementaryFrameRate() - << "\n\t>> Total streams transfer rate (bps): " << total_transfer_tate << " (" << total_transfer_tate / 1024 << " Kbps)" - << "\n\t>> Current view duration (ms): " << stream_metrics.getCurrentViewDuration().count() - << "\n\t>> Overall view duration (ms): " << stream_metrics.getOverallViewDuration().count() - << "\n\t>> Current view byte size: " << stream_metrics.getCurrentViewSize() - << "\n\t>> Overall view byte size: " << 
stream_metrics.getOverallViewSize() - << "\n\t>> Current elementary frame rate (fps): " << stream_metrics.getCurrentElementaryFrameRate() - << "\n\t>> Current transfer rate (bps): " << transfer_rate << " (" << transfer_rate / 1024 << " Kbps)"); - } catch (std::runtime_error &err) { + << this->stream_name_ << "\n\t>> Overall storage byte size: " << client_metrics.getContentStoreSizeSize() + << "\n\t>> Available storage byte size: " << client_metrics.getContentStoreAvailableSize() + << "\n\t>> Allocated storage byte size: " << client_metrics.getContentStoreAllocatedSize() + << "\n\t>> Total view allocation byte size: " << client_metrics.getTotalContentViewsSize() + << "\n\t>> Total streams elementary frame rate (fps): " << client_metrics.getTotalElementaryFrameRate() + << "\n\t>> Total streams transfer rate (bps): " << total_transfer_tate << " (" << total_transfer_tate / 1024 << " Kbps)" + << "\n\t>> Current view duration (ms): " << stream_metrics.getCurrentViewDuration().count() + << "\n\t>> Overall view duration (ms): " << stream_metrics.getOverallViewDuration().count() << "\n\t>> Current view byte size: " + << stream_metrics.getCurrentViewSize() << "\n\t>> Overall view byte size: " << stream_metrics.getOverallViewSize() + << "\n\t>> Current elementary frame rate (fps): " << stream_metrics.getCurrentElementaryFrameRate() + << "\n\t>> Current transfer rate (bps): " << transfer_rate << " (" << transfer_rate / 1024 << " Kbps)"); + } catch (std::runtime_error& err) { LOG_ERROR("Failed to get metrics. 
Error: " << err.what()); } } @@ -65,7 +66,8 @@ bool KinesisVideoStream::putFrame(KinesisVideoFrame& frame) const { return true; } -bool KinesisVideoStream::start(const std::string& hexEncodedCodecPrivateData, uint64_t trackId) { +bool KinesisVideoStream::start(const std::string& hexEncodedCodecPrivateData, uint64_t trackId) +{ // Hex-decode the string const char* pStrCpd = hexEncodedCodecPrivateData.c_str(); UINT32 size = 0; @@ -91,7 +93,7 @@ bool KinesisVideoStream::start(const std::string& hexEncodedCodecPrivateData, ui } // Start the stream with the binary codec private data buffer - bool retVal = start(reinterpret_cast (pBuffer), size, trackId); + bool retVal = start(reinterpret_cast(pBuffer), size, trackId); // Free the allocated buffer before returning ::free(pBuffer); @@ -99,11 +101,12 @@ bool KinesisVideoStream::start(const std::string& hexEncodedCodecPrivateData, ui return retVal; } -bool KinesisVideoStream::start(const unsigned char* codecPrivateData, size_t codecPrivateDataSize, uint64_t trackId) { +bool KinesisVideoStream::start(const unsigned char* codecPrivateData, size_t codecPrivateDataSize, uint64_t trackId) +{ STATUS status; - if (STATUS_FAILED(status = kinesisVideoStreamFormatChanged(stream_handle_, (UINT32) codecPrivateDataSize, - (PBYTE) codecPrivateData, (UINT64) trackId))) { + if (STATUS_FAILED( + status = kinesisVideoStreamFormatChanged(stream_handle_, (UINT32) codecPrivateDataSize, (PBYTE) codecPrivateData, (UINT64) trackId))) { LOG_ERROR("Failed to set the codec private data with: " << status << " for " << this->stream_name_); return false; } @@ -112,13 +115,15 @@ bool KinesisVideoStream::start(const unsigned char* codecPrivateData, size_t cod return start(); } -bool KinesisVideoStream::start() { +bool KinesisVideoStream::start() +{ // No-op for now return true; } -bool KinesisVideoStream::resetConnection() { +bool KinesisVideoStream::resetConnection() +{ STATUS status = STATUS_SUCCESS; if (STATUS_FAILED(status = 
kinesisVideoStreamResetConnection(stream_handle_))) { @@ -129,7 +134,8 @@ bool KinesisVideoStream::resetConnection() { return true; } -bool KinesisVideoStream::resetStream() { +bool KinesisVideoStream::resetStream() +{ STATUS status = STATUS_SUCCESS; if (STATUS_FAILED(status = kinesisVideoStreamResetStream(stream_handle_))) { @@ -140,14 +146,16 @@ bool KinesisVideoStream::resetStream() { return true; } -void KinesisVideoStream::free() { +void KinesisVideoStream::free() +{ LOG_INFO("Freeing Kinesis Video Stream for " << this->stream_name_); // Free the underlying stream std::call_once(free_kinesis_video_stream_flag_, freeKinesisVideoStream, getStreamHandle()); } -bool KinesisVideoStream::stop() { +bool KinesisVideoStream::stop() +{ STATUS status; if (STATUS_FAILED(status = stopKinesisVideoStream(stream_handle_))) { @@ -158,7 +166,8 @@ bool KinesisVideoStream::stop() { return true; } -bool KinesisVideoStream::stopSync() { +bool KinesisVideoStream::stopSync() +{ STATUS status; if (STATUS_FAILED(status = stopKinesisVideoStreamSync(stream_handle_))) { LOG_ERROR("Failed to stop the stream with: " << status << " for " << this->stream_name_); @@ -168,14 +177,16 @@ bool KinesisVideoStream::stopSync() { return true; } -KinesisVideoStreamMetrics KinesisVideoStream::getMetrics() const { +KinesisVideoStreamMetrics KinesisVideoStream::getMetrics() const +{ STATUS status = ::getKinesisVideoStreamMetrics(stream_handle_, (PStreamMetrics) stream_metrics_.getRawMetrics()); LOG_AND_THROW_IF(STATUS_FAILED(status), "Failed to get stream metrics with: " << status << " for " << this->stream_name_); return stream_metrics_; } -bool KinesisVideoStream::putFragmentMetadata(const std::string &name, const std::string &value, bool persistent){ +bool KinesisVideoStream::putFragmentMetadata(const std::string& name, const std::string& value, bool persistent) +{ const char* pMetadataName = name.c_str(); const char* pMetadataValue = value.c_str(); STATUS status = 
::putKinesisVideoFragmentMetadata(stream_handle_, (PCHAR) pMetadataName, (PCHAR) pMetadataValue, persistent); @@ -187,7 +198,8 @@ bool KinesisVideoStream::putFragmentMetadata(const std::string &name, const std: return true; } -KinesisVideoStream::~KinesisVideoStream() { +KinesisVideoStream::~KinesisVideoStream() +{ free(); } diff --git a/src/KinesisVideoStream.h b/src/KinesisVideoStream.h index 8d47660e..63ff49dc 100644 --- a/src/KinesisVideoStream.h +++ b/src/KinesisVideoStream.h @@ -11,7 +11,10 @@ #include "KinesisVideoStreamMetrics.h" #include "StreamDefinition.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** * Stream stop timeout duration. @@ -22,35 +25,36 @@ namespace com { namespace amazonaws { namespace kinesis { namespace video { #define DEBUG_DUMP_FRAME_INFO "DEBUG_DUMP_FRAME_INFO" /** -* This definition comes from the Kinesis Video PIC, the typedef is to allow differentiation in case of other "Frame" definitions. -*/ + * This definition comes from the Kinesis Video PIC, the typedef is to allow differentiation in case of other "Frame" definitions. + */ using KinesisVideoFrame = ::Frame; /** -* KinesisVideoStream is responsible for streaming any type of data into KinesisVideo service -* -* Example Usage: -* @code: -* auto client(KinesisVideoClient::getInstance()); -* auto stream(client.createStream(StreamInfo(...)); -* stream.start() -* -* // Assumes you have a keepStreaming() and getFrameFromSrc() methods that do what their name sounds like they do. 
-* while(keepStreaming()) { -* stream.putFrame(getFrameFromSrc()); -* } -* stream.stop(); -* @endcode -*/ + * KinesisVideoStream is responsible for streaming any type of data into KinesisVideo service + * + * Example Usage: + * @code: + * auto client(KinesisVideoClient::getInstance()); + * auto stream(client.createStream(StreamInfo(...)); + * stream.start() + * + * // Assumes you have a keepStreaming() and getFrameFromSrc() methods that do what their name sounds like they do. + * while(keepStreaming()) { + * stream.putFrame(getFrameFromSrc()); + * } + * stream.stop(); + * @endcode + */ class KinesisVideoProducer; class KinesisVideoStream { friend KinesisVideoProducer; -public: + public: /** * @return A pointer to the Kinesis Video STREAM_HANDLE for this instance. */ - PSTREAM_HANDLE getStreamHandle() { + PSTREAM_HANDLE getStreamHandle() + { return &stream_handle_; } @@ -128,29 +132,32 @@ class KinesisVideoStream { */ bool stopSync(); - bool operator==(const KinesisVideoStream &rhs) const { - return stream_handle_ == rhs.stream_handle_ && - stream_name_ == rhs.stream_name_; + bool operator==(const KinesisVideoStream& rhs) const + { + return stream_handle_ == rhs.stream_handle_ && stream_name_ == rhs.stream_name_; } - bool operator!=(const KinesisVideoStream &rhs) const { + bool operator!=(const KinesisVideoStream& rhs) const + { return !(rhs == *this); } - KinesisVideoStream(const KinesisVideoStream &rhs) - : stream_handle_(rhs.stream_handle_), - kinesis_video_producer_(rhs.kinesis_video_producer_), - stream_name_(rhs.stream_name_) {} + KinesisVideoStream(const KinesisVideoStream& rhs) + : stream_handle_(rhs.stream_handle_), kinesis_video_producer_(rhs.kinesis_video_producer_), stream_name_(rhs.stream_name_) + { + } - std::string getStreamName() { + std::string getStreamName() + { return stream_name_; } - const KinesisVideoProducer& getProducer() const { + const KinesisVideoProducer& getProducer() const + { return kinesis_video_producer_; } -protected: + protected: 
/** * Non-public constructor as streams should be only created by the producer client */ @@ -164,7 +171,8 @@ class KinesisVideoStream { /** * Static function to call destructor needed for the shared_ptr in the producer client object */ - static void videoStreamDeleter(KinesisVideoStream* kinesis_video_stream) { + static void videoStreamDeleter(KinesisVideoStream* kinesis_video_stream) + { delete kinesis_video_stream; } diff --git a/src/KinesisVideoStreamMetrics.h b/src/KinesisVideoStreamMetrics.h index e92c5717..66355621 100644 --- a/src/KinesisVideoStreamMetrics.h +++ b/src/KinesisVideoStreamMetrics.h @@ -4,19 +4,21 @@ #include "com/amazonaws/kinesis/video/client/Include.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** -* Wraps around the stream metrics class -*/ + * Wraps around the stream metrics class + */ class KinesisVideoStreamMetrics { - -public: - + public: /** * Default constructor */ - KinesisVideoStreamMetrics() { + KinesisVideoStreamMetrics() + { memset(&stream_metrics_, 0x00, sizeof(::StreamMetrics)); stream_metrics_.version = STREAM_METRICS_CURRENT_VERSION; } @@ -24,57 +26,65 @@ class KinesisVideoStreamMetrics { /** * Returns the current view duration in millis */ - std::chrono::duration getCurrentViewDuration() const { + std::chrono::duration getCurrentViewDuration() const + { return std::chrono::milliseconds(stream_metrics_.currentViewDuration / HUNDREDS_OF_NANOS_IN_A_MILLISECOND); } /** * Returns the overall view duration in millis */ - std::chrono::duration getOverallViewDuration() const { + std::chrono::duration getOverallViewDuration() const + { return std::chrono::milliseconds(stream_metrics_.overallViewDuration / HUNDREDS_OF_NANOS_IN_A_MILLISECOND); } /** * Returns the current view size in bytes */ - uint64_t getCurrentViewSize() const { + uint64_t getCurrentViewSize() const + { return stream_metrics_.currentViewSize; } /** * 
Returns the overall view size in bytes */ - uint64_t getOverallViewSize() const { + uint64_t getOverallViewSize() const + { return stream_metrics_.overallViewSize; } /** * Returns the observed frame rate in frames per second */ - double getCurrentFrameRate() const { + double getCurrentFrameRate() const + { return stream_metrics_.currentFrameRate; } /** * Returns elementary stream frame rate in frames per second */ - double getCurrentElementaryFrameRate() const { + double getCurrentElementaryFrameRate() const + { return stream_metrics_.elementaryFrameRate; } /** * Returns the observed transfer rate in bytes per second */ - uint64_t getCurrentTransferRate() const { + uint64_t getCurrentTransferRate() const + { return stream_metrics_.currentTransferRate; } - const ::StreamMetrics* getRawMetrics() const { + const ::StreamMetrics* getRawMetrics() const + { return &stream_metrics_; } -private: + private: /** * Underlying metrics object */ diff --git a/src/Logger.h b/src/Logger.h index 072e6db3..f32bf759 100644 --- a/src/Logger.h +++ b/src/Logger.h @@ -9,31 +9,34 @@ #include #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { // configure the logger by loading configuration from specific properties file. // generally, it should be called only once in your main() function. -#define LOG_CONFIGURE(filename) \ - try { \ - log4cplus::PropertyConfigurator::doConfigure(filename); \ - } catch(...) { \ - LOG4CPLUS_ERROR(log4cplus::Logger::getRoot(), "Exception occured while opening " << filename); \ - } +#define LOG_CONFIGURE(filename) \ + try { \ + log4cplus::PropertyConfigurator::doConfigure(filename); \ + } catch (...) 
{ \ + LOG4CPLUS_ERROR(log4cplus::Logger::getRoot(), "Exception occured while opening " << filename); \ + } -#if LOG4CPLUS_VERSION < LOG4CPLUS_MAKE_VERSION(2,0,0) - #define _LOG_CONFIGURE_CONSOLE(level, logToStdErr) \ - log4cplus::helpers::SharedObjectPtr _appender(new log4cplus::ConsoleAppender()); \ - std::auto_ptr _layout(new log4cplus::PatternLayout("%D [%t] ")); \ - _appender->setLayout(_layout); \ - log4cplus::BasicConfigurator::doConfigure(log4cplus::Logger::getDefaultHierarchy(), logToStdErr); \ - log4cplus::Logger::getRoot().addAppender(_appender); \ +#if LOG4CPLUS_VERSION < LOG4CPLUS_MAKE_VERSION(2, 0, 0) +#define _LOG_CONFIGURE_CONSOLE(level, logToStdErr) \ + log4cplus::helpers::SharedObjectPtr _appender(new log4cplus::ConsoleAppender()); \ + std::auto_ptr _layout(new log4cplus::PatternLayout("%D [%t] ")); \ + _appender->setLayout(_layout); \ + log4cplus::BasicConfigurator::doConfigure(log4cplus::Logger::getDefaultHierarchy(), logToStdErr); \ + log4cplus::Logger::getRoot().addAppender(_appender); \ log4cplus::Logger::getRoot().setLogLevel(log4cplus::getLogLevelManager().fromString(level)); #else - #define _LOG_CONFIGURE_CONSOLE(level, logToStdErr) \ - log4cplus::helpers::SharedObjectPtr _appender(new log4cplus::ConsoleAppender()); \ - _appender->setLayout(std::move(std::unique_ptr(new log4cplus::PatternLayout("%D [%t] ")))); \ - log4cplus::BasicConfigurator::doConfigure(log4cplus::Logger::getDefaultHierarchy(), logToStdErr); \ - log4cplus::Logger::getRoot().addAppender(_appender); \ +#define _LOG_CONFIGURE_CONSOLE(level, logToStdErr) \ + log4cplus::helpers::SharedObjectPtr _appender(new log4cplus::ConsoleAppender()); \ + _appender->setLayout(std::move(std::unique_ptr(new log4cplus::PatternLayout("%D [%t] ")))); \ + log4cplus::BasicConfigurator::doConfigure(log4cplus::Logger::getDefaultHierarchy(), logToStdErr); \ + log4cplus::Logger::getRoot().addAppender(_appender); \ 
log4cplus::Logger::getRoot().setLogLevel(log4cplus::getLogLevelManager().fromString(level)); #endif @@ -51,42 +54,39 @@ namespace com { namespace amazonaws { namespace kinesis { namespace video { // logging macros - any usage must be preceded by a LOGGER_TAG definition visible at the current scope. // failure to use the LOGGER_TAG macro will result in "error: 'KinesisVideoLogger' has not been declared" -#define LOG_TRACE(msg) LOG4CPLUS_TRACE(KinesisVideoLogger::getInstance(), msg); -#define LOG_DEBUG(msg) LOG4CPLUS_DEBUG(KinesisVideoLogger::getInstance(), msg); -#define LOG_INFO(msg) LOG4CPLUS_INFO(KinesisVideoLogger::getInstance(), msg); -#define LOG_WARN(msg) LOG4CPLUS_WARN(KinesisVideoLogger::getInstance(), msg); -#define LOG_ERROR(msg) LOG4CPLUS_ERROR(KinesisVideoLogger::getInstance(), msg); -#define LOG_FATAL(msg) LOG4CPLUS_FATAL(KinesisVideoLogger::getInstance(), msg); +#define LOG_TRACE(msg) LOG4CPLUS_TRACE(KinesisVideoLogger::getInstance(), msg); +#define LOG_DEBUG(msg) LOG4CPLUS_DEBUG(KinesisVideoLogger::getInstance(), msg); +#define LOG_INFO(msg) LOG4CPLUS_INFO(KinesisVideoLogger::getInstance(), msg); +#define LOG_WARN(msg) LOG4CPLUS_WARN(KinesisVideoLogger::getInstance(), msg); +#define LOG_ERROR(msg) LOG4CPLUS_ERROR(KinesisVideoLogger::getInstance(), msg); +#define LOG_FATAL(msg) LOG4CPLUS_FATAL(KinesisVideoLogger::getInstance(), msg); -#define LOG_AND_THROW(msg) \ - do { \ - std::ostringstream __oss; \ - __oss << msg; \ - LOG_ERROR(__oss.str()); \ - throw std::runtime_error(__oss.str()); \ - } while (0) +#define LOG_AND_THROW(msg) \ + do { \ + std::ostringstream __oss; \ + __oss << msg; \ + LOG_ERROR(__oss.str()); \ + throw std::runtime_error(__oss.str()); \ + } while (0) -#define LOG_AND_THROW_IF(cond, msg) \ - if (cond) { \ - LOG_AND_THROW(msg); \ - } +#define LOG_AND_THROW_IF(cond, msg) \ + if (cond) { \ + LOG_AND_THROW(msg); \ + } -#define ASSERT_MSG(cond, msg) \ - LOG_AND_THROW_IF(!(cond), \ - __FILE__ << ":" << __LINE__ << ": " << msg << ": " << 
#cond) - -#define ASSERT(cond) \ - ASSERT_MSG(cond, "Assertion failed"); +#define ASSERT_MSG(cond, msg) LOG_AND_THROW_IF(!(cond), __FILE__ << ":" << __LINE__ << ": " << msg << ": " << #cond) +#define ASSERT(cond) ASSERT_MSG(cond, "Assertion failed"); // defines a class which contains a logger instance with the given tag -#define LOGGER_TAG(tag) \ - struct KinesisVideoLogger { \ - static log4cplus::Logger& getInstance() { \ - static log4cplus::Logger s_logger = log4cplus::Logger::getInstance(tag); \ - return s_logger; \ - } \ - }; +#define LOGGER_TAG(tag) \ + struct KinesisVideoLogger { \ + static log4cplus::Logger& getInstance() \ + { \ + static log4cplus::Logger s_logger = log4cplus::Logger::getInstance(tag); \ + return s_logger; \ + } \ + }; } // namespace video } // namespace kinesis diff --git a/src/StreamCallbackProvider.h b/src/StreamCallbackProvider.h index 807d65ab..eaed3a1b 100644 --- a/src/StreamCallbackProvider.h +++ b/src/StreamCallbackProvider.h @@ -4,27 +4,30 @@ #include "com/amazonaws/kinesis/video/client/Include.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** -* Kinesis Video Stream level callback provider -* -* getStreamUnderflowReportCallback(); -* getStreamLatencyPressureCallback(); -* getStreamConnectionStaleCallback(); -* getDroppedFrameReportCallback(); -* getDroppedFragmentReportCallback(); -* getStreamErrorReportCallback(); -* getStreamReadyCallback(); -* getStreamClosedCallback(); -* getStreamDataAvailableCallback(); -* getFragmentAckReceivedCallback(); -* -* The optional callbacks are virtual, but there are default implementations defined for them that return nullptr, -* which will therefore use the defaults provided by the Kinesis Video SDK. 
-*/ + * Kinesis Video Stream level callback provider + * + * getStreamUnderflowReportCallback(); + * getStreamLatencyPressureCallback(); + * getStreamConnectionStaleCallback(); + * getDroppedFrameReportCallback(); + * getDroppedFragmentReportCallback(); + * getStreamErrorReportCallback(); + * getStreamReadyCallback(); + * getStreamClosedCallback(); + * getStreamDataAvailableCallback(); + * getFragmentAckReceivedCallback(); + * + * The optional callbacks are virtual, but there are default implementations defined for them that return nullptr, + * which will therefore use the defaults provided by the Kinesis Video SDK. + */ class StreamCallbackProvider { -public: + public: /** * Returns the custom data for this object to be used with the callbacks. * @@ -32,7 +35,8 @@ class StreamCallbackProvider { */ virtual UINT64 getCallbackCustomData() = 0; - virtual BufferDurationOverflowPressureFunc getBufferDurationOverFlowCallback() { + virtual BufferDurationOverflowPressureFunc getBufferDurationOverFlowCallback() + { return nullptr; } @@ -48,7 +52,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StreamUnderflowReportFunc getStreamUnderflowReportCallback() { + virtual StreamUnderflowReportFunc getStreamUnderflowReportCallback() + { return nullptr; }; @@ -65,7 +70,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StreamLatencyPressureFunc getStreamLatencyPressureCallback() { + virtual StreamLatencyPressureFunc getStreamLatencyPressureCallback() + { return nullptr; }; @@ -81,7 +87,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. 
*/ - virtual DroppedFrameReportFunc getDroppedFrameReportCallback() { + virtual DroppedFrameReportFunc getDroppedFrameReportCallback() + { return nullptr; }; @@ -97,7 +104,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StreamConnectionStaleFunc getStreamConnectionStaleCallback() { + virtual StreamConnectionStaleFunc getStreamConnectionStaleCallback() + { return nullptr; }; @@ -114,7 +122,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual DroppedFragmentReportFunc getDroppedFragmentReportCallback() { + virtual DroppedFragmentReportFunc getDroppedFragmentReportCallback() + { return nullptr; }; @@ -131,7 +140,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StreamErrorReportFunc getStreamErrorReportCallback() { + virtual StreamErrorReportFunc getStreamErrorReportCallback() + { return nullptr; }; @@ -147,7 +157,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StreamReadyFunc getStreamReadyCallback() { + virtual StreamReadyFunc getStreamReadyCallback() + { return nullptr; }; @@ -163,7 +174,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StreamClosedFunc getStreamClosedCallback() { + virtual StreamClosedFunc getStreamClosedCallback() + { return nullptr; }; @@ -182,7 +194,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual StreamDataAvailableFunc getStreamDataAvailableCallback() { + virtual StreamDataAvailableFunc getStreamDataAvailableCallback() + { return nullptr; }; @@ -199,7 +212,8 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. 
*/ - virtual FragmentAckReceivedFunc getFragmentAckReceivedCallback() { + virtual FragmentAckReceivedFunc getFragmentAckReceivedCallback() + { return nullptr; }; @@ -216,11 +230,12 @@ class StreamCallbackProvider { * * @return a function pointer conforming to the description above. */ - virtual BufferDurationOverflowPressureFunc getBufferDurationOverflowPressureCallback() { + virtual BufferDurationOverflowPressureFunc getBufferDurationOverflowPressureCallback() + { return nullptr; }; - virtual ~StreamCallbackProvider() {}; + virtual ~StreamCallbackProvider(){}; }; } // namespace video diff --git a/src/StreamDefinition.cpp b/src/StreamDefinition.cpp index 044b7cbf..d896bffa 100644 --- a/src/StreamDefinition.cpp +++ b/src/StreamDefinition.cpp @@ -1,7 +1,10 @@ #include "StreamDefinition.h" #include "Logger.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { LOGGER_TAG("com.amazonaws.kinesis.video"); @@ -14,58 +17,33 @@ using std::chrono::duration; using std::chrono::duration_cast; using std::chrono::nanoseconds; -StreamDefinition::StreamDefinition( - string stream_name, - duration > retention_period, - const map *tags, - string kms_key_id, - STREAMING_TYPE streaming_type, - string content_type, - duration max_latency, - duration fragment_duration, - duration timecode_scale, - bool key_frame_fragmentation, - bool frame_timecodes, - bool absolute_fragment_times, - bool fragment_acks, - bool restart_on_error, - bool recalculate_metrics, - bool allowStreamCreation, - uint32_t nal_adaptation_flags, - uint32_t frame_rate, - uint32_t avg_bandwidth_bps, - duration buffer_duration, - duration replay_duration, - duration connection_staleness, - string codec_id, string track_name, - const uint8_t *codecPrivateData, - uint32_t codecPrivateDataSize, - MKV_TRACK_INFO_TYPE track_type, - const vector segment_uuid, - const uint64_t default_track_id, - 
CONTENT_STORE_PRESSURE_POLICY contentStorePressurePolicy, - CONTENT_VIEW_OVERFLOW_POLICY contentViewOverflowPolicy) - : tags_(tags), - stream_name_(stream_name) { +StreamDefinition::StreamDefinition(string stream_name, duration> retention_period, const map* tags, + string kms_key_id, STREAMING_TYPE streaming_type, string content_type, duration max_latency, + duration fragment_duration, duration timecode_scale, + bool key_frame_fragmentation, bool frame_timecodes, bool absolute_fragment_times, bool fragment_acks, + bool restart_on_error, bool recalculate_metrics, bool allowStreamCreation, uint32_t nal_adaptation_flags, + uint32_t frame_rate, uint32_t avg_bandwidth_bps, duration buffer_duration, + duration replay_duration, duration connection_staleness, string codec_id, string track_name, + const uint8_t* codecPrivateData, uint32_t codecPrivateDataSize, MKV_TRACK_INFO_TYPE track_type, + const vector segment_uuid, const uint64_t default_track_id, + CONTENT_STORE_PRESSURE_POLICY contentStorePressurePolicy, CONTENT_VIEW_OVERFLOW_POLICY contentViewOverflowPolicy) + : tags_(tags), stream_name_(stream_name) +{ memset(&stream_info_, 0x00, sizeof(StreamInfo)); LOG_AND_THROW_IF(MAX_STREAM_NAME_LEN < stream_name.size(), "StreamName exceeded max length " << MAX_STREAM_NAME_LEN); strcpy(stream_info_.name, stream_name.c_str()); stream_info_.version = STREAM_INFO_CURRENT_VERSION; - stream_info_.retention = duration_cast( - retention_period).count() / DEFAULT_TIME_UNIT_IN_NANOS; + stream_info_.retention = duration_cast(retention_period).count() / DEFAULT_TIME_UNIT_IN_NANOS; LOG_AND_THROW_IF(MAX_ARN_LEN < kms_key_id.size(), "KMS Key Id exceeded max length of " << MAX_ARN_LEN); strcpy(stream_info_.kmsKeyId, kms_key_id.c_str()); LOG_AND_THROW_IF(MAX_CONTENT_TYPE_LEN < content_type.size(), "ContentType exceeded max length of " << MAX_CONTENT_TYPE_LEN); strcpy(stream_info_.streamCaps.contentType, content_type.c_str()); stream_info_.streamCaps.streamingType = streaming_type; 
stream_info_.streamCaps.adaptive = FALSE; - stream_info_.streamCaps.maxLatency = duration_cast( - max_latency).count() / DEFAULT_TIME_UNIT_IN_NANOS; - stream_info_.streamCaps.fragmentDuration = duration_cast( - fragment_duration).count() / DEFAULT_TIME_UNIT_IN_NANOS; - stream_info_.streamCaps.timecodeScale = duration_cast( - timecode_scale).count() / DEFAULT_TIME_UNIT_IN_NANOS; + stream_info_.streamCaps.maxLatency = duration_cast(max_latency).count() / DEFAULT_TIME_UNIT_IN_NANOS; + stream_info_.streamCaps.fragmentDuration = duration_cast(fragment_duration).count() / DEFAULT_TIME_UNIT_IN_NANOS; + stream_info_.streamCaps.timecodeScale = duration_cast(timecode_scale).count() / DEFAULT_TIME_UNIT_IN_NANOS; stream_info_.streamCaps.keyFrameFragmentation = key_frame_fragmentation; stream_info_.streamCaps.frameTimecodes = frame_timecodes; stream_info_.streamCaps.absoluteFragmentTimes = absolute_fragment_times; @@ -76,12 +54,9 @@ StreamDefinition::StreamDefinition( stream_info_.streamCaps.nalAdaptationFlags = nal_adaptation_flags; stream_info_.streamCaps.frameRate = frame_rate; stream_info_.streamCaps.avgBandwidthBps = avg_bandwidth_bps; - stream_info_.streamCaps.bufferDuration = duration_cast( - buffer_duration).count() / DEFAULT_TIME_UNIT_IN_NANOS; - stream_info_.streamCaps.replayDuration = duration_cast( - replay_duration).count() / DEFAULT_TIME_UNIT_IN_NANOS; - stream_info_.streamCaps.connectionStalenessDuration = duration_cast( - connection_staleness).count() / DEFAULT_TIME_UNIT_IN_NANOS; + stream_info_.streamCaps.bufferDuration = duration_cast(buffer_duration).count() / DEFAULT_TIME_UNIT_IN_NANOS; + stream_info_.streamCaps.replayDuration = duration_cast(replay_duration).count() / DEFAULT_TIME_UNIT_IN_NANOS; + stream_info_.streamCaps.connectionStalenessDuration = duration_cast(connection_staleness).count() / DEFAULT_TIME_UNIT_IN_NANOS; stream_info_.streamCaps.frameOrderingMode = FRAME_ORDER_MODE_PASS_THROUGH; stream_info_.streamCaps.storePressurePolicy = 
contentStorePressurePolicy; stream_info_.streamCaps.viewOverflowPolicy = contentViewOverflowPolicy; @@ -100,55 +75,52 @@ StreamDefinition::StreamDefinition( track_info_.push_back(StreamTrackInfo{default_track_id, track_name, codec_id, codecPrivateData, codecPrivateDataSize, track_type}); // Set the tags - stream_info_.tagCount = (UINT32)tags_.count(); + stream_info_.tagCount = (UINT32) tags_.count(); stream_info_.tags = tags_.asPTag(); } -void StreamDefinition::addTrack(const uint64_t track_id, - const string &track_name, - const string &codec_id, - MKV_TRACK_INFO_TYPE track_type, - const uint8_t* codecPrivateData, - uint32_t codecPrivateDataSize) { +void StreamDefinition::addTrack(const uint64_t track_id, const string& track_name, const string& codec_id, MKV_TRACK_INFO_TYPE track_type, + const uint8_t* codecPrivateData, uint32_t codecPrivateDataSize) +{ stream_info_.streamCaps.frameOrderingMode = FRAME_ORDERING_MODE_MULTI_TRACK_AV_COMPARE_PTS_ONE_MS_COMPENSATE_EOFR; - track_info_.push_back(StreamTrackInfo{track_id, - track_name, - codec_id, - codecPrivateData, - codecPrivateDataSize, - track_type}); + track_info_.push_back(StreamTrackInfo{track_id, track_name, codec_id, codecPrivateData, codecPrivateDataSize, track_type}); } -void StreamDefinition::setFrameOrderMode(FRAME_ORDER_MODE mode) { +void StreamDefinition::setFrameOrderMode(FRAME_ORDER_MODE mode) +{ stream_info_.streamCaps.frameOrderingMode = mode; } -StreamDefinition::~StreamDefinition() { +StreamDefinition::~StreamDefinition() +{ for (size_t i = 0; i < stream_info_.tagCount; ++i) { - Tag &tag = stream_info_.tags[i]; + Tag& tag = stream_info_.tags[i]; free(tag.name); free(tag.value); } free(stream_info_.tags); - delete [] stream_info_.streamCaps.trackInfoList; + delete[] stream_info_.streamCaps.trackInfoList; } -const string& StreamDefinition::getStreamName() const { +const string& StreamDefinition::getStreamName() const +{ return stream_name_; } -const size_t StreamDefinition::getTrackCount() const { 
+const size_t StreamDefinition::getTrackCount() const +{ return track_info_.size(); } -const StreamInfo& StreamDefinition::getStreamInfo() { +const StreamInfo& StreamDefinition::getStreamInfo() +{ stream_info_.streamCaps.trackInfoCount = static_cast(track_info_.size()); stream_info_.streamCaps.trackInfoList = new TrackInfo[track_info_.size()]; memset(stream_info_.streamCaps.trackInfoList, 0, sizeof(TrackInfo) * track_info_.size()); for (size_t i = 0; i < track_info_.size(); ++i) { - TrackInfo &trackInfo = stream_info_.streamCaps.trackInfoList[i]; + TrackInfo& trackInfo = stream_info_.streamCaps.trackInfoList[i]; trackInfo.trackId = track_info_[i].track_id; trackInfo.trackType = track_info_[i].track_type; diff --git a/src/StreamDefinition.h b/src/StreamDefinition.h index d320326a..795dca48 100644 --- a/src/StreamDefinition.h +++ b/src/StreamDefinition.h @@ -14,7 +14,10 @@ #define DEFAULT_TRACK_ID 1 -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** * StreamTrackInfo__ shadows TrackInfo struct in mkvgen/include. @@ -30,57 +33,36 @@ typedef struct StreamTrackInfo__ { } StreamTrackInfo; /** -* Models all metadata necessary to describe a stream -* -*/ + * Models all metadata necessary to describe a stream + * + */ class StreamDefinition { - -public: + public: /** * @param stream_name Human readable name of the stream. Usually: .camera_ * @param transforms The type transformation to be done in the encoder. (e.g. convert I420 -> h.264, etc.) * @param tags The Kinesis Video PIC stream tags which should be set for this stream. 
*/ - StreamDefinition( - std::string stream_name, - std::chrono::duration> retention_period, - const std::map* tags = nullptr, - std::string kms_key_id = "", - STREAMING_TYPE streaming_type = STREAMING_TYPE_REALTIME, - std::string content_type = "video/h264", - std::chrono::duration max_latency = std::chrono::milliseconds::zero(), - std::chrono::duration fragment_duration = std::chrono::milliseconds(2000), - std::chrono::duration timecode_scale = std::chrono::milliseconds(1), - bool key_frame_fragmentation = true, - bool frame_timecodes = true, - bool absolute_fragment_times = true, - bool fragment_acks = true, - bool restart_on_error = true, - bool recalculate_metrics = true, - bool allow_stream_creation = true, - uint32_t nal_adaptation_flags = NAL_ADAPTATION_ANNEXB_NALS | NAL_ADAPTATION_ANNEXB_CPD_NALS, - uint32_t frame_rate = 25, - uint32_t avg_bandwidth_bps = 4 * 1024 * 1024, - std::chrono::duration buffer_duration = std::chrono::seconds(120), - std::chrono::duration replay_duration = std::chrono::seconds(40), - std::chrono::duration connection_staleness = std::chrono::seconds(30), - std::string codec_id = "V_MPEG4/ISO/AVC", - std::string track_name = "kinesis_video", - const uint8_t* codecPrivateData = nullptr, - uint32_t codecPrivateDataSize = 0, - MKV_TRACK_INFO_TYPE track_type = MKV_TRACK_INFO_TYPE_VIDEO, - const std::vector segment_uuid = std::vector(), - const uint64_t default_track_id = DEFAULT_TRACK_ID, - CONTENT_STORE_PRESSURE_POLICY contentStorePressurePolicy = CONTENT_STORE_PRESSURE_POLICY_DROP_TAIL_ITEM, - CONTENT_VIEW_OVERFLOW_POLICY contentViewOverflowPolicy = CONTENT_VIEW_OVERFLOW_POLICY_DROP_UNTIL_FRAGMENT_START - ); - - void addTrack(const uint64_t track_id, - const std::string &track_name, - const std::string &codec_id, - MKV_TRACK_INFO_TYPE track_type, - const uint8_t* codecPrivateData = nullptr, - uint32_t codecPrivateDataSize = 0); + StreamDefinition(std::string stream_name, std::chrono::duration> retention_period, + const std::map* tags = 
nullptr, std::string kms_key_id = "", + STREAMING_TYPE streaming_type = STREAMING_TYPE_REALTIME, std::string content_type = "video/h264", + std::chrono::duration max_latency = std::chrono::milliseconds::zero(), + std::chrono::duration fragment_duration = std::chrono::milliseconds(2000), + std::chrono::duration timecode_scale = std::chrono::milliseconds(1), bool key_frame_fragmentation = true, + bool frame_timecodes = true, bool absolute_fragment_times = true, bool fragment_acks = true, bool restart_on_error = true, + bool recalculate_metrics = true, bool allow_stream_creation = true, + uint32_t nal_adaptation_flags = NAL_ADAPTATION_ANNEXB_NALS | NAL_ADAPTATION_ANNEXB_CPD_NALS, uint32_t frame_rate = 25, + uint32_t avg_bandwidth_bps = 4 * 1024 * 1024, std::chrono::duration buffer_duration = std::chrono::seconds(120), + std::chrono::duration replay_duration = std::chrono::seconds(40), + std::chrono::duration connection_staleness = std::chrono::seconds(30), std::string codec_id = "V_MPEG4/ISO/AVC", + std::string track_name = "kinesis_video", const uint8_t* codecPrivateData = nullptr, uint32_t codecPrivateDataSize = 0, + MKV_TRACK_INFO_TYPE track_type = MKV_TRACK_INFO_TYPE_VIDEO, const std::vector segment_uuid = std::vector(), + const uint64_t default_track_id = DEFAULT_TRACK_ID, + CONTENT_STORE_PRESSURE_POLICY contentStorePressurePolicy = CONTENT_STORE_PRESSURE_POLICY_DROP_TAIL_ITEM, + CONTENT_VIEW_OVERFLOW_POLICY contentViewOverflowPolicy = CONTENT_VIEW_OVERFLOW_POLICY_DROP_UNTIL_FRAGMENT_START); + + void addTrack(const uint64_t track_id, const std::string& track_name, const std::string& codec_id, MKV_TRACK_INFO_TYPE track_type, + const uint8_t* codecPrivateData = nullptr, uint32_t codecPrivateDataSize = 0); void setFrameOrderMode(FRAME_ORDER_MODE mode); @@ -101,7 +83,7 @@ class StreamDefinition { */ const StreamInfo& getStreamInfo(); -private: + private: /** * Human readable name of the stream. 
Usually: .camera_ */ @@ -125,7 +107,7 @@ class StreamDefinition { /** * Segment UUID bytes */ - uint8_t segment_uuid_[MKV_SEGMENT_UUID_LEN]; + uint8_t segment_uuid_[MKV_SEGMENT_UUID_LEN]; }; } // namespace video diff --git a/src/StreamTags.cpp b/src/StreamTags.cpp index 72f22d88..d702a138 100644 --- a/src/StreamTags.cpp +++ b/src/StreamTags.cpp @@ -2,26 +2,32 @@ #include "StreamTags.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { using std::get; -using std::string; using std::map; +using std::string; -StreamTags::StreamTags(const map* tags) : tags_(tags) {} +StreamTags::StreamTags(const map* tags) : tags_(tags) +{ +} -PTag StreamTags::asPTag() const { +PTag StreamTags::asPTag() const +{ if (nullptr == tags_) { return nullptr; } PTag tags = reinterpret_cast(malloc(sizeof(Tag) * tags_->size())); size_t i = 0; - for (const auto &pair : *tags_) { - Tag &tag = tags[i]; + for (const auto& pair : *tags_) { + Tag& tag = tags[i]; tag.version = TAG_CURRENT_VERSION; - auto &name = get<0>(pair); - auto &val = get<1>(pair); + auto& name = get<0>(pair); + auto& val = get<1>(pair); assert(MAX_TAG_NAME_LEN >= name.size()); assert(MAX_TAG_VALUE_LEN >= val.size()); @@ -35,7 +41,8 @@ PTag StreamTags::asPTag() const { return tags; } -size_t StreamTags::count() const { +size_t StreamTags::count() const +{ if (nullptr != tags_) { return tags_->size(); } else { diff --git a/src/StreamTags.h b/src/StreamTags.h index 730be624..bce8edf2 100644 --- a/src/StreamTags.h +++ b/src/StreamTags.h @@ -8,13 +8,16 @@ #include #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** -* Holds a map of tags which are to be set on the Kinesis Video stream. -*/ + * Holds a map of tags which are to be set on the Kinesis Video stream. 
+ */ class StreamTags { -public: + public: explicit StreamTags(const std::map* tags); /** @@ -27,14 +30,12 @@ class StreamTags { */ PTag asPTag() const; -private: - + private: /** * Mapping of key/val pairs which are to be set on the Kinesis Video stream for which the tags are associated in the * stream definition. */ const std::map* tags_; - }; } // namespace video diff --git a/src/ThreadSafeMap.h b/src/ThreadSafeMap.h index 40f457ed..50fe34eb 100644 --- a/src/ThreadSafeMap.h +++ b/src/ThreadSafeMap.h @@ -6,7 +6,10 @@ #include #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** * Thread safe implementation of std::map. @@ -14,13 +17,14 @@ namespace com { namespace amazonaws { namespace kinesis { namespace video { * @tparam V */ template class ThreadSafeMap { -public: + public: /** * Put an item into the map. * @param k key * @param v value */ - void put(K k, V v) { + void put(K k, V v) + { std::lock_guard lock(mutex_); map_.emplace(std::pair(k, v)); } @@ -30,7 +34,8 @@ template class ThreadSafeMap { * @param k Key to look up. * @return The value at k or nullptr. */ - V get(K k) { + V get(K k) + { std::unique_lock lock(mutex_); if (contains(k)) { return map_[k]; @@ -44,7 +49,8 @@ template class ThreadSafeMap { * @param index Index of the item * @return The value at index or nullptr. */ - V getAt(int index) { + V getAt(int index) + { std::unique_lock lock(mutex_); int cur_index = 0; V ret_value = nullptr; @@ -69,7 +75,8 @@ template class ThreadSafeMap { * Remove the pair stored the map at k, if it exists. * @param k Key to be removed. */ - void remove(K k) { + void remove(K k) + { std::unique_lock lock(mutex_); auto it = map_.find(k); if (it != map_.end()) { @@ -82,7 +89,8 @@ template class ThreadSafeMap { * @param k Key to be checked * @return True if the key exists and false otherwise. 
*/ - bool exists(K k) { + bool exists(K k) + { std::unique_lock lock(mutex_); return contains(k); } @@ -90,17 +98,19 @@ template class ThreadSafeMap { /** * UNSAFE!!! Returns the underlying map */ - std::map getMap() { + std::map getMap() + { return map_; } -private: + private: /** * Private function to check whether the key exists. * * NOTE: This is a thread unsafe op. */ - bool contains(K k) { + bool contains(K k) + { return map_.find(k) != map_.end(); } /** @@ -112,7 +122,6 @@ template class ThreadSafeMap { * Mutual exclusion over R/W operations on the map. */ std::mutex mutex_; - }; } // namespace video diff --git a/src/common/PutFrameHelper.cpp b/src/common/PutFrameHelper.cpp index 01cab034..40274cfb 100644 --- a/src/common/PutFrameHelper.cpp +++ b/src/common/PutFrameHelper.cpp @@ -1,50 +1,52 @@ #include "PutFrameHelper.h" #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { LOGGER_TAG("com.amazonaws.kinesis.video"); using std::shared_ptr; -PutFrameHelper::PutFrameHelper( - shared_ptr kinesis_video_stream, - uint64_t mkv_timecode_scale_ns, - uint32_t max_audio_queue_size, - uint32_t max_video_queue_size, - uint32_t initial_buffer_size_audio, - uint32_t initial_buffer_size_video) : - kinesis_video_stream(kinesis_video_stream), - data_buffer_size(max_video_queue_size), - put_frame_status(true) { +PutFrameHelper::PutFrameHelper(shared_ptr kinesis_video_stream, uint64_t mkv_timecode_scale_ns, uint32_t max_audio_queue_size, + uint32_t max_video_queue_size, uint32_t initial_buffer_size_audio, uint32_t initial_buffer_size_video) + : kinesis_video_stream(kinesis_video_stream), data_buffer_size(max_video_queue_size), put_frame_status(true) +{ data_buffer = new uint8_t[data_buffer_size]; } -void PutFrameHelper::putFrameMultiTrack(Frame frame, bool isVideo) { +void PutFrameHelper::putFrameMultiTrack(Frame frame, bool isVideo) +{ if 
(!kinesis_video_stream->putFrame(frame)) { put_frame_status = false; LOG_WARN("Failed to put normal frame"); } } -void PutFrameHelper::flush() { +void PutFrameHelper::flush() +{ // no-op } -uint8_t *PutFrameHelper::getFrameDataBuffer(uint32_t requested_buffer_size, bool isVideo) { +uint8_t* PutFrameHelper::getFrameDataBuffer(uint32_t requested_buffer_size, bool isVideo) +{ if (requested_buffer_size > data_buffer_size) { - delete [] data_buffer; + delete[] data_buffer; data_buffer_size = requested_buffer_size + requested_buffer_size / 2; data_buffer = new uint8_t[data_buffer_size]; } return data_buffer; } -bool PutFrameHelper::putFrameFailed() { +bool PutFrameHelper::putFrameFailed() +{ return put_frame_status; } -void PutFrameHelper::putEofr() { +void PutFrameHelper::putEofr() +{ Frame frame = EOFR_FRAME_INITIALIZER; if (!kinesis_video_stream->putFrame(frame)) { put_frame_status = false; @@ -52,11 +54,12 @@ void PutFrameHelper::putEofr() { } } -PutFrameHelper::~PutFrameHelper() { - delete [] data_buffer; +PutFrameHelper::~PutFrameHelper() +{ + delete[] data_buffer; } -} -} -} -} +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com diff --git a/src/common/PutFrameHelper.h b/src/common/PutFrameHelper.h index 8f89c95c..f039ab9f 100644 --- a/src/common/PutFrameHelper.h +++ b/src/common/PutFrameHelper.h @@ -7,14 +7,17 @@ #include namespace { - const uint32_t DEFAULT_MAX_AUDIO_QUEUE_SIZE = 200; - const uint32_t DEFAULT_MAX_VIDEO_QUEUE_SIZE = 50; - const uint64_t DEFAULT_MKV_TIMECODE_SCALE_NS = 1000000; - const uint32_t DEFAULT_BUFFER_SIZE_AUDIO = 50 * 1024; - const uint32_t DEFAULT_BUFFER_SIZE_VIDEO = 100 * 1024; -} +const uint32_t DEFAULT_MAX_AUDIO_QUEUE_SIZE = 200; +const uint32_t DEFAULT_MAX_VIDEO_QUEUE_SIZE = 50; +const uint64_t DEFAULT_MKV_TIMECODE_SCALE_NS = 1000000; +const uint32_t DEFAULT_BUFFER_SIZE_AUDIO = 50 * 1024; +const uint32_t DEFAULT_BUFFER_SIZE_VIDEO = 100 * 1024; +} // namespace -namespace com { namespace 
amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { /** * @Deprecated @@ -34,21 +37,18 @@ class PutFrameHelper { bool put_frame_status; uint8_t* data_buffer; uint32_t data_buffer_size; -public: - PutFrameHelper( - std::shared_ptr kinesis_video_stream, - uint64_t mkv_timecode_scale_ns = DEFAULT_MKV_TIMECODE_SCALE_NS, - uint32_t max_audio_queue_size = DEFAULT_MAX_AUDIO_QUEUE_SIZE, - uint32_t max_video_queue_size = DEFAULT_MAX_VIDEO_QUEUE_SIZE, - uint32_t initial_buffer_size_audio = DEFAULT_BUFFER_SIZE_AUDIO, - uint32_t initial_buffer_size_video = DEFAULT_BUFFER_SIZE_VIDEO); + + public: + PutFrameHelper(std::shared_ptr kinesis_video_stream, uint64_t mkv_timecode_scale_ns = DEFAULT_MKV_TIMECODE_SCALE_NS, + uint32_t max_audio_queue_size = DEFAULT_MAX_AUDIO_QUEUE_SIZE, uint32_t max_video_queue_size = DEFAULT_MAX_VIDEO_QUEUE_SIZE, + uint32_t initial_buffer_size_audio = DEFAULT_BUFFER_SIZE_AUDIO, uint32_t initial_buffer_size_video = DEFAULT_BUFFER_SIZE_VIDEO); ~PutFrameHelper(); /* * application should call getFrameDataBuffer() to get a buffer to store frame data before calling putFrameMultiTrack() */ - uint8_t *getFrameDataBuffer(uint32_t requested_buffer_size, bool isVideo); + uint8_t* getFrameDataBuffer(uint32_t requested_buffer_size, bool isVideo); /* * application should call putFrameMultiTrack() to pass over the frame. 
The frame will be put into the kinesis video @@ -66,11 +66,9 @@ class PutFrameHelper { void putEofr(); }; -} -} -} -} - - +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com #endif //__PUT_FRAME_HELPER_H__ diff --git a/src/credential-providers/IotCertCredentialProvider.cpp b/src/credential-providers/IotCertCredentialProvider.cpp index 9036fee8..e7d47d3e 100644 --- a/src/credential-providers/IotCertCredentialProvider.cpp +++ b/src/credential-providers/IotCertCredentialProvider.cpp @@ -9,16 +9,10 @@ IotCertCredentialProvider::callback_t IotCertCredentialProvider::getCallbacks(PC STATUS retStatus = STATUS_SUCCESS; LOG_DEBUG("Creating IoT auth callbacks."); - if (STATUS_FAILED(retStatus = createIotAuthCallbacksWithTimeouts(client_callbacks, - STRING_TO_PCHAR(iot_get_credential_endpoint_), - STRING_TO_PCHAR(cert_path_), - STRING_TO_PCHAR(private_key_path_), - STRING_TO_PCHAR(ca_cert_path_), - STRING_TO_PCHAR(role_alias_), - STRING_TO_PCHAR(stream_name_), - connectionTimeout_, - completionTimeout_, - &iot_callbacks))) { + if (STATUS_FAILED(retStatus = createIotAuthCallbacksWithTimeouts( + client_callbacks, STRING_TO_PCHAR(iot_get_credential_endpoint_), STRING_TO_PCHAR(cert_path_), + STRING_TO_PCHAR(private_key_path_), STRING_TO_PCHAR(ca_cert_path_), STRING_TO_PCHAR(role_alias_), + STRING_TO_PCHAR(stream_name_), connectionTimeout_, completionTimeout_, &iot_callbacks))) { std::stringstream status_strstrm; status_strstrm << std::hex << retStatus; LOG_AND_THROW("Unable to create Iot Credential provider. 
Error status: 0x" + status_strstrm.str()); diff --git a/src/credential-providers/IotCertCredentialProvider.h b/src/credential-providers/IotCertCredentialProvider.h index 180769ce..d5e42e2c 100644 --- a/src/credential-providers/IotCertCredentialProvider.h +++ b/src/credential-providers/IotCertCredentialProvider.h @@ -4,56 +4,43 @@ #include #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { - class IotCertCredentialProvider : public CredentialProvider { - PAuthCallbacks iot_callbacks = nullptr; - const std::string iot_get_credential_endpoint_, cert_path_, private_key_path_, ca_cert_path_, - role_alias_,stream_name_; - uint64_t connectionTimeout_ = 0, completionTimeout_ = 0; - - public: - IotCertCredentialProvider(const std::string iot_get_credential_endpoint, - const std::string cert_path, - const std::string private_key_path, - const std::string role_alias, - const std::string ca_cert_path, - const std::string stream_name): - iot_get_credential_endpoint_(iot_get_credential_endpoint), - cert_path_(cert_path), - private_key_path_(private_key_path), - role_alias_(role_alias), - ca_cert_path_(ca_cert_path), - stream_name_(stream_name) {} - - IotCertCredentialProvider(const std::string iot_get_credential_endpoint, - const std::string cert_path, - const std::string private_key_path, - const std::string role_alias, - const std::string ca_cert_path, - const std::string stream_name, - uint64_t connectionTimeout, - uint64_t completionTimeout): - iot_get_credential_endpoint_(iot_get_credential_endpoint), - cert_path_(cert_path), - private_key_path_(private_key_path), - role_alias_(role_alias), - ca_cert_path_(ca_cert_path), - stream_name_(stream_name), - connectionTimeout_ (connectionTimeout), - completionTimeout_ (completionTimeout) {} - - void updateCredentials(Credentials& credentials) override { - // no-op as credential update is handled in c producer iot auth callbacks - } - - callback_t getCallbacks(PClientCallbacks) override; - }; - -} 
-} -} -} - - +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { +class IotCertCredentialProvider : public CredentialProvider { + PAuthCallbacks iot_callbacks = nullptr; + const std::string iot_get_credential_endpoint_, cert_path_, private_key_path_, ca_cert_path_, role_alias_, stream_name_; + uint64_t connectionTimeout_ = 0, completionTimeout_ = 0; + + public: + IotCertCredentialProvider(const std::string iot_get_credential_endpoint, const std::string cert_path, const std::string private_key_path, + const std::string role_alias, const std::string ca_cert_path, const std::string stream_name) + : iot_get_credential_endpoint_(iot_get_credential_endpoint), cert_path_(cert_path), private_key_path_(private_key_path), + role_alias_(role_alias), ca_cert_path_(ca_cert_path), stream_name_(stream_name) + { + } + + IotCertCredentialProvider(const std::string iot_get_credential_endpoint, const std::string cert_path, const std::string private_key_path, + const std::string role_alias, const std::string ca_cert_path, const std::string stream_name, uint64_t connectionTimeout, + uint64_t completionTimeout) + : iot_get_credential_endpoint_(iot_get_credential_endpoint), cert_path_(cert_path), private_key_path_(private_key_path), + role_alias_(role_alias), ca_cert_path_(ca_cert_path), stream_name_(stream_name), connectionTimeout_(connectionTimeout), + completionTimeout_(completionTimeout) + { + } + + void updateCredentials(Credentials& credentials) override + { + // no-op as credential update is handled in c producer iot auth callbacks + } + + callback_t getCallbacks(PClientCallbacks) override; +}; + +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com #endif //_IOT_CERT_CREDENTIAL_PROVIDER_H_ diff --git a/src/credential-providers/RotatingCredentialProvider.cpp b/src/credential-providers/RotatingCredentialProvider.cpp index 0801d6d5..de213d3c 100644 --- a/src/credential-providers/RotatingCredentialProvider.cpp +++ 
b/src/credential-providers/RotatingCredentialProvider.cpp @@ -8,12 +8,11 @@ LOGGER_TAG("com.amazonaws.kinesis.video"); using namespace com::amazonaws::kinesis::video; using namespace std; -RotatingCredentialProvider::callback_t RotatingCredentialProvider::getCallbacks(PClientCallbacks client_callbacks) { +RotatingCredentialProvider::callback_t RotatingCredentialProvider::getCallbacks(PClientCallbacks client_callbacks) +{ STATUS retStatus = STATUS_SUCCESS; - if (STATUS_FAILED(retStatus = createFileAuthCallbacks(client_callbacks, - STRING_TO_PCHAR(credential_file_path_), - &rotating_callbacks))) { + if (STATUS_FAILED(retStatus = createFileAuthCallbacks(client_callbacks, STRING_TO_PCHAR(credential_file_path_), &rotating_callbacks))) { std::stringstream status_strstrm; status_strstrm << std::hex << retStatus; LOG_AND_THROW("Unable to create Rotating Credential provider. Error status: 0x" + status_strstrm.str()); diff --git a/src/credential-providers/RotatingCredentialProvider.h b/src/credential-providers/RotatingCredentialProvider.h index e6a4d43a..df362d9f 100644 --- a/src/credential-providers/RotatingCredentialProvider.h +++ b/src/credential-providers/RotatingCredentialProvider.h @@ -3,23 +3,29 @@ #include "Auth.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { - class RotatingCredentialProvider : public CredentialProvider { - std::string credential_file_path_; - PAuthCallbacks rotating_callbacks = nullptr; - public: - RotatingCredentialProvider(std::string credential_file_path): credential_file_path_(credential_file_path) {} - void updateCredentials(Credentials& credentials) override { - // no-op as credential update is handled in c producer file auth callbacks - } - callback_t getCallbacks(PClientCallbacks) override; - }; +class RotatingCredentialProvider : public CredentialProvider { + std::string credential_file_path_; + PAuthCallbacks rotating_callbacks 
= nullptr; -} -} -} -} + public: + RotatingCredentialProvider(std::string credential_file_path) : credential_file_path_(credential_file_path) + { + } + void updateCredentials(Credentials& credentials) override + { + // no-op as credential update is handled in c producer file auth callbacks + } + callback_t getCallbacks(PClientCallbacks) override; +}; +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com #endif /* __ROTATING_CREDENTIAL_PROVIDER_H__ */ \ No newline at end of file diff --git a/src/gstreamer/KvsSinkClientCallbackProvider.cpp b/src/gstreamer/KvsSinkClientCallbackProvider.cpp index 6459d1dc..376a376c 100644 --- a/src/gstreamer/KvsSinkClientCallbackProvider.cpp +++ b/src/gstreamer/KvsSinkClientCallbackProvider.cpp @@ -4,7 +4,8 @@ LOGGER_TAG("com.amazonaws.kinesis.video.gstkvs"); using namespace com::amazonaws::kinesis::video; -STATUS KvsSinkClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) { +STATUS KvsSinkClientCallbackProvider::storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes) +{ UNUSED_PARAM(custom_handle); LOG_WARN("Reported storage overflow. 
Bytes remaining " << remaining_bytes); return STATUS_SUCCESS; diff --git a/src/gstreamer/KvsSinkClientCallbackProvider.h b/src/gstreamer/KvsSinkClientCallbackProvider.h index 157dcf26..a1c4fba9 100644 --- a/src/gstreamer/KvsSinkClientCallbackProvider.h +++ b/src/gstreamer/KvsSinkClientCallbackProvider.h @@ -4,26 +4,29 @@ #include #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { - - class KvsSinkClientCallbackProvider: public ClientCallbackProvider { - public: - - StorageOverflowPressureFunc getStorageOverflowPressureCallback() override { - return storageOverflowPressure; - } - - UINT64 getCallbackCustomData() override { - return reinterpret_cast (this); - } - - private: - static STATUS storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes); - }; -} -} -} -} - +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { + +class KvsSinkClientCallbackProvider : public ClientCallbackProvider { + public: + StorageOverflowPressureFunc getStorageOverflowPressureCallback() override + { + return storageOverflowPressure; + } + + UINT64 getCallbackCustomData() override + { + return reinterpret_cast(this); + } + + private: + static STATUS storageOverflowPressure(UINT64 custom_handle, UINT64 remaining_bytes); +}; +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com #endif //__KVS_SINK_CLIENT_CALLBACK_PROVIDER_H__ diff --git a/src/gstreamer/KvsSinkDeviceInfoProvider.cpp b/src/gstreamer/KvsSinkDeviceInfoProvider.cpp index 471915ae..a9c37c6f 100644 --- a/src/gstreamer/KvsSinkDeviceInfoProvider.cpp +++ b/src/gstreamer/KvsSinkDeviceInfoProvider.cpp @@ -2,7 +2,8 @@ using namespace com::amazonaws::kinesis::video; -KvsSinkDeviceInfoProvider::device_info_t KvsSinkDeviceInfoProvider::getDeviceInfo(){ +KvsSinkDeviceInfoProvider::device_info_t KvsSinkDeviceInfoProvider::getDeviceInfo() +{ auto device_info = DefaultDeviceInfoProvider::getDeviceInfo(); // Set the storage size to 
user specified size in MB device_info.storageInfo.storageSize = static_cast(storage_size_mb_) * 1024 * 1024; diff --git a/src/gstreamer/KvsSinkDeviceInfoProvider.h b/src/gstreamer/KvsSinkDeviceInfoProvider.h index d6d949c6..5333b973 100644 --- a/src/gstreamer/KvsSinkDeviceInfoProvider.h +++ b/src/gstreamer/KvsSinkDeviceInfoProvider.h @@ -3,26 +3,29 @@ #include -namespace com { namespace amazonaws { namespace kinesis { namespace video { - class KvsSinkDeviceInfoProvider: public DefaultDeviceInfoProvider { - uint64_t storage_size_mb_; - uint64_t stop_stream_timeout_sec_; - uint64_t service_call_connection_timeout_sec_; - uint64_t service_call_completion_timeout_sec_; - public: - KvsSinkDeviceInfoProvider(uint64_t storage_size_mb, - uint64_t stop_stream_timeout_sec, - uint64_t service_call_connection_timeout_sec, - uint64_t service_call_completion_timeout_sec): - storage_size_mb_(storage_size_mb), - stop_stream_timeout_sec_(stop_stream_timeout_sec), - service_call_connection_timeout_sec_(service_call_connection_timeout_sec), - service_call_completion_timeout_sec_(service_call_completion_timeout_sec) {} - device_info_t getDeviceInfo() override; - }; -} -} -} -} +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { +class KvsSinkDeviceInfoProvider : public DefaultDeviceInfoProvider { + uint64_t storage_size_mb_; + uint64_t stop_stream_timeout_sec_; + uint64_t service_call_connection_timeout_sec_; + uint64_t service_call_completion_timeout_sec_; + + public: + KvsSinkDeviceInfoProvider(uint64_t storage_size_mb, uint64_t stop_stream_timeout_sec, uint64_t service_call_connection_timeout_sec, + uint64_t service_call_completion_timeout_sec) + : storage_size_mb_(storage_size_mb), stop_stream_timeout_sec_(stop_stream_timeout_sec), + service_call_connection_timeout_sec_(service_call_connection_timeout_sec), + service_call_completion_timeout_sec_(service_call_completion_timeout_sec) + { + } + device_info_t getDeviceInfo() override; +}; +} // namespace 
video +} // namespace kinesis +} // namespace amazonaws +} // namespace com #endif //__KVS_SINK_DEVICE_INFO_PROVIDER_H__ diff --git a/src/gstreamer/KvsSinkStreamCallbackProvider.cpp b/src/gstreamer/KvsSinkStreamCallbackProvider.cpp index 3d1e5624..a57f1acb 100644 --- a/src/gstreamer/KvsSinkStreamCallbackProvider.cpp +++ b/src/gstreamer/KvsSinkStreamCallbackProvider.cpp @@ -5,34 +5,35 @@ LOGGER_TAG("com.amazonaws.kinesis.video.gstkvs"); using namespace com::amazonaws::kinesis::video; STATUS -KvsSinkStreamCallbackProvider::bufferDurationOverflowPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 remainDuration) { +KvsSinkStreamCallbackProvider::bufferDurationOverflowPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 remainDuration) +{ UNUSED_PARAM(custom_data); return STATUS_SUCCESS; } STATUS -KvsSinkStreamCallbackProvider::streamUnderflowReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle) { +KvsSinkStreamCallbackProvider::streamUnderflowReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle) +{ return STATUS_SUCCESS; } STATUS -KvsSinkStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 time_since_last_buffering_ack) { +KvsSinkStreamCallbackProvider::streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 time_since_last_buffering_ack) +{ UNUSED_PARAM(custom_data); - LOG_DEBUG("Reported streamConnectionStale callback for stream handle " << stream_handle << ". Time since last buffering ack in 100ns: " << time_since_last_buffering_ack); + LOG_DEBUG("Reported streamConnectionStale callback for stream handle " + << stream_handle << ". 
Time since last buffering ack in 100ns: " << time_since_last_buffering_ack); return STATUS_SUCCESS; } STATUS -KvsSinkStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, - UINT64 errored_timecode, - STATUS status_code) { +KvsSinkStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + UINT64 errored_timecode, STATUS status_code) +{ auto customDataObj = reinterpret_cast(custom_data); - LOG_ERROR("Reported stream error. Errored timecode: " << errored_timecode << " Status: 0x" << std::hex << status_code << " for " << customDataObj->kvs_sink->stream_name); - if(customDataObj != NULL && (!IS_RECOVERABLE_ERROR(status_code))) { + LOG_ERROR("Reported stream error. Errored timecode: " << errored_timecode << " Status: 0x" << std::hex << status_code << " for " + << customDataObj->kvs_sink->stream_name); + if (customDataObj != NULL && (!IS_RECOVERABLE_ERROR(status_code))) { customDataObj->stream_status = status_code; g_signal_emit(G_OBJECT(customDataObj->kvs_sink), customDataObj->err_signal_id, 0, status_code); } @@ -41,39 +42,37 @@ KvsSinkStreamCallbackProvider::streamErrorReportHandler(UINT64 custom_data, } STATUS -KvsSinkStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode) { +KvsSinkStreamCallbackProvider::droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode) +{ UNUSED_PARAM(custom_data); LOG_WARN("Reported droppedFrame callback for stream handle " << stream_handle << ". 
Dropped frame timecode in 100ns: " << dropped_frame_timecode); return STATUS_SUCCESS; // continue streaming } STATUS -KvsSinkStreamCallbackProvider::droppedFragmentReportHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 dropped_fragment_timecode) { +KvsSinkStreamCallbackProvider::droppedFragmentReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_fragment_timecode) +{ UNUSED_PARAM(custom_data); - LOG_WARN("Reported droppedFrame callback for stream handle " << stream_handle << ". Dropped fragment timecode in 100ns: " << dropped_fragment_timecode); + LOG_WARN("Reported droppedFrame callback for stream handle " << stream_handle + << ". Dropped fragment timecode in 100ns: " << dropped_fragment_timecode); return STATUS_SUCCESS; // continue streaming } STATUS -KvsSinkStreamCallbackProvider::streamLatencyPressureHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UINT64 current_buffer_duration) { +KvsSinkStreamCallbackProvider::streamLatencyPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 current_buffer_duration) +{ UNUSED_PARAM(custom_data); - LOG_WARN("Reported streamLatencyPressure callback for stream handle " << stream_handle << ". Current buffer duration in 100ns: " << current_buffer_duration); + LOG_WARN("Reported streamLatencyPressure callback for stream handle " << stream_handle + << ". 
Current buffer duration in 100ns: " << current_buffer_duration); return STATUS_SUCCESS; } STATUS -KvsSinkStreamCallbackProvider::streamClosedHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle) { +KvsSinkStreamCallbackProvider::streamClosedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle) +{ std::string streamName = ""; auto customDataObj = reinterpret_cast(custom_data); - if(customDataObj != NULL && customDataObj->kvs_sink != NULL) { + if (customDataObj != NULL && customDataObj->kvs_sink != NULL) { streamName = customDataObj->kvs_sink->stream_name; } LOG_DEBUG("[" << streamName << "]Reported streamClosed callback"); @@ -81,18 +80,16 @@ KvsSinkStreamCallbackProvider::streamClosedHandler(UINT64 custom_data, } STATUS -KvsSinkStreamCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, - STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, - PFragmentAck pFragmentAck) { +KvsSinkStreamCallbackProvider::fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, + PFragmentAck pFragmentAck) +{ auto customDataObj = reinterpret_cast(custom_data); - if(customDataObj != NULL && customDataObj->kvs_sink != NULL && pFragmentAck != NULL) { - LOG_TRACE("[" << customDataObj->kvs_sink->stream_name << "] Ack timestamp for " << pFragmentAck->ackType << " is " << pFragmentAck->timestamp); + if (customDataObj != NULL && customDataObj->kvs_sink != NULL && pFragmentAck != NULL) { + LOG_TRACE("[" << customDataObj->kvs_sink->stream_name << "] Ack timestamp for " << pFragmentAck->ackType << " is " + << pFragmentAck->timestamp); g_signal_emit(G_OBJECT(customDataObj->kvs_sink), customDataObj->ack_signal_id, 0, pFragmentAck); } return STATUS_SUCCESS; } - - diff --git a/src/gstreamer/KvsSinkStreamCallbackProvider.h b/src/gstreamer/KvsSinkStreamCallbackProvider.h index 282e1e60..6b67e7ea 100644 --- a/src/gstreamer/KvsSinkStreamCallbackProvider.h +++ 
b/src/gstreamer/KvsSinkStreamCallbackProvider.h @@ -4,90 +4,91 @@ #include "gstkvssink.h" #include "Logger.h" -namespace com { namespace amazonaws { namespace kinesis { namespace video { - class KvsSinkStreamCallbackProvider : public StreamCallbackProvider { - std::shared_ptr data; - public: - KvsSinkStreamCallbackProvider(std::shared_ptr data) : data(data) {} - - UINT64 getCallbackCustomData() override { - return reinterpret_cast (data.get()); - } - - StreamUnderflowReportFunc getStreamUnderflowReportCallback() override { - return streamUnderflowReportHandler; - } - - BufferDurationOverflowPressureFunc getBufferDurationOverFlowCallback() override { - return bufferDurationOverflowPressureHandler; - } - - StreamConnectionStaleFunc getStreamConnectionStaleCallback() override { - return streamConnectionStaleHandler; - }; - - StreamErrorReportFunc getStreamErrorReportCallback() override { - return streamErrorReportHandler; - }; +namespace com { +namespace amazonaws { +namespace kinesis { +namespace video { +class KvsSinkStreamCallbackProvider : public StreamCallbackProvider { + std::shared_ptr data; + + public: + KvsSinkStreamCallbackProvider(std::shared_ptr data) : data(data) + { + } + + UINT64 getCallbackCustomData() override + { + return reinterpret_cast(data.get()); + } + + StreamUnderflowReportFunc getStreamUnderflowReportCallback() override + { + return streamUnderflowReportHandler; + } + + BufferDurationOverflowPressureFunc getBufferDurationOverFlowCallback() override + { + return bufferDurationOverflowPressureHandler; + } + + StreamConnectionStaleFunc getStreamConnectionStaleCallback() override + { + return streamConnectionStaleHandler; + }; - DroppedFrameReportFunc getDroppedFrameReportCallback() override { - return droppedFrameReportHandler; - }; + StreamErrorReportFunc getStreamErrorReportCallback() override + { + return streamErrorReportHandler; + }; - StreamLatencyPressureFunc getStreamLatencyPressureCallback() override { - return 
streamLatencyPressureHandler; - } + DroppedFrameReportFunc getDroppedFrameReportCallback() override + { + return droppedFrameReportHandler; + }; - DroppedFragmentReportFunc getDroppedFragmentReportCallback() override { - return droppedFragmentReportHandler; - } + StreamLatencyPressureFunc getStreamLatencyPressureCallback() override + { + return streamLatencyPressureHandler; + } - StreamClosedFunc getStreamClosedCallback() override { - return streamClosedHandler; - } + DroppedFragmentReportFunc getDroppedFragmentReportCallback() override + { + return droppedFragmentReportHandler; + } - FragmentAckReceivedFunc getFragmentAckReceivedCallback() override{ - return fragmentAckReceivedHandler; - } + StreamClosedFunc getStreamClosedCallback() override + { + return streamClosedHandler; + } - private: - static STATUS - streamUnderflowReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle); + FragmentAckReceivedFunc getFragmentAckReceivedCallback() override + { + return fragmentAckReceivedHandler; + } - static STATUS - bufferDurationOverflowPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 remainDuration); + private: + static STATUS streamUnderflowReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle); - static STATUS - streamLatencyPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 current_buffer_duration); + static STATUS bufferDurationOverflowPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 remainDuration); - static STATUS - streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 last_buffering_ack); + static STATUS streamLatencyPressureHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 current_buffer_duration); - static STATUS - droppedFragmentReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 fragment_timecode); + static STATUS streamConnectionStaleHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 
last_buffering_ack); - static STATUS - streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, - STATUS status_code); + static STATUS droppedFragmentReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 fragment_timecode); - static STATUS - droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, - UINT64 dropped_frame_timecode); + static STATUS streamErrorReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, UINT64 errored_timecode, + STATUS status_code); - static STATUS - streamClosedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle); + static STATUS droppedFrameReportHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UINT64 dropped_frame_timecode); - static STATUS - fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck); - }; -} -} -} -} + static STATUS streamClosedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle); + static STATUS fragmentAckReceivedHandler(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle, PFragmentAck pFragmentAck); +}; +} // namespace video +} // namespace kinesis +} // namespace amazonaws +} // namespace com #endif //__KVS_SINK_STREAM_CALLBACK_PROVIDER_H__ diff --git a/src/gstreamer/Util/KvsSinkUtil.cpp b/src/gstreamer/Util/KvsSinkUtil.cpp index ed99a05c..230c9e20 100644 --- a/src/gstreamer/Util/KvsSinkUtil.cpp +++ b/src/gstreamer/Util/KvsSinkUtil.cpp @@ -4,23 +4,17 @@ LOGGER_TAG("com.amazonaws.kinesis.video.gstkvs"); -static const std::set iot_param_set = {IOT_GET_CREDENTIAL_ENDPOINT, - CERTIFICATE_PATH, - PRIVATE_KEY_PATH, - CA_CERT_PATH, - ROLE_ALIASES, - IOT_THING_NAME, - IOT_CONNECTION_TIMEOUT, - IOT_COMPLETION_TIMEOUT}; +static const std::set iot_param_set = { + IOT_GET_CREDENTIAL_ENDPOINT, CERTIFICATE_PATH, 
PRIVATE_KEY_PATH, CA_CERT_PATH, ROLE_ALIASES, IOT_THING_NAME, + IOT_CONNECTION_TIMEOUT, IOT_COMPLETION_TIMEOUT}; static const time_t time_point = std::time(NULL); -static const long timezone_offset = - static_cast (std::mktime(std::gmtime(&time_point)) - std::mktime(std::localtime(&time_point))); +static const long timezone_offset = static_cast(std::mktime(std::gmtime(&time_point)) - std::mktime(std::localtime(&time_point))); - -gboolean setParams(GQuark field_id, const GValue *value, gpointer g_ptr_user_map) { - std::map *target_map = reinterpret_cast *>(g_ptr_user_map); - std::string field_str = std::string(g_quark_to_string (field_id)); +gboolean setParams(GQuark field_id, const GValue* value, gpointer g_ptr_user_map) +{ + std::map* target_map = reinterpret_cast*>(g_ptr_user_map); + std::string field_str = std::string(g_quark_to_string(field_id)); std::string value_str; gboolean ret = TRUE; @@ -38,7 +32,7 @@ gboolean setParams(GQuark field_id, const GValue *value, gpointer g_ptr_user_map goto CleanUp; } - target_map->insert(std::pair(field_str, value_str)); + target_map->insert(std::pair(field_str, value_str)); CleanUp: return ret; @@ -46,16 +40,18 @@ gboolean setParams(GQuark field_id, const GValue *value, gpointer g_ptr_user_map namespace kvs_sink_util { -gboolean gstructToMap(GstStructure *g_struct, std::map *user_map) { +gboolean gstructToMap(GstStructure* g_struct, std::map* user_map) +{ std::map temp; - gboolean ret = gst_structure_foreach (g_struct, setParams, user_map); + gboolean ret = gst_structure_foreach(g_struct, setParams, user_map); if (ret) { // if conversion failed, user_map will be unchanged user_map->insert(temp.begin(), temp.end()); } return ret; } -gboolean parseIotCredentialGstructure(GstStructure *g_struct, std::map &iot_cert_params) { +gboolean parseIotCredentialGstructure(GstStructure* g_struct, std::map& iot_cert_params) +{ gboolean ret; std::set params_key_set; @@ -65,33 +61,30 @@ gboolean parseIotCredentialGstructure(GstStructure 
*g_struct, std::map::iterator it = iot_cert_params.begin(); it != iot_cert_params.end(); - ++it) { + for (std::map::iterator it = iot_cert_params.begin(); it != iot_cert_params.end(); ++it) { params_key_set.insert(it->first); } - if(params_key_set.count(IOT_THING_NAME) == 0) { + if (params_key_set.count(IOT_THING_NAME) == 0) { params_key_set.insert(IOT_THING_NAME); } - if(params_key_set.count(IOT_CONNECTION_TIMEOUT) == 0) { + if (params_key_set.count(IOT_CONNECTION_TIMEOUT) == 0) { params_key_set.insert(IOT_CONNECTION_TIMEOUT); } - if(params_key_set.count(IOT_COMPLETION_TIMEOUT) == 0) { + if (params_key_set.count(IOT_COMPLETION_TIMEOUT) == 0) { params_key_set.insert(IOT_COMPLETION_TIMEOUT); } - if (params_key_set != iot_param_set) { std::ostringstream ostream; std::copy(iot_param_set.begin(), iot_param_set.end(), std::ostream_iterator(ostream, ",")); - LOG_ERROR("Missing parameters for iot certificate credential. The following keys are expected" - << ostream.str()); + LOG_ERROR("Missing parameters for iot certificate credential. 
The following keys are expected" << ostream.str()); ret = FALSE; } CleanUp: return ret; } -} +} // namespace kvs_sink_util diff --git a/src/gstreamer/Util/KvsSinkUtil.h b/src/gstreamer/Util/KvsSinkUtil.h index a4476490..8cd51dcc 100644 --- a/src/gstreamer/Util/KvsSinkUtil.h +++ b/src/gstreamer/Util/KvsSinkUtil.h @@ -9,21 +9,20 @@ #include #define IOT_GET_CREDENTIAL_ENDPOINT "endpoint" -#define CERTIFICATE_PATH "cert-path" -#define PRIVATE_KEY_PATH "key-path" -#define CA_CERT_PATH "ca-path" -#define ROLE_ALIASES "role-aliases" -#define IOT_THING_NAME "iot-thing-name" -#define IOT_CONNECTION_TIMEOUT "connection-timeout" -#define IOT_COMPLETION_TIMEOUT "completion-timeout" +#define CERTIFICATE_PATH "cert-path" +#define PRIVATE_KEY_PATH "key-path" +#define CA_CERT_PATH "ca-path" +#define ROLE_ALIASES "role-aliases" +#define IOT_THING_NAME "iot-thing-name" +#define IOT_CONNECTION_TIMEOUT "connection-timeout" +#define IOT_COMPLETION_TIMEOUT "completion-timeout" -namespace kvs_sink_util{ +namespace kvs_sink_util { - gboolean gstructToMap(GstStructure *g_struct, std::map *user_map); +gboolean gstructToMap(GstStructure* g_struct, std::map* user_map); - gboolean parseIotCredentialGstructure(GstStructure *g_struct, - std::map &iot_cert_params); +gboolean parseIotCredentialGstructure(GstStructure* g_struct, std::map& iot_cert_params); -} +} // namespace kvs_sink_util #endif //__KVS_SINK_UTIL_H__ \ No newline at end of file diff --git a/src/gstreamer/gstkvssink.cpp b/src/gstreamer/gstkvssink.cpp index 7f74748f..814cf532 100644 --- a/src/gstreamer/gstkvssink.cpp +++ b/src/gstreamer/gstkvssink.cpp @@ -4,29 +4,29 @@ // // Portions Copyright /* -* GStreamer -* Copyright (C) 2005 Thomas Vander Stichele -* Copyright (C) 2005 Ronald S. 
Bultje -* Copyright (C) 2017 <> -* -* Permission is hereby granted, free of charge, to any person obtaining a -* copy of this software and associated documentation files (the "Software"), -* to deal in the Software without restriction, including without limitation -* the rights to use, copy, modify, merge, publish, distribute, sublicense, -* and/or sell copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following conditions: -* -* The above copyright notice and this permission notice shall be included in -* all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -* DEALINGS IN THE SOFTWARE. -*/ + * GStreamer + * Copyright (C) 2005 Thomas Vander Stichele + * Copyright (C) 2005 Ronald S. Bultje + * Copyright (C) 2017 <> + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ /** * SECTION:element-kvs @@ -49,7 +49,7 @@ */ #ifdef HAVE_CONFIG_H -# include +#include #endif #include "gstkvssink.h" @@ -68,7 +68,7 @@ using namespace std; using namespace std::chrono; using namespace log4cplus; -GST_DEBUG_CATEGORY_STATIC (gst_kvs_sink_debug); +GST_DEBUG_CATEGORY_STATIC(gst_kvs_sink_debug); #define GST_CAT_DEFAULT gst_kvs_sink_debug // default starting delay between reties when trying to create and start kvs stream. @@ -76,72 +76,72 @@ GST_DEBUG_CATEGORY_STATIC (gst_kvs_sink_debug); // default number of retries when trying to create and start kvs stream. 
#define DEFAULT_TOTAL_RETRY 4 -#define DEFAULT_STREAM_NAME "DEFAULT_STREAM" -#define DEFAULT_RETENTION_PERIOD_HOURS 2 -#define DEFAULT_KMS_KEY_ID "" -#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME -#define DEFAULT_MAX_LATENCY_SECONDS 60 +#define DEFAULT_STREAM_NAME "DEFAULT_STREAM" +#define DEFAULT_RETENTION_PERIOD_HOURS 2 +#define DEFAULT_KMS_KEY_ID "" +#define DEFAULT_STREAMING_TYPE STREAMING_TYPE_REALTIME +#define DEFAULT_MAX_LATENCY_SECONDS 60 #define DEFAULT_FRAGMENT_DURATION_MILLISECONDS 2000 -#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 -#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE -#define DEFAULT_FRAME_TIMECODES TRUE -#define DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE -#define DEFAULT_FRAGMENT_ACKS TRUE -#define DEFAULT_RESTART_ON_ERROR TRUE -#define DEFAULT_ALLOW_CREATE_STREAM TRUE -#define DEFAULT_RECALCULATE_METRICS TRUE -#define DEFAULT_DISABLE_BUFFER_CLIPPING FALSE -#define DEFAULT_USE_ORIGINAL_PTS FALSE -#define DEFAULT_ENABLE_METRICS FALSE -#define DEFAULT_STREAM_FRAMERATE 25 -#define DEFAULT_STREAM_FRAMERATE_HIGH_DENSITY 100 -#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) -#define DEFAULT_BUFFER_DURATION_SECONDS 120 -#define DEFAULT_REPLAY_DURATION_SECONDS 40 -#define DEFAULT_CONNECTION_STALENESS_SECONDS 60 -#define DEFAULT_CODEC_ID_H264 "V_MPEG4/ISO/AVC" -#define DEFAULT_CODEC_ID_H265 "V_MPEGH/ISO/HEVC" -#define DEFAULT_TRACKNAME "kinesis_video" -#define DEFAULT_ACCESS_KEY "access_key" -#define DEFAULT_SECRET_KEY "secret_key" -#define DEFAULT_SESSION_TOKEN "session_token" -#define DEFAULT_REGION "us-west-2" -#define DEFAULT_ROTATION_PERIOD_SECONDS 3600 -#define DEFAULT_LOG_FILE_PATH "../kvs_log_configuration" -#define DEFAULT_STORAGE_SIZE_MB 128 -#define DEFAULT_STOP_STREAM_TIMEOUT_SEC 120 +#define DEFAULT_TIMECODE_SCALE_MILLISECONDS 1 +#define DEFAULT_KEY_FRAME_FRAGMENTATION TRUE +#define DEFAULT_FRAME_TIMECODES TRUE +#define DEFAULT_ABSOLUTE_FRAGMENT_TIMES TRUE +#define DEFAULT_FRAGMENT_ACKS TRUE +#define DEFAULT_RESTART_ON_ERROR TRUE 
+#define DEFAULT_ALLOW_CREATE_STREAM TRUE +#define DEFAULT_RECALCULATE_METRICS TRUE +#define DEFAULT_DISABLE_BUFFER_CLIPPING FALSE +#define DEFAULT_USE_ORIGINAL_PTS FALSE +#define DEFAULT_ENABLE_METRICS FALSE +#define DEFAULT_STREAM_FRAMERATE 25 +#define DEFAULT_STREAM_FRAMERATE_HIGH_DENSITY 100 +#define DEFAULT_AVG_BANDWIDTH_BPS (4 * 1024 * 1024) +#define DEFAULT_BUFFER_DURATION_SECONDS 120 +#define DEFAULT_REPLAY_DURATION_SECONDS 40 +#define DEFAULT_CONNECTION_STALENESS_SECONDS 60 +#define DEFAULT_CODEC_ID_H264 "V_MPEG4/ISO/AVC" +#define DEFAULT_CODEC_ID_H265 "V_MPEGH/ISO/HEVC" +#define DEFAULT_TRACKNAME "kinesis_video" +#define DEFAULT_ACCESS_KEY "access_key" +#define DEFAULT_SECRET_KEY "secret_key" +#define DEFAULT_SESSION_TOKEN "session_token" +#define DEFAULT_REGION "us-west-2" +#define DEFAULT_ROTATION_PERIOD_SECONDS 3600 +#define DEFAULT_LOG_FILE_PATH "../kvs_log_configuration" +#define DEFAULT_STORAGE_SIZE_MB 128 +#define DEFAULT_STOP_STREAM_TIMEOUT_SEC 120 #define DEFAULT_SERVICE_CONNECTION_TIMEOUT_SEC 5 #define DEFAULT_SERVICE_COMPLETION_TIMEOUT_SEC 10 -#define DEFAULT_IOT_CONNECTION_TIMEOUT_SEC 3 -#define DEFAULT_IOT_COMPLETION_TIMEOUT_SEC 5 -#define DEFAULT_CREDENTIAL_FILE_PATH ".kvs/credential" -#define DEFAULT_FRAME_DURATION_MS 2 +#define DEFAULT_IOT_CONNECTION_TIMEOUT_SEC 3 +#define DEFAULT_IOT_COMPLETION_TIMEOUT_SEC 5 +#define DEFAULT_CREDENTIAL_FILE_PATH ".kvs/credential" +#define DEFAULT_FRAME_DURATION_MS 2 #define KVS_ADD_METADATA_G_STRUCT_NAME "kvs-add-metadata" -#define KVS_ADD_METADATA_NAME "name" -#define KVS_ADD_METADATA_VALUE "value" -#define KVS_ADD_METADATA_PERSISTENT "persist" -#define KVS_CLIENT_USER_AGENT_NAME "AWS-SDK-KVS-CPP-CLIENT" - -#define DEFAULT_AUDIO_TRACK_NAME "audio" -#define DEFAULT_AUDIO_CODEC_ID_AAC "A_AAC" -#define DEFAULT_AUDIO_CODEC_ID_PCM "A_MS/ACM" -#define KVS_SINK_DEFAULT_TRACKID 1 +#define KVS_ADD_METADATA_NAME "name" +#define KVS_ADD_METADATA_VALUE "value" +#define KVS_ADD_METADATA_PERSISTENT "persist" +#define 
KVS_CLIENT_USER_AGENT_NAME "AWS-SDK-KVS-CPP-CLIENT" + +#define DEFAULT_AUDIO_TRACK_NAME "audio" +#define DEFAULT_AUDIO_CODEC_ID_AAC "A_AAC" +#define DEFAULT_AUDIO_CODEC_ID_PCM "A_MS/ACM" +#define KVS_SINK_DEFAULT_TRACKID 1 #define KVS_SINK_DEFAULT_AUDIO_TRACKID 2 -#define GSTREAMER_MEDIA_TYPE_H265 "video/x-h265" -#define GSTREAMER_MEDIA_TYPE_H264 "video/x-h264" -#define GSTREAMER_MEDIA_TYPE_AAC "audio/mpeg" -#define GSTREAMER_MEDIA_TYPE_MULAW "audio/x-mulaw" -#define GSTREAMER_MEDIA_TYPE_ALAW "audio/x-alaw" +#define GSTREAMER_MEDIA_TYPE_H265 "video/x-h265" +#define GSTREAMER_MEDIA_TYPE_H264 "video/x-h264" +#define GSTREAMER_MEDIA_TYPE_AAC "audio/mpeg" +#define GSTREAMER_MEDIA_TYPE_MULAW "audio/x-mulaw" +#define GSTREAMER_MEDIA_TYPE_ALAW "audio/x-alaw" -#define MAX_GSTREAMER_MEDIA_TYPE_LEN 16 +#define MAX_GSTREAMER_MEDIA_TYPE_LEN 16 namespace KvsSinkSignals { - guint err_signal_id; - guint ack_signal_id; - guint metric_signal_id; -}; +guint err_signal_id; +guint ack_signal_id; +guint metric_signal_id; +}; // namespace KvsSinkSignals enum { PROP_0, @@ -187,94 +187,74 @@ enum { }; #define GST_TYPE_KVS_SINK_STREAMING_TYPE (gst_kvs_sink_streaming_type_get_type()) -static GType -gst_kvs_sink_streaming_type_get_type (void) +static GType gst_kvs_sink_streaming_type_get_type(void) { static GType kvssink_streaming_type_type = 0; static const GEnumValue kvssink_streaming_type[] = { - {STREAMING_TYPE_REALTIME, "streaming type realtime", "realtime"}, - {STREAMING_TYPE_NEAR_REALTIME, "streaming type near realtime", "near-realtime"}, - {STREAMING_TYPE_OFFLINE, "streaming type offline", "offline"}, - {0, NULL, NULL}, + {STREAMING_TYPE_REALTIME, "streaming type realtime", "realtime"}, + {STREAMING_TYPE_NEAR_REALTIME, "streaming type near realtime", "near-realtime"}, + {STREAMING_TYPE_OFFLINE, "streaming type offline", "offline"}, + {0, NULL, NULL}, }; if (!kvssink_streaming_type_type) { - kvssink_streaming_type_type = - g_enum_register_static ("GstKvsSinkStreamingType", 
kvssink_streaming_type); + kvssink_streaming_type_type = g_enum_register_static("GstKvsSinkStreamingType", kvssink_streaming_type); } return kvssink_streaming_type_type; } -static GstStaticPadTemplate audiosink_templ = - GST_STATIC_PAD_TEMPLATE ("audio_%u", - GST_PAD_SINK, - GST_PAD_REQUEST, - GST_STATIC_CAPS ( - "audio/mpeg, mpegversion = (int) { 2, 4 }, stream-format = (string) raw, channels = (int) [ 1, MAX ], rate = (int) [ 1, MAX ] ; " \ - "audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) [ 8000, 192000 ] ; " \ - "audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) [ 8000, 192000 ] ; " - ) - ); - -static GstStaticPadTemplate videosink_templ = - GST_STATIC_PAD_TEMPLATE ("video_%u", - GST_PAD_SINK, - GST_PAD_REQUEST, - GST_STATIC_CAPS ( - "video/x-h264, stream-format = (string) avc, alignment = (string) au, width = (int) [ 16, MAX ], height = (int) [ 16, MAX ] ; " \ - "video/x-h265, alignment = (string) au, width = (int) [ 16, MAX ], height = (int) [ 16, MAX ] ;" - ) - ); - -#define _do_init GST_DEBUG_CATEGORY_INIT (gst_kvs_sink_debug, "kvssink", 0, "KVS sink plug-in"); +static GstStaticPadTemplate audiosink_templ = GST_STATIC_PAD_TEMPLATE( + "audio_%u", GST_PAD_SINK, GST_PAD_REQUEST, + GST_STATIC_CAPS("audio/mpeg, mpegversion = (int) { 2, 4 }, stream-format = (string) raw, channels = (int) [ 1, MAX ], rate = (int) [ 1, MAX ] ; " + "audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) [ 8000, 192000 ] ; " + "audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) [ 8000, 192000 ] ; ")); -#define gst_kvs_sink_parent_class parent_class +static GstStaticPadTemplate videosink_templ = GST_STATIC_PAD_TEMPLATE( + "video_%u", GST_PAD_SINK, GST_PAD_REQUEST, + GST_STATIC_CAPS("video/x-h264, stream-format = (string) avc, alignment = (string) au, width = (int) [ 16, MAX ], height = (int) [ 16, MAX ] ; " + "video/x-h265, alignment = (string) au, width = (int) [ 16, MAX ], height = (int) [ 16, MAX ] ;")); + +#define _do_init GST_DEBUG_CATEGORY_INIT(gst_kvs_sink_debug, 
"kvssink", 0, "KVS sink plug-in"); -G_DEFINE_TYPE_WITH_CODE (GstKvsSink, gst_kvs_sink, GST_TYPE_ELEMENT, _do_init); +#define gst_kvs_sink_parent_class parent_class +G_DEFINE_TYPE_WITH_CODE(GstKvsSink, gst_kvs_sink, GST_TYPE_ELEMENT, _do_init); -static void gst_kvs_sink_set_property(GObject *object, guint prop_id, - const GValue *value, GParamSpec *pspec); +static void gst_kvs_sink_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec); -static void gst_kvs_sink_get_property(GObject *object, guint prop_id, - GValue *value, GParamSpec *pspec); +static void gst_kvs_sink_get_property(GObject* object, guint prop_id, GValue* value, GParamSpec* pspec); -static void gst_kvs_sink_finalize(GObject *obj); +static void gst_kvs_sink_finalize(GObject* obj); -static GstStateChangeReturn gst_kvs_sink_change_state(GstElement *element, - GstStateChange transition); +static GstStateChangeReturn gst_kvs_sink_change_state(GstElement* element, GstStateChange transition); /* collectpad callback */ -static GstFlowReturn gst_kvs_sink_handle_buffer (GstCollectPads * pads, - GstCollectData * data, GstBuffer * buf, gpointer user_data); -static gboolean gst_kvs_sink_handle_sink_event (GstCollectPads * pads, - GstCollectData * data, GstEvent * event, gpointer user_data); +static GstFlowReturn gst_kvs_sink_handle_buffer(GstCollectPads* pads, GstCollectData* data, GstBuffer* buf, gpointer user_data); +static gboolean gst_kvs_sink_handle_sink_event(GstCollectPads* pads, GstCollectData* data, GstEvent* event, gpointer user_data); /* Request pad callback */ -static GstPad* gst_kvs_sink_request_new_pad (GstElement *element, GstPadTemplate *templ, - const gchar* name, const GstCaps *caps); -static void gst_kvs_sink_release_pad (GstElement *element, GstPad *pad); +static GstPad* gst_kvs_sink_request_new_pad(GstElement* element, GstPadTemplate* templ, const gchar* name, const GstCaps* caps); +static void gst_kvs_sink_release_pad(GstElement* element, GstPad* pad); -void 
closed(UINT64 custom_data, STREAM_HANDLE stream_handle, UPLOAD_HANDLE upload_handle) { - LOG_INFO("Closed connection with stream handle "<data; - unique_ptr device_info_provider(new KvsSinkDeviceInfoProvider(kvssink->storage_size, - kvssink->stop_stream_timeout, - kvssink->service_connection_timeout, - kvssink->service_completion_timeout)); + unique_ptr device_info_provider(new KvsSinkDeviceInfoProvider( + kvssink->storage_size, kvssink->stop_stream_timeout, kvssink->service_connection_timeout, kvssink->service_completion_timeout)); unique_ptr client_callback_provider(new KvsSinkClientCallbackProvider()); unique_ptr stream_callback_provider(new KvsSinkStreamCallbackProvider(data)); kvssink->data->kvs_sink = kvssink; - char const *access_key; - char const *secret_key; - char const *session_token; - char const *default_region; - char const *control_plane_uri; + char const* access_key; + char const* secret_key; + char const* session_token; + char const* default_region; + char const* control_plane_uri; string access_key_str; string secret_key_str; string session_token_str; @@ -284,16 +264,15 @@ void kinesis_video_producer_init(GstKvsSink *kvssink) // This needs to happen after we've read in ALL of the properties if (!kvssink->disable_buffer_clipping) { - gst_collect_pads_set_clip_function(kvssink->collect, - GST_DEBUG_FUNCPTR(gst_collect_pads_clip_running_time), kvssink); + gst_collect_pads_set_clip_function(kvssink->collect, GST_DEBUG_FUNCPTR(gst_collect_pads_clip_running_time), kvssink); } kvssink->data->kvs_sink = kvssink; if (0 == strcmp(kvssink->access_key, DEFAULT_ACCESS_KEY)) { // if no static credential is available in plugin property. - if (nullptr == (access_key = getenv(ACCESS_KEY_ENV_VAR)) - || nullptr == (secret_key = getenv(SECRET_KEY_ENV_VAR))) { // if no static credential is available in env var. - credential_is_static = false; // No static credential available. 
+ if (nullptr == (access_key = getenv(ACCESS_KEY_ENV_VAR)) || + nullptr == (secret_key = getenv(SECRET_KEY_ENV_VAR))) { // if no static credential is available in env var. + credential_is_static = false; // No static credential available. access_key_str = ""; secret_key_str = ""; } else { @@ -331,60 +310,50 @@ void kinesis_video_producer_init(GstKvsSink *kvssink) std::map iot_cert_params; uint64_t iot_connection_timeout = DEFAULT_IOT_CONNECTION_TIMEOUT_SEC * HUNDREDS_OF_NANOS_IN_A_SECOND; uint64_t iot_completion_timeout = DEFAULT_IOT_COMPLETION_TIMEOUT_SEC * HUNDREDS_OF_NANOS_IN_A_SECOND; - if (!kvs_sink_util::parseIotCredentialGstructure(kvssink->iot_certificate, iot_cert_params)){ + if (!kvs_sink_util::parseIotCredentialGstructure(kvssink->iot_certificate, iot_cert_params)) { LOG_AND_THROW("Failed to parse Iot credentials for " << kvssink->stream_name); } std::map::iterator it = iot_cert_params.find(IOT_THING_NAME); if (it == iot_cert_params.end()) { - iot_cert_params.insert( std::pair(IOT_THING_NAME, kvssink->stream_name) ); + iot_cert_params.insert(std::pair(IOT_THING_NAME, kvssink->stream_name)); } - if(!iot_cert_params[IOT_CONNECTION_TIMEOUT].empty()) { + if (!iot_cert_params[IOT_CONNECTION_TIMEOUT].empty()) { iot_connection_timeout = std::stoull(iot_cert_params[IOT_CONNECTION_TIMEOUT]) * HUNDREDS_OF_NANOS_IN_A_SECOND; } - if(!iot_cert_params[IOT_COMPLETION_TIMEOUT].empty()) { + if (!iot_cert_params[IOT_COMPLETION_TIMEOUT].empty()) { iot_completion_timeout = std::stoull(iot_cert_params[IOT_COMPLETION_TIMEOUT]) * HUNDREDS_OF_NANOS_IN_A_SECOND; } - credential_provider.reset(new IotCertCredentialProvider(iot_cert_params[IOT_GET_CREDENTIAL_ENDPOINT], - iot_cert_params[CERTIFICATE_PATH], - iot_cert_params[PRIVATE_KEY_PATH], - iot_cert_params[ROLE_ALIASES], - iot_cert_params[CA_CERT_PATH], - iot_cert_params[IOT_THING_NAME], - iot_connection_timeout, - iot_completion_timeout)); + credential_provider.reset(new 
IotCertCredentialProvider(iot_cert_params[IOT_GET_CREDENTIAL_ENDPOINT], iot_cert_params[CERTIFICATE_PATH], + iot_cert_params[PRIVATE_KEY_PATH], iot_cert_params[ROLE_ALIASES], + iot_cert_params[CA_CERT_PATH], iot_cert_params[IOT_THING_NAME], + iot_connection_timeout, iot_completion_timeout)); } else if (credential_is_static) { - kvssink->credentials_.reset(new Credentials(access_key_str, - secret_key_str, - session_token_str, - std::chrono::seconds(DEFAULT_ROTATION_PERIOD_SECONDS))); + kvssink->credentials_.reset( + new Credentials(access_key_str, secret_key_str, session_token_str, std::chrono::seconds(DEFAULT_ROTATION_PERIOD_SECONDS))); credential_provider.reset(new StaticCredentialProvider(*kvssink->credentials_)); } else { credential_provider.reset(new RotatingCredentialProvider(kvssink->credential_file_path)); } // Handle env for providing CP URL - if(nullptr != (control_plane_uri = getenv(CONTROL_PLANE_URI_ENV_VAR))) { + if (nullptr != (control_plane_uri = getenv(CONTROL_PLANE_URI_ENV_VAR))) { LOG_INFO("Getting URL from env for " << kvssink->stream_name); control_plane_uri_str = string(control_plane_uri); } LOG_INFO("User agent string: " << kvssink->user_agent); - data->kinesis_video_producer = KinesisVideoProducer::createSync(std::move(device_info_provider), - std::move(client_callback_provider), - std::move(stream_callback_provider), - std::move(credential_provider), - API_CALL_CACHE_TYPE_ALL, - region_str, - control_plane_uri_str, - kvssink->user_agent); + data->kinesis_video_producer = KinesisVideoProducer::createSync(std::move(device_info_provider), std::move(client_callback_provider), + std::move(stream_callback_provider), std::move(credential_provider), + API_CALL_CACHE_TYPE_ALL, region_str, control_plane_uri_str, kvssink->user_agent); } -void create_kinesis_video_stream(GstKvsSink *kvssink) { +void create_kinesis_video_stream(GstKvsSink* kvssink) +{ auto data = kvssink->data; - map *p_stream_tags = nullptr; + map* p_stream_tags = nullptr; map 
stream_tags; if (kvssink->stream_tags) { gboolean ret; @@ -423,35 +392,20 @@ void create_kinesis_video_stream(GstKvsSink *kvssink) { break; } - unique_ptr stream_definition(new StreamDefinition(kvssink->stream_name, - hours(kvssink->retention_period_hours), - p_stream_tags, - kvssink->kms_key_id, - kvssink->streaming_type, - kvssink->content_type, - duration_cast (seconds(kvssink->max_latency_seconds)), - milliseconds(kvssink->fragment_duration_miliseconds), - milliseconds(kvssink->timecode_scale_milliseconds), - kvssink->key_frame_fragmentation,// Construct a fragment at each key frame - kvssink->frame_timecodes,// Use provided frame timecode - kvssink->absolute_fragment_times,// Relative timecode - kvssink->fragment_acks,// Ack on fragment is enabled - kvssink->restart_on_error,// SDK will restart when error happens - kvssink->recalculate_metrics,// recalculate_metrics - kvssink->allow_create_stream, // allow stream creation if stream does not exist - 0, - kvssink->framerate, - kvssink->avg_bandwidth_bps, - seconds(kvssink->buffer_duration_seconds), - seconds(kvssink->replay_duration_seconds), - seconds(kvssink->connection_staleness_seconds), - kvssink->codec_id, - kvssink->track_name, - nullptr, - 0, - kvssink->track_info_type, - vector(), - KVS_SINK_DEFAULT_TRACKID)); + unique_ptr stream_definition( + new StreamDefinition(kvssink->stream_name, hours(kvssink->retention_period_hours), p_stream_tags, kvssink->kms_key_id, + kvssink->streaming_type, kvssink->content_type, duration_cast(seconds(kvssink->max_latency_seconds)), + milliseconds(kvssink->fragment_duration_miliseconds), milliseconds(kvssink->timecode_scale_milliseconds), + kvssink->key_frame_fragmentation, // Construct a fragment at each key frame + kvssink->frame_timecodes, // Use provided frame timecode + kvssink->absolute_fragment_times, // Relative timecode + kvssink->fragment_acks, // Ack on fragment is enabled + kvssink->restart_on_error, // SDK will restart when error happens + 
kvssink->recalculate_metrics, // recalculate_metrics + kvssink->allow_create_stream, // allow stream creation if stream does not exist + 0, kvssink->framerate, kvssink->avg_bandwidth_bps, seconds(kvssink->buffer_duration_seconds), + seconds(kvssink->replay_duration_seconds), seconds(kvssink->connection_staleness_seconds), kvssink->codec_id, + kvssink->track_name, nullptr, 0, kvssink->track_info_type, vector(), KVS_SINK_DEFAULT_TRACKID)); if (data->media_type == AUDIO_VIDEO) { stream_definition->addTrack(KVS_SINK_DEFAULT_AUDIO_TRACKID, DEFAULT_AUDIO_TRACK_NAME, kvssink->audio_codec_id, MKV_TRACK_INFO_TYPE_AUDIO); @@ -464,18 +418,19 @@ void create_kinesis_video_stream(GstKvsSink *kvssink) { cout << "Stream is ready" << endl; } -bool kinesis_video_stream_init(GstKvsSink *kvssink, string &err_msg) { +bool kinesis_video_stream_init(GstKvsSink* kvssink, string& err_msg) +{ bool ret = true; int retry_count = DEFAULT_TOTAL_RETRY; int retry_delay = DEFAULT_INITIAL_RETRY_DELAY_MS; bool do_retry = true; - while(do_retry) { + while (do_retry) { try { LOG_INFO("Try creating stream for " << kvssink->stream_name); // stream is freed when createStreamSync fails create_kinesis_video_stream(kvssink); break; - } catch (runtime_error &err) { + } catch (runtime_error& err) { if (--retry_count == 0) { ostringstream oss; oss << "Failed to create stream. 
Error: " << err.what(); @@ -493,227 +448,235 @@ bool kinesis_video_stream_init(GstKvsSink *kvssink, string &err_msg) { return ret; } -static void -gst_kvs_sink_class_init(GstKvsSinkClass *klass) { - GObjectClass *gobject_class; - GstElementClass *gstelement_class; - GstKvsSinkClass *basesink_class = (GstKvsSinkClass *) klass; +static void gst_kvs_sink_class_init(GstKvsSinkClass* klass) +{ + GObjectClass* gobject_class; + GstElementClass* gstelement_class; + GstKvsSinkClass* basesink_class = (GstKvsSinkClass*) klass; - gobject_class = G_OBJECT_CLASS (klass); - gstelement_class = GST_ELEMENT_CLASS (klass); + gobject_class = G_OBJECT_CLASS(klass); + gstelement_class = GST_ELEMENT_CLASS(klass); gobject_class->set_property = gst_kvs_sink_set_property; gobject_class->get_property = gst_kvs_sink_get_property; gobject_class->finalize = gst_kvs_sink_finalize; - g_object_class_install_property (gobject_class, PROP_STREAM_NAME, - g_param_spec_string ("stream-name", "Stream Name", - "Name of the destination stream", DEFAULT_STREAM_NAME, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_USER_AGENT_NAME, - g_param_spec_string ("user-agent", "Custom user agent name", - "Name of the user agent", KVS_CLIENT_USER_AGENT_NAME, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_RETENTION_PERIOD, - g_param_spec_uint ("retention-period", "Retention Period", - "Length of time stream is preserved. 
Unit: hours", 0, G_MAXUINT, DEFAULT_RETENTION_PERIOD_HOURS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_STREAMING_TYPE, - g_param_spec_enum ("streaming-type", "Streaming Type", - "Streaming type", GST_TYPE_KVS_SINK_STREAMING_TYPE, DEFAULT_STREAMING_TYPE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_CONTENT_TYPE, - g_param_spec_string ("content-type", "Content Type", - "content type", MKV_H264_CONTENT_TYPE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_MAX_LATENCY, - g_param_spec_uint ("max-latency", "Max Latency", - "Max Latency. Unit: seconds", 0, G_MAXUINT, DEFAULT_MAX_LATENCY_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_FRAGMENT_DURATION, - g_param_spec_uint ("fragment-duration", "Fragment Duration", - "Fragment Duration. Unit: miliseconds", 0, G_MAXUINT, DEFAULT_FRAGMENT_DURATION_MILLISECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_TIMECODE_SCALE, - g_param_spec_uint ("timecode-scale", "Timecode Scale", - "Timecode Scale. 
Unit: milliseconds", 0, G_MAXUINT, DEFAULT_TIMECODE_SCALE_MILLISECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_KEY_FRAME_FRAGMENTATION, - g_param_spec_boolean ("key-frame-fragmentation", "Do key frame fragmentation", - "If true, generate new fragment on each keyframe, otherwise generate new fragment on first keyframe after fragment-duration has passed.", DEFAULT_KEY_FRAME_FRAGMENTATION, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_FRAME_TIMECODES, - g_param_spec_boolean ("frame-timecodes", "Do frame timecodes", - "Do frame timecodes", DEFAULT_FRAME_TIMECODES, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_ABSOLUTE_FRAGMENT_TIMES, - g_param_spec_boolean ("absolute-fragment-times", "Use absolute fragment time", - "Use absolute fragment time", DEFAULT_ABSOLUTE_FRAGMENT_TIMES, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_FRAGMENT_ACKS, - g_param_spec_boolean ("fragment-acks", "Do fragment acks", - "Do fragment acks", DEFAULT_FRAGMENT_ACKS, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_RESTART_ON_ERROR, - g_param_spec_boolean ("restart-on-error", "Do restart on error", - "Do restart on error", DEFAULT_RESTART_ON_ERROR, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_RECALCULATE_METRICS, - g_param_spec_boolean ("recalculate-metrics", "Do recalculate metrics", - "Do recalculate metrics", DEFAULT_RECALCULATE_METRICS, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_FRAMERATE, - g_param_spec_uint ("framerate", "Framerate", - "Framerate", 0, G_MAXUINT, 
DEFAULT_STREAM_FRAMERATE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_AVG_BANDWIDTH_BPS, - g_param_spec_uint ("avg-bandwidth-bps", "Average bandwidth bps", - "Average bandwidth bps", 0, G_MAXUINT, DEFAULT_AVG_BANDWIDTH_BPS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_BUFFER_DURATION, - g_param_spec_uint ("buffer-duration", "Buffer duration", - "Buffer duration. Unit: seconds", 0, G_MAXUINT, DEFAULT_BUFFER_DURATION_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_REPLAY_DURATION, - g_param_spec_uint ("replay-duration", "Replay duration", - "Replay duration. Unit: seconds", 0, G_MAXUINT, DEFAULT_REPLAY_DURATION_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_CONNECTION_STALENESS, - g_param_spec_uint ("connection-staleness", "Connection staleness", - "Connection staleness. 
Unit: seconds", 0, G_MAXUINT, DEFAULT_CONNECTION_STALENESS_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_CODEC_ID, - g_param_spec_string ("codec-id", "Codec ID", - "Codec ID", DEFAULT_CODEC_ID_H264, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_TRACK_NAME, - g_param_spec_string ("track-name", "Track name", - "Track name", DEFAULT_TRACKNAME, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_ACCESS_KEY, - g_param_spec_string ("access-key", "Access Key", - "AWS Access Key", DEFAULT_ACCESS_KEY, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_SECRET_KEY, - g_param_spec_string ("secret-key", "Secret Key", - "AWS Secret Key", DEFAULT_SECRET_KEY, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_SESSION_TOKEN, - g_param_spec_string ("session-token", "Session token", - "AWS Session token", DEFAULT_SESSION_TOKEN, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_AWS_REGION, - g_param_spec_string ("aws-region", "AWS Region", - "AWS Region", DEFAULT_REGION, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_ROTATION_PERIOD, - g_param_spec_uint ("rotation-period", "Rotation Period", - "Rotation Period. 
Unit: seconds", 0, G_MAXUINT, DEFAULT_ROTATION_PERIOD_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_LOG_CONFIG_PATH, - g_param_spec_string ("log-config", "Log Configuration", - "Log Configuration Path", DEFAULT_LOG_FILE_PATH, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_STORAGE_SIZE, - g_param_spec_uint ("storage-size", "Storage Size", - "Storage Size. Unit: MB", 0, G_MAXUINT, DEFAULT_STORAGE_SIZE_MB, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_STOP_STREAM_TIMEOUT, - g_param_spec_uint ("stop-stream-timeout", "Stop stream timeout", - "Stop stream timeout: seconds", 0, G_MAXUINT, DEFAULT_STOP_STREAM_TIMEOUT_SEC, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_SERVICE_CONNECTION_TIMEOUT, - g_param_spec_uint ("connection-timeout", "Service call connection timeout", - "Service call connection timeout: seconds", 0, G_MAXUINT, DEFAULT_SERVICE_CONNECTION_TIMEOUT_SEC, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_SERVICE_COMPLETION_TIMEOUT, - g_param_spec_uint ("completion-timeout", "Service call completion timeout", - "Service call completion timeout: seconds. Should be more than connection timeout. 
If it isnt, SDK will override with defaults", 0, G_MAXUINT, DEFAULT_SERVICE_COMPLETION_TIMEOUT_SEC, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_CREDENTIAL_FILE_PATH, - g_param_spec_string ("credential-path", "Credential File Path", - "Credential File Path", DEFAULT_CREDENTIAL_FILE_PATH, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_IOT_CERTIFICATE, - g_param_spec_boxed ("iot-certificate", "Iot Certificate", - "Use aws iot certificate to obtain credentials", - GST_TYPE_STRUCTURE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_STREAM_TAGS, - g_param_spec_boxed ("stream-tags", "Stream Tags", - "key-value pair that you can define and assign to each stream", - GST_TYPE_STRUCTURE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_FILE_START_TIME, - g_param_spec_uint64 ("file-start-time", "File Start Time", - "Epoch time that the file starts in kinesis video stream. By default, current time is used. Unit: Seconds", - 0, G_MAXULONG, 0, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_DISABLE_BUFFER_CLIPPING, - g_param_spec_boolean ("disable-buffer-clipping", "Disable Buffer Clipping", - "Set to true only if your src/mux elements produce GST_CLOCK_TIME_NONE for segment start times. 
It is non-standard behavior to set this to true, only use if there are known issues with your src/mux segment start/stop times.", DEFAULT_DISABLE_BUFFER_CLIPPING, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_USE_ORIGINAL_PTS, - g_param_spec_boolean ("use-original-pts", "Use Original PTS", - "Set to true only if you want to use the original presentation time stamp on the buffer and that timestamp is expected to be a valid epoch value in nanoseconds. Most encoders will not have a valid PTS", DEFAULT_USE_ORIGINAL_PTS, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_GET_METRICS, - g_param_spec_boolean ("get-kvs-metrics", "Get client and stream level metrics on every key frame", - "Set to true if you want to read on the producer streamMetrics and clientMetrics object every key frame. Disabled by default", DEFAULT_ENABLE_METRICS, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - g_object_class_install_property (gobject_class, PROP_ALLOW_CREATE_STREAM, - g_param_spec_boolean ("allow-create-stream", "Allow creating stream if stream does not exist", - "Set to true if allowing create stream call, false otherwise", DEFAULT_ALLOW_CREATE_STREAM, - (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); - - gst_element_class_set_static_metadata(gstelement_class, - "KVS Sink", - "Sink/Video/Network", - "GStreamer AWS KVS plugin", + g_object_class_install_property(gobject_class, PROP_STREAM_NAME, + g_param_spec_string("stream-name", "Stream Name", "Name of the destination stream", DEFAULT_STREAM_NAME, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_USER_AGENT_NAME, + g_param_spec_string("user-agent", "Custom user agent name", "Name of the user agent", KVS_CLIENT_USER_AGENT_NAME, + (GParamFlags) (G_PARAM_READWRITE | 
G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_RETENTION_PERIOD, + g_param_spec_uint("retention-period", "Retention Period", "Length of time stream is preserved. Unit: hours", 0, + G_MAXUINT, DEFAULT_RETENTION_PERIOD_HOURS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_STREAMING_TYPE, + g_param_spec_enum("streaming-type", "Streaming Type", "Streaming type", GST_TYPE_KVS_SINK_STREAMING_TYPE, + DEFAULT_STREAMING_TYPE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_CONTENT_TYPE, + g_param_spec_string("content-type", "Content Type", "content type", MKV_H264_CONTENT_TYPE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_MAX_LATENCY, + g_param_spec_uint("max-latency", "Max Latency", "Max Latency. Unit: seconds", 0, G_MAXUINT, + DEFAULT_MAX_LATENCY_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_FRAGMENT_DURATION, + g_param_spec_uint("fragment-duration", "Fragment Duration", "Fragment Duration. Unit: miliseconds", 0, G_MAXUINT, + DEFAULT_FRAGMENT_DURATION_MILLISECONDS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_TIMECODE_SCALE, + g_param_spec_uint("timecode-scale", "Timecode Scale", "Timecode Scale. 
Unit: milliseconds", 0, G_MAXUINT, + DEFAULT_TIMECODE_SCALE_MILLISECONDS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_KEY_FRAME_FRAGMENTATION, + g_param_spec_boolean( + "key-frame-fragmentation", "Do key frame fragmentation", + "If true, generate new fragment on each keyframe, otherwise generate new fragment on first keyframe after fragment-duration has passed.", + DEFAULT_KEY_FRAME_FRAGMENTATION, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_FRAME_TIMECODES, + g_param_spec_boolean("frame-timecodes", "Do frame timecodes", "Do frame timecodes", DEFAULT_FRAME_TIMECODES, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_ABSOLUTE_FRAGMENT_TIMES, + g_param_spec_boolean("absolute-fragment-times", "Use absolute fragment time", "Use absolute fragment time", + DEFAULT_ABSOLUTE_FRAGMENT_TIMES, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_FRAGMENT_ACKS, + g_param_spec_boolean("fragment-acks", "Do fragment acks", "Do fragment acks", DEFAULT_FRAGMENT_ACKS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_RESTART_ON_ERROR, + g_param_spec_boolean("restart-on-error", "Do restart on error", "Do restart on error", DEFAULT_RESTART_ON_ERROR, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_RECALCULATE_METRICS, + g_param_spec_boolean("recalculate-metrics", "Do recalculate metrics", "Do recalculate metrics", + DEFAULT_RECALCULATE_METRICS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_FRAMERATE, + g_param_spec_uint("framerate", "Framerate", "Framerate", 0, G_MAXUINT, 
DEFAULT_STREAM_FRAMERATE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_AVG_BANDWIDTH_BPS, + g_param_spec_uint("avg-bandwidth-bps", "Average bandwidth bps", "Average bandwidth bps", 0, G_MAXUINT, + DEFAULT_AVG_BANDWIDTH_BPS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_BUFFER_DURATION, + g_param_spec_uint("buffer-duration", "Buffer duration", "Buffer duration. Unit: seconds", 0, G_MAXUINT, + DEFAULT_BUFFER_DURATION_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_REPLAY_DURATION, + g_param_spec_uint("replay-duration", "Replay duration", "Replay duration. Unit: seconds", 0, G_MAXUINT, + DEFAULT_REPLAY_DURATION_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_CONNECTION_STALENESS, + g_param_spec_uint("connection-staleness", "Connection staleness", "Connection staleness. 
Unit: seconds", 0, + G_MAXUINT, DEFAULT_CONNECTION_STALENESS_SECONDS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_CODEC_ID, + g_param_spec_string("codec-id", "Codec ID", "Codec ID", DEFAULT_CODEC_ID_H264, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_TRACK_NAME, + g_param_spec_string("track-name", "Track name", "Track name", DEFAULT_TRACKNAME, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_ACCESS_KEY, + g_param_spec_string("access-key", "Access Key", "AWS Access Key", DEFAULT_ACCESS_KEY, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_SECRET_KEY, + g_param_spec_string("secret-key", "Secret Key", "AWS Secret Key", DEFAULT_SECRET_KEY, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_SESSION_TOKEN, + g_param_spec_string("session-token", "Session token", "AWS Session token", DEFAULT_SESSION_TOKEN, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_AWS_REGION, + g_param_spec_string("aws-region", "AWS Region", "AWS Region", DEFAULT_REGION, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_ROTATION_PERIOD, + g_param_spec_uint("rotation-period", "Rotation Period", "Rotation Period. 
Unit: seconds", 0, G_MAXUINT, + DEFAULT_ROTATION_PERIOD_SECONDS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_LOG_CONFIG_PATH, + g_param_spec_string("log-config", "Log Configuration", "Log Configuration Path", DEFAULT_LOG_FILE_PATH, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_STORAGE_SIZE, + g_param_spec_uint("storage-size", "Storage Size", "Storage Size. Unit: MB", 0, G_MAXUINT, DEFAULT_STORAGE_SIZE_MB, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_STOP_STREAM_TIMEOUT, + g_param_spec_uint("stop-stream-timeout", "Stop stream timeout", "Stop stream timeout: seconds", 0, G_MAXUINT, + DEFAULT_STOP_STREAM_TIMEOUT_SEC, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_SERVICE_CONNECTION_TIMEOUT, + g_param_spec_uint("connection-timeout", "Service call connection timeout", "Service call connection timeout: seconds", 0, G_MAXUINT, + DEFAULT_SERVICE_CONNECTION_TIMEOUT_SEC, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_SERVICE_COMPLETION_TIMEOUT, + g_param_spec_uint( + "completion-timeout", "Service call completion timeout", + "Service call completion timeout: seconds. Should be more than connection timeout. 
If it isnt, SDK will override with defaults", 0, + G_MAXUINT, DEFAULT_SERVICE_COMPLETION_TIMEOUT_SEC, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_CREDENTIAL_FILE_PATH, + g_param_spec_string("credential-path", "Credential File Path", "Credential File Path", + DEFAULT_CREDENTIAL_FILE_PATH, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_IOT_CERTIFICATE, + g_param_spec_boxed("iot-certificate", "Iot Certificate", "Use aws iot certificate to obtain credentials", + GST_TYPE_STRUCTURE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_STREAM_TAGS, + g_param_spec_boxed("stream-tags", "Stream Tags", "key-value pair that you can define and assign to each stream", + GST_TYPE_STRUCTURE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_FILE_START_TIME, + g_param_spec_uint64("file-start-time", "File Start Time", + "Epoch time that the file starts in kinesis video stream. By default, current time is used. Unit: Seconds", 0, G_MAXULONG, + 0, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_DISABLE_BUFFER_CLIPPING, + g_param_spec_boolean("disable-buffer-clipping", "Disable Buffer Clipping", + "Set to true only if your src/mux elements produce GST_CLOCK_TIME_NONE for segment start times. 
It is non-standard " + "behavior to set this to true, only use if there are known issues with your src/mux segment start/stop times.", + DEFAULT_DISABLE_BUFFER_CLIPPING, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_USE_ORIGINAL_PTS, + g_param_spec_boolean("use-original-pts", "Use Original PTS", + "Set to true only if you want to use the original presentation time stamp on the buffer and that timestamp is expected " + "to be a valid epoch value in nanoseconds. Most encoders will not have a valid PTS", + DEFAULT_USE_ORIGINAL_PTS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_GET_METRICS, + g_param_spec_boolean( + "get-kvs-metrics", "Get client and stream level metrics on every key frame", + "Set to true if you want to read on the producer streamMetrics and clientMetrics object every key frame. Disabled by default", + DEFAULT_ENABLE_METRICS, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property(gobject_class, PROP_ALLOW_CREATE_STREAM, + g_param_spec_boolean("allow-create-stream", "Allow creating stream if stream does not exist", + "Set to true if allowing create stream call, false otherwise", DEFAULT_ALLOW_CREATE_STREAM, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + gst_element_class_set_static_metadata(gstelement_class, "KVS Sink", "Sink/Video/Network", "GStreamer AWS KVS plugin", "AWS KVS "); gst_element_class_add_pad_template(gstelement_class, gst_static_pad_template_get(&audiosink_templ)); gst_element_class_add_pad_template(gstelement_class, gst_static_pad_template_get(&videosink_templ)); - gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_kvs_sink_change_state); - gstelement_class->request_new_pad = GST_DEBUG_FUNCPTR (gst_kvs_sink_request_new_pad); - gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_kvs_sink_release_pad); - - 
KvsSinkSignals::err_signal_id = g_signal_new("stream-error", G_TYPE_FROM_CLASS(gobject_class), (GSignalFlags)(G_SIGNAL_RUN_LAST), G_STRUCT_OFFSET (GstKvsSinkClass, sink_stream_error), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT64); - KvsSinkSignals::ack_signal_id = g_signal_new("fragment-ack", G_TYPE_FROM_CLASS(gobject_class), - (GSignalFlags)(G_SIGNAL_ACTION), G_STRUCT_OFFSET (GstKvsSinkClass, sink_fragment_ack), - NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_POINTER); - KvsSinkSignals::metric_signal_id = g_signal_new("stream-client-metric", G_TYPE_FROM_CLASS(gobject_class), - (GSignalFlags)(G_SIGNAL_ACTION), G_STRUCT_OFFSET (GstKvsSinkClass, sink_stream_metric), - NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_POINTER); + gstelement_class->change_state = GST_DEBUG_FUNCPTR(gst_kvs_sink_change_state); + gstelement_class->request_new_pad = GST_DEBUG_FUNCPTR(gst_kvs_sink_request_new_pad); + gstelement_class->release_pad = GST_DEBUG_FUNCPTR(gst_kvs_sink_release_pad); + + KvsSinkSignals::err_signal_id = + g_signal_new("stream-error", G_TYPE_FROM_CLASS(gobject_class), (GSignalFlags) (G_SIGNAL_RUN_LAST), + G_STRUCT_OFFSET(GstKvsSinkClass, sink_stream_error), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT64); + KvsSinkSignals::ack_signal_id = + g_signal_new("fragment-ack", G_TYPE_FROM_CLASS(gobject_class), (GSignalFlags) (G_SIGNAL_ACTION), + G_STRUCT_OFFSET(GstKvsSinkClass, sink_fragment_ack), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_POINTER); + KvsSinkSignals::metric_signal_id = + g_signal_new("stream-client-metric", G_TYPE_FROM_CLASS(gobject_class), (GSignalFlags) (G_SIGNAL_ACTION), + G_STRUCT_OFFSET(GstKvsSinkClass, sink_stream_metric), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_POINTER); } -static void -gst_kvs_sink_init(GstKvsSink *kvssink) { +static void gst_kvs_sink_init(GstKvsSink* kvssink) +{ kvssink->collect = gst_collect_pads_new(); - gst_collect_pads_set_buffer_function (kvssink->collect, - GST_DEBUG_FUNCPTR (gst_kvs_sink_handle_buffer), kvssink); - 
gst_collect_pads_set_event_function (kvssink->collect, - GST_DEBUG_FUNCPTR (gst_kvs_sink_handle_sink_event), kvssink); + gst_collect_pads_set_buffer_function(kvssink->collect, GST_DEBUG_FUNCPTR(gst_kvs_sink_handle_buffer), kvssink); + gst_collect_pads_set_event_function(kvssink->collect, GST_DEBUG_FUNCPTR(gst_kvs_sink_handle_sink_event), kvssink); kvssink->num_streams = 0; kvssink->num_audio_streams = 0; kvssink->num_video_streams = 0; // Stream definition - kvssink->stream_name = g_strdup (DEFAULT_STREAM_NAME); + kvssink->stream_name = g_strdup(DEFAULT_STREAM_NAME); kvssink->user_agent = g_strdup(KVS_CLIENT_USER_AGENT_NAME "/" KVSSINK_USER_AGENT_POSTFIX_VERSION); kvssink->retention_period_hours = DEFAULT_RETENTION_PERIOD_HOURS; - kvssink->kms_key_id = g_strdup (DEFAULT_KMS_KEY_ID); + kvssink->kms_key_id = g_strdup(DEFAULT_KMS_KEY_ID); kvssink->streaming_type = DEFAULT_STREAMING_TYPE; kvssink->max_latency_seconds = DEFAULT_MAX_LATENCY_SECONDS; kvssink->fragment_duration_miliseconds = DEFAULT_FRAGMENT_DURATION_MILLISECONDS; @@ -731,23 +694,22 @@ gst_kvs_sink_init(GstKvsSink *kvssink) { kvssink->replay_duration_seconds = DEFAULT_REPLAY_DURATION_SECONDS; kvssink->connection_staleness_seconds = DEFAULT_CONNECTION_STALENESS_SECONDS; kvssink->disable_buffer_clipping = DEFAULT_DISABLE_BUFFER_CLIPPING; - kvssink->codec_id = g_strdup (DEFAULT_CODEC_ID_H264); - kvssink->track_name = g_strdup (DEFAULT_TRACKNAME); - kvssink->access_key = g_strdup (DEFAULT_ACCESS_KEY); - kvssink->secret_key = g_strdup (DEFAULT_SECRET_KEY); + kvssink->codec_id = g_strdup(DEFAULT_CODEC_ID_H264); + kvssink->track_name = g_strdup(DEFAULT_TRACKNAME); + kvssink->access_key = g_strdup(DEFAULT_ACCESS_KEY); + kvssink->secret_key = g_strdup(DEFAULT_SECRET_KEY); kvssink->session_token = g_strdup(DEFAULT_SESSION_TOKEN); - kvssink->aws_region = g_strdup (DEFAULT_REGION); + kvssink->aws_region = g_strdup(DEFAULT_REGION); kvssink->rotation_period = DEFAULT_ROTATION_PERIOD_SECONDS; - kvssink->log_config_path = 
g_strdup (DEFAULT_LOG_FILE_PATH); + kvssink->log_config_path = g_strdup(DEFAULT_LOG_FILE_PATH); kvssink->storage_size = DEFAULT_STORAGE_SIZE_MB; kvssink->stop_stream_timeout = DEFAULT_STOP_STREAM_TIMEOUT_SEC; kvssink->service_connection_timeout = DEFAULT_SERVICE_CONNECTION_TIMEOUT_SEC; kvssink->service_completion_timeout = DEFAULT_SERVICE_COMPLETION_TIMEOUT_SEC; - kvssink->credential_file_path = g_strdup (DEFAULT_CREDENTIAL_FILE_PATH); - kvssink->file_start_time = (uint64_t) chrono::duration_cast( - systemCurrentTime().time_since_epoch()).count(); + kvssink->credential_file_path = g_strdup(DEFAULT_CREDENTIAL_FILE_PATH); + kvssink->file_start_time = (uint64_t) chrono::duration_cast(systemCurrentTime().time_since_epoch()).count(); kvssink->track_info_type = MKV_TRACK_INFO_TYPE_VIDEO; - kvssink->audio_codec_id = g_strdup (DEFAULT_AUDIO_CODEC_ID_AAC); + kvssink->audio_codec_id = g_strdup(DEFAULT_AUDIO_CODEC_ID_AAC); kvssink->data = make_shared(); kvssink->data->err_signal_id = KvsSinkSignals::err_signal_id; @@ -755,15 +717,15 @@ gst_kvs_sink_init(GstKvsSink *kvssink) { kvssink->data->metric_signal_id = KvsSinkSignals::metric_signal_id; // Mark plugin as sink - GST_OBJECT_FLAG_SET (kvssink, GST_ELEMENT_FLAG_SINK); + GST_OBJECT_FLAG_SET(kvssink, GST_ELEMENT_FLAG_SINK); LOGGER_TAG("com.amazonaws.kinesis.video.gstkvs"); LOG_CONFIGURE_STDOUT("DEBUG"); } -static void -gst_kvs_sink_finalize(GObject *object) { - GstKvsSink *kvssink = GST_KVS_SINK (object); +static void gst_kvs_sink_finalize(GObject* object) +{ + GstKvsSink* kvssink = GST_KVS_SINK(object); auto data = kvssink->data; gst_object_unref(kvssink->collect); @@ -782,135 +744,134 @@ gst_kvs_sink_finalize(GObject *object) { g_free(kvssink->credential_file_path); if (kvssink->iot_certificate) { - gst_structure_free (kvssink->iot_certificate); + gst_structure_free(kvssink->iot_certificate); } if (kvssink->stream_tags) { - gst_structure_free (kvssink->stream_tags); + gst_structure_free(kvssink->stream_tags); } if 
(data->kinesis_video_producer) { data->kinesis_video_producer.reset(); } - G_OBJECT_CLASS (parent_class)->finalize(object); + G_OBJECT_CLASS(parent_class)->finalize(object); } -static void -gst_kvs_sink_set_property(GObject *object, guint prop_id, - const GValue *value, GParamSpec *pspec) { - GstKvsSink *kvssink; +static void gst_kvs_sink_set_property(GObject* object, guint prop_id, const GValue* value, GParamSpec* pspec) +{ + GstKvsSink* kvssink; - kvssink = GST_KVS_SINK (object); + kvssink = GST_KVS_SINK(object); switch (prop_id) { case PROP_STREAM_NAME: g_free(kvssink->stream_name); - kvssink->stream_name = g_strdup (g_value_get_string (value)); + kvssink->stream_name = g_strdup(g_value_get_string(value)); break; case PROP_USER_AGENT_NAME: g_free(kvssink->user_agent); - kvssink->user_agent = g_strdup (g_value_get_string (value)); + kvssink->user_agent = g_strdup(g_value_get_string(value)); break; case PROP_RETENTION_PERIOD: - kvssink->retention_period_hours = g_value_get_uint (value); + kvssink->retention_period_hours = g_value_get_uint(value); break; case PROP_STREAMING_TYPE: - kvssink->streaming_type = (STREAMING_TYPE) g_value_get_enum (value); + kvssink->streaming_type = (STREAMING_TYPE) g_value_get_enum(value); break; case PROP_CONTENT_TYPE: g_free(kvssink->content_type); - kvssink->content_type = g_strdup (g_value_get_string (value)); + kvssink->content_type = g_strdup(g_value_get_string(value)); break; case PROP_MAX_LATENCY: - kvssink->max_latency_seconds = g_value_get_uint (value); + kvssink->max_latency_seconds = g_value_get_uint(value); break; case PROP_FRAGMENT_DURATION: - kvssink->fragment_duration_miliseconds = g_value_get_uint (value); + kvssink->fragment_duration_miliseconds = g_value_get_uint(value); break; case PROP_TIMECODE_SCALE: - kvssink->timecode_scale_milliseconds = g_value_get_uint (value); + kvssink->timecode_scale_milliseconds = g_value_get_uint(value); break; case PROP_KEY_FRAME_FRAGMENTATION: - kvssink->key_frame_fragmentation = 
g_value_get_boolean (value); + kvssink->key_frame_fragmentation = g_value_get_boolean(value); break; case PROP_FRAME_TIMECODES: - kvssink->frame_timecodes = g_value_get_boolean (value); + kvssink->frame_timecodes = g_value_get_boolean(value); break; case PROP_ABSOLUTE_FRAGMENT_TIMES: - kvssink->absolute_fragment_times = g_value_get_boolean (value); + kvssink->absolute_fragment_times = g_value_get_boolean(value); break; case PROP_FRAGMENT_ACKS: - kvssink->fragment_acks = g_value_get_boolean (value); + kvssink->fragment_acks = g_value_get_boolean(value); break; case PROP_RESTART_ON_ERROR: - kvssink->restart_on_error = g_value_get_boolean (value); + kvssink->restart_on_error = g_value_get_boolean(value); break; case PROP_RECALCULATE_METRICS: - kvssink->recalculate_metrics = g_value_get_boolean (value); + kvssink->recalculate_metrics = g_value_get_boolean(value); break; case PROP_AVG_BANDWIDTH_BPS: - kvssink->avg_bandwidth_bps = g_value_get_uint (value); + kvssink->avg_bandwidth_bps = g_value_get_uint(value); break; case PROP_BUFFER_DURATION: - kvssink->buffer_duration_seconds = g_value_get_uint (value); + kvssink->buffer_duration_seconds = g_value_get_uint(value); break; case PROP_REPLAY_DURATION: - kvssink->replay_duration_seconds = g_value_get_uint (value); + kvssink->replay_duration_seconds = g_value_get_uint(value); break; case PROP_CONNECTION_STALENESS: - kvssink->connection_staleness_seconds = g_value_get_uint (value); + kvssink->connection_staleness_seconds = g_value_get_uint(value); break; case PROP_CODEC_ID: g_free(kvssink->codec_id); - kvssink->codec_id = g_strdup (g_value_get_string (value)); + kvssink->codec_id = g_strdup(g_value_get_string(value)); break; case PROP_TRACK_NAME: g_free(kvssink->track_name); - kvssink->track_name = g_strdup (g_value_get_string (value)); + kvssink->track_name = g_strdup(g_value_get_string(value)); break; case PROP_ACCESS_KEY: g_free(kvssink->access_key); - kvssink->access_key = g_strdup (g_value_get_string (value)); + 
kvssink->access_key = g_strdup(g_value_get_string(value)); break; case PROP_SECRET_KEY: g_free(kvssink->secret_key); - kvssink->secret_key = g_strdup (g_value_get_string (value)); + kvssink->secret_key = g_strdup(g_value_get_string(value)); break; case PROP_SESSION_TOKEN: g_free(kvssink->session_token); - kvssink->session_token = g_strdup (g_value_get_string (value)); + kvssink->session_token = g_strdup(g_value_get_string(value)); break; case PROP_AWS_REGION: g_free(kvssink->aws_region); - kvssink->aws_region = g_strdup (g_value_get_string (value)); + kvssink->aws_region = g_strdup(g_value_get_string(value)); break; case PROP_ROTATION_PERIOD: - kvssink->rotation_period = g_value_get_uint (value); + kvssink->rotation_period = g_value_get_uint(value); break; case PROP_LOG_CONFIG_PATH: - kvssink->log_config_path = g_strdup (g_value_get_string (value)); + kvssink->log_config_path = g_strdup(g_value_get_string(value)); break; case PROP_FRAMERATE: - kvssink->framerate = g_value_get_uint (value); + kvssink->framerate = g_value_get_uint(value); break; case PROP_STORAGE_SIZE: - kvssink->storage_size = g_value_get_uint (value); + kvssink->storage_size = g_value_get_uint(value); break; case PROP_STOP_STREAM_TIMEOUT: - kvssink->stop_stream_timeout = g_value_get_uint (value); + kvssink->stop_stream_timeout = g_value_get_uint(value); break; case PROP_SERVICE_CONNECTION_TIMEOUT: - kvssink->service_connection_timeout = g_value_get_uint (value); + kvssink->service_connection_timeout = g_value_get_uint(value); break; case PROP_SERVICE_COMPLETION_TIMEOUT: - kvssink->service_completion_timeout = g_value_get_uint (value); + kvssink->service_completion_timeout = g_value_get_uint(value); break; case PROP_CREDENTIAL_FILE_PATH: - kvssink->credential_file_path = g_strdup (g_value_get_string (value)); + kvssink->credential_file_path = g_strdup(g_value_get_string(value)); break; case PROP_IOT_CERTIFICATE: { - const GstStructure *s = gst_value_get_structure(value); + const GstStructure* s = 
gst_value_get_structure(value); if (kvssink->iot_certificate) { gst_structure_free(kvssink->iot_certificate); @@ -919,7 +880,7 @@ gst_kvs_sink_set_property(GObject *object, guint prop_id, break; } case PROP_STREAM_TAGS: { - const GstStructure *s = gst_value_get_structure(value); + const GstStructure* s = gst_value_get_structure(value); if (kvssink->stream_tags) { gst_structure_free(kvssink->stream_tags); @@ -928,7 +889,7 @@ gst_kvs_sink_set_property(GObject *object, guint prop_id, break; } case PROP_FILE_START_TIME: - kvssink->file_start_time = g_value_get_uint64 (value); + kvssink->file_start_time = g_value_get_uint64(value); break; case PROP_DISABLE_BUFFER_CLIPPING: kvssink->disable_buffer_clipping = g_value_get_boolean(value); @@ -943,187 +904,185 @@ gst_kvs_sink_set_property(GObject *object, guint prop_id, kvssink->allow_create_stream = g_value_get_boolean(value); break; default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); break; } } -static void -gst_kvs_sink_get_property(GObject *object, guint prop_id, GValue *value, - GParamSpec *pspec) { - GstKvsSink *kvssink; +static void gst_kvs_sink_get_property(GObject* object, guint prop_id, GValue* value, GParamSpec* pspec) +{ + GstKvsSink* kvssink; - kvssink = GST_KVS_SINK (object); + kvssink = GST_KVS_SINK(object); switch (prop_id) { case PROP_STREAM_NAME: - g_value_set_string (value, kvssink->stream_name); + g_value_set_string(value, kvssink->stream_name); break; case PROP_USER_AGENT_NAME: - g_value_set_string (value, kvssink->user_agent); + g_value_set_string(value, kvssink->user_agent); break; case PROP_RETENTION_PERIOD: - g_value_set_uint (value, kvssink->retention_period_hours); + g_value_set_uint(value, kvssink->retention_period_hours); break; case PROP_STREAMING_TYPE: - g_value_set_enum (value, kvssink->streaming_type); + g_value_set_enum(value, kvssink->streaming_type); break; case PROP_CONTENT_TYPE: - g_value_set_string (value, 
kvssink->content_type); + g_value_set_string(value, kvssink->content_type); break; case PROP_MAX_LATENCY: - g_value_set_uint (value, kvssink->max_latency_seconds); + g_value_set_uint(value, kvssink->max_latency_seconds); break; case PROP_FRAGMENT_DURATION: - g_value_set_uint (value, kvssink->fragment_duration_miliseconds); + g_value_set_uint(value, kvssink->fragment_duration_miliseconds); break; case PROP_TIMECODE_SCALE: - g_value_set_uint (value, kvssink->timecode_scale_milliseconds); + g_value_set_uint(value, kvssink->timecode_scale_milliseconds); break; case PROP_KEY_FRAME_FRAGMENTATION: - g_value_set_boolean (value, kvssink->key_frame_fragmentation); + g_value_set_boolean(value, kvssink->key_frame_fragmentation); break; case PROP_FRAME_TIMECODES: - g_value_set_boolean (value, kvssink->frame_timecodes); + g_value_set_boolean(value, kvssink->frame_timecodes); break; case PROP_ABSOLUTE_FRAGMENT_TIMES: - g_value_set_boolean (value, kvssink->absolute_fragment_times); + g_value_set_boolean(value, kvssink->absolute_fragment_times); break; case PROP_FRAGMENT_ACKS: - g_value_set_boolean (value, kvssink->fragment_acks); + g_value_set_boolean(value, kvssink->fragment_acks); break; case PROP_RESTART_ON_ERROR: - g_value_set_boolean (value, kvssink->restart_on_error); + g_value_set_boolean(value, kvssink->restart_on_error); break; case PROP_RECALCULATE_METRICS: - g_value_set_boolean (value, kvssink->recalculate_metrics); + g_value_set_boolean(value, kvssink->recalculate_metrics); break; case PROP_AVG_BANDWIDTH_BPS: - g_value_set_uint (value, kvssink->avg_bandwidth_bps); + g_value_set_uint(value, kvssink->avg_bandwidth_bps); break; case PROP_BUFFER_DURATION: - g_value_set_uint (value, kvssink->buffer_duration_seconds); + g_value_set_uint(value, kvssink->buffer_duration_seconds); break; case PROP_REPLAY_DURATION: - g_value_set_uint (value, kvssink->replay_duration_seconds); + g_value_set_uint(value, kvssink->replay_duration_seconds); break; case PROP_CONNECTION_STALENESS: - 
g_value_set_uint (value, kvssink->connection_staleness_seconds); + g_value_set_uint(value, kvssink->connection_staleness_seconds); break; case PROP_CODEC_ID: - g_value_set_string (value, kvssink->codec_id); + g_value_set_string(value, kvssink->codec_id); break; case PROP_TRACK_NAME: - g_value_set_string (value, kvssink->track_name); + g_value_set_string(value, kvssink->track_name); break; case PROP_ACCESS_KEY: - g_value_set_string (value, kvssink->access_key); + g_value_set_string(value, kvssink->access_key); break; case PROP_SECRET_KEY: - g_value_set_string (value, kvssink->secret_key); + g_value_set_string(value, kvssink->secret_key); break; case PROP_SESSION_TOKEN: - g_value_set_string (value, kvssink->session_token); + g_value_set_string(value, kvssink->session_token); break; case PROP_AWS_REGION: - g_value_set_string (value, kvssink->aws_region); + g_value_set_string(value, kvssink->aws_region); break; case PROP_ROTATION_PERIOD: - g_value_set_uint (value, kvssink->rotation_period); + g_value_set_uint(value, kvssink->rotation_period); break; case PROP_LOG_CONFIG_PATH: - g_value_set_string (value, kvssink->log_config_path); + g_value_set_string(value, kvssink->log_config_path); break; case PROP_FRAMERATE: - g_value_set_uint (value, kvssink->framerate); + g_value_set_uint(value, kvssink->framerate); break; case PROP_STORAGE_SIZE: - g_value_set_uint (value, kvssink->storage_size); + g_value_set_uint(value, kvssink->storage_size); break; case PROP_STOP_STREAM_TIMEOUT: - g_value_set_uint (value, kvssink->stop_stream_timeout); + g_value_set_uint(value, kvssink->stop_stream_timeout); break; case PROP_SERVICE_CONNECTION_TIMEOUT: - g_value_set_uint (value, kvssink->service_connection_timeout); + g_value_set_uint(value, kvssink->service_connection_timeout); break; case PROP_SERVICE_COMPLETION_TIMEOUT: - g_value_set_uint (value, kvssink->service_completion_timeout); + g_value_set_uint(value, kvssink->service_completion_timeout); break; case PROP_CREDENTIAL_FILE_PATH: - 
g_value_set_string (value, kvssink->credential_file_path); + g_value_set_string(value, kvssink->credential_file_path); break; case PROP_IOT_CERTIFICATE: - gst_value_set_structure (value, kvssink->iot_certificate); + gst_value_set_structure(value, kvssink->iot_certificate); break; case PROP_STREAM_TAGS: - gst_value_set_structure (value, kvssink->stream_tags); + gst_value_set_structure(value, kvssink->stream_tags); break; case PROP_FILE_START_TIME: - g_value_set_uint64 (value, kvssink->file_start_time); + g_value_set_uint64(value, kvssink->file_start_time); break; case PROP_DISABLE_BUFFER_CLIPPING: - g_value_set_boolean (value, kvssink->disable_buffer_clipping); + g_value_set_boolean(value, kvssink->disable_buffer_clipping); break; case PROP_USE_ORIGINAL_PTS: - g_value_set_boolean (value, kvssink->data->use_original_pts); + g_value_set_boolean(value, kvssink->data->use_original_pts); break; case PROP_GET_METRICS: - g_value_set_boolean (value, kvssink->data->get_metrics); + g_value_set_boolean(value, kvssink->data->get_metrics); break; case PROP_ALLOW_CREATE_STREAM: - g_value_set_boolean (value, kvssink->allow_create_stream); + g_value_set_boolean(value, kvssink->allow_create_stream); break; default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); break; } } -static gboolean -gst_kvs_sink_handle_sink_event (GstCollectPads *pads, - GstCollectData *track_data, GstEvent * event, gpointer user_data) { - GstKvsSink *kvssink = GST_KVS_SINK(user_data); +static gboolean gst_kvs_sink_handle_sink_event(GstCollectPads* pads, GstCollectData* track_data, GstEvent* event, gpointer user_data) +{ + GstKvsSink* kvssink = GST_KVS_SINK(user_data); auto data = kvssink->data; - GstKvsSinkTrackData *kvs_sink_track_data = (GstKvsSinkTrackData *) track_data; + GstKvsSinkTrackData* kvs_sink_track_data = (GstKvsSinkTrackData*) track_data; gboolean ret = TRUE; - GstCaps *gstcaps = NULL; + GstCaps* gstcaps = NULL; string 
err_msg; uint64_t track_id = kvs_sink_track_data->track_id; gint samplerate = 0, channels = 0; - const gchar *media_type; + const gchar* media_type; - switch (GST_EVENT_TYPE (event)) { + switch (GST_EVENT_TYPE(event)) { case GST_EVENT_CAPS: { gst_event_parse_caps(event, &gstcaps); - GstStructure *gststructforcaps = gst_caps_get_structure(gstcaps, 0); - GST_INFO ("structure is %" GST_PTR_FORMAT, gststructforcaps); - media_type = gst_structure_get_name (gststructforcaps); + GstStructure* gststructforcaps = gst_caps_get_structure(gstcaps, 0); + GST_INFO("structure is %" GST_PTR_FORMAT, gststructforcaps); + media_type = gst_structure_get_name(gststructforcaps); - if (!strcmp (media_type, GSTREAMER_MEDIA_TYPE_ALAW) || !strcmp (media_type, GSTREAMER_MEDIA_TYPE_MULAW)) { + if (!strcmp(media_type, GSTREAMER_MEDIA_TYPE_ALAW) || !strcmp(media_type, GSTREAMER_MEDIA_TYPE_MULAW)) { guint8 codec_private_data[KVS_PCM_CPD_SIZE_BYTE]; KVS_PCM_FORMAT_CODE format = KVS_PCM_FORMAT_CODE_MULAW; - gst_structure_get_int (gststructforcaps, "rate", &samplerate); - gst_structure_get_int (gststructforcaps, "channels", &channels); + gst_structure_get_int(gststructforcaps, "rate", &samplerate); + gst_structure_get_int(gststructforcaps, "channels", &channels); if (samplerate == 0 || channels == 0) { - GST_ERROR_OBJECT (kvssink, "Missing channels/samplerate on caps"); + GST_ERROR_OBJECT(kvssink, "Missing channels/samplerate on caps"); ret = FALSE; goto CleanUp; } - if (!strcmp (media_type, GSTREAMER_MEDIA_TYPE_ALAW)) { + if (!strcmp(media_type, GSTREAMER_MEDIA_TYPE_ALAW)) { format = KVS_PCM_FORMAT_CODE_ALAW; } else { format = KVS_PCM_FORMAT_CODE_MULAW; } if (mkvgenGeneratePcmCpd(format, (UINT32) samplerate, (UINT16) channels, (PBYTE) codec_private_data, KVS_PCM_CPD_SIZE_BYTE)) { - GST_ERROR_OBJECT (kvssink, "Failed to generate pcm cpd"); + GST_ERROR_OBJECT(kvssink, "Failed to generate pcm cpd"); ret = FALSE; goto CleanUp; } @@ -1132,8 +1091,8 @@ gst_kvs_sink_handle_sink_event (GstCollectPads 
*pads, ret = data->kinesis_video_stream->start(codec_private_data, KVS_PCM_CPD_SIZE_BYTE, track_id); } else if (data->track_cpd_received.count(track_id) == 0 && gst_structure_has_field(gststructforcaps, "codec_data")) { - const GValue *gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); - gchar *cpd = gst_value_serialize(gstStreamFormat); + const GValue* gstStreamFormat = gst_structure_get_value(gststructforcaps, "codec_data"); + gchar* cpd = gst_value_serialize(gstStreamFormat); string cpd_str = string(cpd); data->track_cpd_received.insert(track_id); g_free(cpd); @@ -1142,7 +1101,7 @@ gst_kvs_sink_handle_sink_event (GstCollectPads *pads, ret = data->kinesis_video_stream->start(cpd_str, track_id); } - gst_event_unref (event); + gst_event_unref(event); event = NULL; if (!ret) { @@ -1152,7 +1111,7 @@ gst_kvs_sink_handle_sink_event (GstCollectPads *pads, break; } case GST_EVENT_CUSTOM_DOWNSTREAM: { - const GstStructure *structure = gst_event_get_structure(event); + const GstStructure* structure = gst_event_get_structure(event); std::string metadata_name, metadata_value; gboolean persistent; bool is_persist; @@ -1165,8 +1124,8 @@ gst_kvs_sink_handle_sink_event (GstCollectPads *pads, if (NULL == gst_structure_get_string(structure, KVS_ADD_METADATA_NAME) || NULL == gst_structure_get_string(structure, KVS_ADD_METADATA_VALUE) || !gst_structure_get_boolean(structure, KVS_ADD_METADATA_PERSISTENT, &persistent)) { - - LOG_WARN("Event structure contains invalid field: " << std::string(gst_structure_to_string (structure)) << " for " << kvssink->stream_name); + LOG_WARN("Event structure contains invalid field: " << std::string(gst_structure_to_string(structure)) << " for " + << kvssink->stream_name); goto CleanUp; } @@ -1176,9 +1135,10 @@ gst_kvs_sink_handle_sink_event (GstCollectPads *pads, bool result = data->kinesis_video_stream->putFragmentMetadata(metadata_name, metadata_value, is_persist); if (!result) { - LOG_WARN("Failed to putFragmentMetadata. 
name: " << metadata_name << ", value: " << metadata_value << ", persistent: " << is_persist << " for " << kvssink->stream_name); + LOG_WARN("Failed to putFragmentMetadata. name: " << metadata_name << ", value: " << metadata_value << ", persistent: " << is_persist + << " for " << kvssink->stream_name); } - gst_event_unref (event); + gst_event_unref(event); event = NULL; break; } @@ -1199,8 +1159,9 @@ gst_kvs_sink_handle_sink_event (GstCollectPads *pads, return ret; } -void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nanoseconds &dts, FRAME_FLAGS flags, - void *frame_data, size_t len, uint64_t track_id, uint32_t index) { +void create_kinesis_video_frame(Frame* frame, const nanoseconds& pts, const nanoseconds& dts, FRAME_FLAGS flags, void* frame_data, size_t len, + uint64_t track_id, uint32_t index) +{ frame->flags = flags; frame->index = index; frame->decodingTs = static_cast(dts.count()) / DEFAULT_TIME_UNIT_IN_NANOS; @@ -1211,15 +1172,15 @@ void create_kinesis_video_frame(Frame *frame, const nanoseconds &pts, const nano frame->trackId = static_cast(track_id); } -bool put_frame(shared_ptr data, void *frame_data, size_t len, const nanoseconds &pts, - const nanoseconds &dts, FRAME_FLAGS flags, uint64_t track_id, uint32_t index) { - +bool put_frame(shared_ptr data, void* frame_data, size_t len, const nanoseconds& pts, const nanoseconds& dts, FRAME_FLAGS flags, + uint64_t track_id, uint32_t index) +{ Frame frame; create_kinesis_video_frame(&frame, pts, dts, flags, frame_data, len, track_id, index); bool ret = data->kinesis_video_stream->putFrame(frame); - if(data->get_metrics && ret) { - if(CHECK_FRAME_FLAG_KEY_FRAME(flags) || data->on_first_frame){ - KvsSinkMetric *kvs_sink_metric = new KvsSinkMetric(); + if (data->get_metrics && ret) { + if (CHECK_FRAME_FLAG_KEY_FRAME(flags) || data->on_first_frame) { + KvsSinkMetric* kvs_sink_metric = new KvsSinkMetric(); kvs_sink_metric->stream_metrics = data->kinesis_video_stream->getMetrics(); 
kvs_sink_metric->client_metrics = data->kinesis_video_producer->getMetrics(); kvs_sink_metric->frame_pts = frame.presentationTs; @@ -1232,17 +1193,16 @@ bool put_frame(shared_ptr data, void *frame_data, size_t len, return ret; } -static GstFlowReturn -gst_kvs_sink_handle_buffer (GstCollectPads * pads, - GstCollectData * track_data, GstBuffer * buf, gpointer user_data) { - GstKvsSink *kvssink = GST_KVS_SINK(user_data); +static GstFlowReturn gst_kvs_sink_handle_buffer(GstCollectPads* pads, GstCollectData* track_data, GstBuffer* buf, gpointer user_data) +{ + GstKvsSink* kvssink = GST_KVS_SINK(user_data); GstFlowReturn ret = GST_FLOW_OK; - GstKvsSinkTrackData *kvs_sink_track_data = (GstKvsSinkTrackData *) track_data; + GstKvsSinkTrackData* kvs_sink_track_data = (GstKvsSinkTrackData*) track_data; auto data = kvssink->data; string err_msg; bool isDroppable; STATUS stream_status = data->stream_status.load(); - GstMessage *message; + GstMessage* message; bool delta; uint64_t track_id; FRAME_FLAGS kinesis_video_flags = FRAME_FLAG_NONE; @@ -1257,15 +1217,15 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, // we want to avoid an extra call. It is not possible for this callback to be invoked // after stopSync() since we stop collecting on pads before invoking. 
But having this // check anyways in case it happens - if(!data->streamingStopped.load()) { + if (!data->streamingStopped.load()) { data->kinesis_video_stream->stopSync(); data->streamingStopped.store(true); LOG_INFO("Sending eos for " << kvssink->stream_name); } // send out eos message to gstreamer bus - message = gst_message_new_eos (GST_OBJECT_CAST (kvssink)); - gst_element_post_message (GST_ELEMENT_CAST (kvssink), message); + message = gst_message_new_eos(GST_OBJECT_CAST(kvssink)); + gst_element_post_message(GST_ELEMENT_CAST(kvssink), message); ret = GST_FLOW_EOS; goto CleanUp; @@ -1276,8 +1236,7 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, // therefore error out and let higher level application do the retry. if (IS_OFFLINE_STREAMING_MODE(kvssink->streaming_type) || !IS_RETRIABLE_ERROR(stream_status)) { // fatal cases - GST_ELEMENT_ERROR (kvssink, STREAM, FAILED, (NULL), - ("[%s] Stream error occurred. Status: 0x%08x", kvssink->stream_name, stream_status)); + GST_ELEMENT_ERROR(kvssink, STREAM, FAILED, (NULL), ("[%s] Stream error occurred. 
Status: 0x%08x", kvssink->stream_name, stream_status)); ret = GST_FLOW_ERROR; goto CleanUp; } else { @@ -1288,13 +1247,12 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, } } - if(buf != NULL) { - isDroppable = GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_CORRUPTED) || - GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DECODE_ONLY) || - (GST_BUFFER_FLAGS(buf) == GST_BUFFER_FLAG_DISCONT) || - (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) || - // drop if buffer contains header and has invalid timestamp - (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_HEADER) && (!GST_BUFFER_PTS_IS_VALID(buf) || !GST_BUFFER_DTS_IS_VALID(buf))); + if (buf != NULL) { + isDroppable = GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_CORRUPTED) || GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DECODE_ONLY) || + (GST_BUFFER_FLAGS(buf) == GST_BUFFER_FLAG_DISCONT) || + (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DISCONT) && GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) || + // drop if buffer contains header and has invalid timestamp + (GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_HEADER) && (!GST_BUFFER_PTS_IS_VALID(buf) || !GST_BUFFER_DTS_IS_VALID(buf))); if (isDroppable) { LOG_DEBUG("Dropping frame with flag: " << GST_BUFFER_FLAGS(buf) << " for " << kvssink->stream_name); goto CleanUp; @@ -1304,11 +1262,10 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, // timestamp. Therefore in here we add the file_start_time to frame pts to create absolute timestamp. // If user did not specify file_start_time, file_start_time will be 0 and has no effect. if (IS_OFFLINE_STREAMING_MODE(kvssink->streaming_type)) { - if(!data->use_original_pts) { + if (!data->use_original_pts) { buf->dts = 0; // if offline mode, i.e. streaming a file, the dts from gstreamer is undefined. 
buf->pts += data->pts_base; - } - else { + } else { buf->pts = buf->dts; } } else if (!GST_BUFFER_DTS_IS_VALID(buf)) { @@ -1318,7 +1275,7 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, data->last_dts = buf->dts; track_id = kvs_sink_track_data->track_id; - if (!gst_buffer_map(buf, &info, GST_MAP_READ)){ + if (!gst_buffer_map(buf, &info, GST_MAP_READ)) { goto CleanUp; } @@ -1332,7 +1289,7 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, } break; case AUDIO_VIDEO: - if(!delta && kvs_sink_track_data->track_type == MKV_TRACK_INFO_TYPE_VIDEO) { + if (!delta && kvs_sink_track_data->track_type == MKV_TRACK_INFO_TYPE_VIDEO) { if (data->first_video_frame) { data->first_video_frame = false; } @@ -1345,23 +1302,19 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, data->first_pts = buf->pts; } if (data->producer_start_time == GST_CLOCK_TIME_NONE) { - data->producer_start_time = (uint64_t) chrono::duration_cast( - systemCurrentTime().time_since_epoch()).count(); + data->producer_start_time = (uint64_t) chrono::duration_cast(systemCurrentTime().time_since_epoch()).count(); } - if(!data->use_original_pts) { + if (!data->use_original_pts) { buf->pts += data->producer_start_time - data->first_pts; - } - else { + } else { buf->pts = buf->dts; } } - put_frame(kvssink->data, info.data, info.size, - std::chrono::nanoseconds(buf->pts), - std::chrono::nanoseconds(buf->dts), kinesis_video_flags, track_id, data->frame_count); + put_frame(kvssink->data, info.data, info.size, std::chrono::nanoseconds(buf->pts), std::chrono::nanoseconds(buf->dts), kinesis_video_flags, + track_id, data->frame_count); data->frame_count++; - } - else { + } else { LOG_WARN("GStreamer buffer is invalid for " << kvssink->stream_name); } @@ -1371,53 +1324,51 @@ gst_kvs_sink_handle_buffer (GstCollectPads * pads, } if (buf != NULL) { - gst_buffer_unref (buf); + gst_buffer_unref(buf); } return ret; } -static GstPad * -gst_kvs_sink_request_new_pad (GstElement * element, GstPadTemplate * templ, - const 
gchar * req_name, const GstCaps * caps) +static GstPad* gst_kvs_sink_request_new_pad(GstElement* element, GstPadTemplate* templ, const gchar* req_name, const GstCaps* caps) { - GstElementClass *klass = GST_ELEMENT_GET_CLASS (element); + GstElementClass* klass = GST_ELEMENT_GET_CLASS(element); GstKvsSink* kvssink = GST_KVS_SINK(element); - gchar *name = NULL; - GstPad *newpad = NULL; - const gchar *pad_name = NULL; + gchar* name = NULL; + GstPad* newpad = NULL; + const gchar* pad_name = NULL; MKV_TRACK_INFO_TYPE track_type = MKV_TRACK_INFO_TYPE_VIDEO; gboolean locked = TRUE; - GstKvsSinkTrackData *kvs_sink_track_data; + GstKvsSinkTrackData* kvs_sink_track_data; if (req_name != NULL) { - GST_WARNING_OBJECT (kvssink, "Custom pad name not supported"); + GST_WARNING_OBJECT(kvssink, "Custom pad name not supported"); } // Check if the pad template is supported - if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) { + if (templ == gst_element_class_get_pad_template(klass, "audio_%u")) { if (kvssink->num_audio_streams == 1) { - GST_ERROR_OBJECT (kvssink, "Can not have more than one audio stream."); + GST_ERROR_OBJECT(kvssink, "Can not have more than one audio stream."); goto CleanUp; } - name = g_strdup_printf ("audio_%u", kvssink->num_audio_streams++); + name = g_strdup_printf("audio_%u", kvssink->num_audio_streams++); pad_name = name; track_type = MKV_TRACK_INFO_TYPE_AUDIO; - } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) { + } else if (templ == gst_element_class_get_pad_template(klass, "video_%u")) { if (kvssink->num_video_streams == 1) { - GST_ERROR_OBJECT (kvssink, "Can not have more than one video stream."); + GST_ERROR_OBJECT(kvssink, "Can not have more than one video stream."); goto CleanUp; } - name = g_strdup_printf ("video_%u", kvssink->num_video_streams++); + name = g_strdup_printf("video_%u", kvssink->num_video_streams++); pad_name = name; track_type = MKV_TRACK_INFO_TYPE_VIDEO; } else { - GST_WARNING_OBJECT 
(kvssink, "This is not our template!"); + GST_WARNING_OBJECT(kvssink, "This is not our template!"); goto CleanUp; } @@ -1429,22 +1380,18 @@ gst_kvs_sink_request_new_pad (GstElement * element, GstPadTemplate * templ, kvssink->data->media_type = AUDIO_ONLY; } - newpad = GST_PAD_CAST (g_object_new (GST_TYPE_PAD, - "name", pad_name, "direction", templ->direction, "template", templ, - NULL)); + newpad = GST_PAD_CAST(g_object_new(GST_TYPE_PAD, "name", pad_name, "direction", templ->direction, "template", templ, NULL)); - kvs_sink_track_data = (GstKvsSinkTrackData *) - gst_collect_pads_add_pad (kvssink->collect, GST_PAD (newpad), - sizeof (GstKvsSinkTrackData), - NULL, locked); + kvs_sink_track_data = + (GstKvsSinkTrackData*) gst_collect_pads_add_pad(kvssink->collect, GST_PAD(newpad), sizeof(GstKvsSinkTrackData), NULL, locked); kvs_sink_track_data->kvssink = kvssink; kvs_sink_track_data->track_type = track_type; kvs_sink_track_data->track_id = KVS_SINK_DEFAULT_TRACKID; - if (!gst_element_add_pad (element, GST_PAD (newpad))) { - gst_object_unref (newpad); + if (!gst_element_add_pad(element, GST_PAD(newpad))) { + gst_object_unref(newpad); newpad = NULL; - GST_WARNING_OBJECT (kvssink, "Adding the new pad '%s' failed", pad_name); + GST_WARNING_OBJECT(kvssink, "Adding the new pad '%s' failed", pad_name); goto CleanUp; } @@ -1453,23 +1400,23 @@ gst_kvs_sink_request_new_pad (GstElement * element, GstPadTemplate * templ, GST_INFO("Added new request pad"); CleanUp: - g_free (name); + g_free(name); return newpad; } -static void -gst_kvs_sink_release_pad (GstElement *element, GstPad *pad) { - GstKvsSink *kvssink = GST_KVS_SINK (GST_PAD_PARENT (pad)); - GSList *walk; +static void gst_kvs_sink_release_pad(GstElement* element, GstPad* pad) +{ + GstKvsSink* kvssink = GST_KVS_SINK(GST_PAD_PARENT(pad)); + GSList* walk; // when a pad is released, check whether it's audio or video and keep track of the stream count - for (walk = kvssink->collect->data; walk; walk = g_slist_next (walk)) { - 
GstCollectData *c_data; - c_data = (GstCollectData *) walk->data; + for (walk = kvssink->collect->data; walk; walk = g_slist_next(walk)) { + GstCollectData* c_data; + c_data = (GstCollectData*) walk->data; if (c_data->pad == pad) { - GstKvsSinkTrackData *kvs_sink_track_data; - kvs_sink_track_data = (GstKvsSinkTrackData *) walk->data; + GstKvsSinkTrackData* kvs_sink_track_data; + kvs_sink_track_data = (GstKvsSinkTrackData*) walk->data; if (kvs_sink_track_data->track_type == MKV_TRACK_INFO_TYPE_VIDEO) { kvssink->num_video_streams--; } else if (kvs_sink_track_data->track_type == MKV_TRACK_INFO_TYPE_AUDIO) { @@ -1478,29 +1425,28 @@ gst_kvs_sink_release_pad (GstElement *element, GstPad *pad) { } } - gst_collect_pads_remove_pad (kvssink->collect, pad); - if (gst_element_remove_pad (element, pad)) { + gst_collect_pads_remove_pad(kvssink->collect, pad); + if (gst_element_remove_pad(element, pad)) { kvssink->num_streams--; } } -static void -init_track_data(GstKvsSink *kvssink) { - GSList *walk; - GstCaps *caps; +static void init_track_data(GstKvsSink* kvssink) +{ + GSList* walk; + GstCaps* caps; gchar *video_content_type = NULL, *audio_content_type = NULL; - const gchar *media_type; + const gchar* media_type; - for (walk = kvssink->collect->data; walk != NULL; walk = g_slist_next (walk)) { - GstKvsSinkTrackData *kvs_sink_track_data = (GstKvsSinkTrackData *) walk->data; + for (walk = kvssink->collect->data; walk != NULL; walk = g_slist_next(walk)) { + GstKvsSinkTrackData* kvs_sink_track_data = (GstKvsSinkTrackData*) walk->data; if (kvs_sink_track_data->track_type == MKV_TRACK_INFO_TYPE_VIDEO) { - if (kvssink->data->media_type == AUDIO_VIDEO) { kvs_sink_track_data->track_id = KVS_SINK_DEFAULT_TRACKID; } - GstCollectData *collect_data = (GstCollectData *) walk->data; + GstCollectData* collect_data = (GstCollectData*) walk->data; // extract media type from GstCaps to check whether it's h264 or h265 caps = gst_pad_get_allowed_caps(collect_data->pad); @@ -1514,17 +1460,17 @@ 
init_track_data(GstKvsSink *kvssink) { video_content_type = g_strdup(MKV_H265_CONTENT_TYPE); } else { // no-op, should result in a caps negotiation error before getting here. - LOG_AND_THROW("Error, media type " << media_type << "not accepted by kvssink" << " for " << kvssink->stream_name); + LOG_AND_THROW("Error, media type " << media_type << "not accepted by kvssink" + << " for " << kvssink->stream_name); } gst_caps_unref(caps); } else if (kvs_sink_track_data->track_type == MKV_TRACK_INFO_TYPE_AUDIO) { - if (kvssink->data->media_type == AUDIO_VIDEO) { kvs_sink_track_data->track_id = KVS_SINK_DEFAULT_AUDIO_TRACKID; } - GstCollectData *collect_data = (GstCollectData *) walk->data; + GstCollectData* collect_data = (GstCollectData*) walk->data; // extract media type from GstCaps to check whether it's h264 or h265 caps = gst_pad_get_allowed_caps(collect_data->pad); @@ -1550,17 +1496,17 @@ init_track_data(GstKvsSink *kvssink) { switch (kvssink->data->media_type) { case AUDIO_VIDEO: - if(video_content_type != NULL && audio_content_type != NULL) { + if (video_content_type != NULL && audio_content_type != NULL) { kvssink->content_type = g_strjoin(",", video_content_type, audio_content_type, NULL); } break; case AUDIO_ONLY: - if(audio_content_type != NULL) { + if (audio_content_type != NULL) { kvssink->content_type = g_strdup(audio_content_type); } break; case VIDEO_ONLY: - if(video_content_type != NULL) { + if (video_content_type != NULL) { kvssink->content_type = g_strdup(video_content_type); } break; @@ -1570,17 +1516,17 @@ init_track_data(GstKvsSink *kvssink) { g_free(audio_content_type); } -static GstStateChangeReturn -gst_kvs_sink_change_state(GstElement *element, GstStateChange transition) { +static GstStateChangeReturn gst_kvs_sink_change_state(GstElement* element, GstStateChange transition) +{ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; - GstKvsSink *kvssink = GST_KVS_SINK (element); + GstKvsSink* kvssink = GST_KVS_SINK(element); auto data = 
kvssink->data; string err_msg = ""; ostringstream oss; switch (transition) { case GST_STATE_CHANGE_NULL_TO_READY: - if(kvssink->log_config_path != NULL) { + if (kvssink->log_config_path != NULL) { log4cplus::initialize(); log4cplus::PropertyConfigurator::doConfigure(kvssink->log_config_path); LOG_INFO("Logger config being used: " << kvssink->log_config_path); @@ -1594,7 +1540,7 @@ gst_kvs_sink_change_state(GstElement *element, GstStateChange transition) { kvssink->data->first_pts = GST_CLOCK_TIME_NONE; kvssink->data->producer_start_time = GST_CLOCK_TIME_NONE; - } catch (runtime_error &err) { + } catch (runtime_error& err) { oss << "Failed to init kvs producer. Error: " << err.what(); err_msg = oss.str(); ret = GST_STATE_CHANGE_FAILURE; @@ -1607,16 +1553,16 @@ gst_kvs_sink_change_state(GstElement *element, GstStateChange transition) { } break; case GST_STATE_CHANGE_READY_TO_PAUSED: - gst_collect_pads_start (kvssink->collect); + gst_collect_pads_start(kvssink->collect); break; case GST_STATE_CHANGE_PAUSED_TO_READY: LOG_INFO("Stopping kvssink for " << kvssink->stream_name); - gst_collect_pads_stop (kvssink->collect); + gst_collect_pads_stop(kvssink->collect); // Need this check in case an EOS was received in the buffer handler and // stream was already stopped. 
Although stopSync() is an idempotent call, // we want to avoid an extra call - if(!data->streamingStopped.load()) { + if (!data->streamingStopped.load()) { data->kinesis_video_stream->stopSync(); data->streamingStopped.store(true); } else { @@ -1631,21 +1577,21 @@ gst_kvs_sink_change_state(GstElement *element, GstStateChange transition) { break; } - ret = GST_ELEMENT_CLASS (parent_class)->change_state(element, transition); + ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition); CleanUp: if (ret != GST_STATE_CHANGE_SUCCESS) { - GST_ELEMENT_ERROR (kvssink, LIBRARY, INIT, (NULL), ("%s", err_msg.c_str())); + GST_ELEMENT_ERROR(kvssink, LIBRARY, INIT, (NULL), ("%s", err_msg.c_str())); } return ret; } -GST_DEBUG_CATEGORY (kvs_debug); - -static gboolean plugin_init(GstPlugin *plugin) { +GST_DEBUG_CATEGORY(kvs_debug); +static gboolean plugin_init(GstPlugin* plugin) +{ if (!gst_element_register(plugin, "kvssink", GST_RANK_PRIMARY + 10, GST_TYPE_KVS_SINK)) { return FALSE; } @@ -1655,14 +1601,5 @@ static gboolean plugin_init(GstPlugin *plugin) { } #define PACKAGE "kvssinkpackage" -GST_PLUGIN_DEFINE ( - GST_VERSION_MAJOR, - GST_VERSION_MINOR, - kvssink, - "GStreamer AWS KVS plugin", - plugin_init, - "1.0", - "Proprietary", - "GStreamer", - "http://gstreamer.net/" -) \ No newline at end of file +GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR, kvssink, "GStreamer AWS KVS plugin", plugin_init, "1.0", "Proprietary", "GStreamer", + "http://gstreamer.net/") \ No newline at end of file diff --git a/src/gstreamer/gstkvssink.h b/src/gstreamer/gstkvssink.h index 8f0b1a13..1bb8bdad 100644 --- a/src/gstreamer/gstkvssink.h +++ b/src/gstreamer/gstkvssink.h @@ -4,29 +4,29 @@ // // Portions Copyright /* -* GStreamer -* Copyright (C) 2005 Thomas Vander Stichele -* Copyright (C) 2005 Ronald S. 
Bultje -* Copyright (C) 2017 <> -* -* Permission is hereby granted, free of charge, to any person obtaining a -* copy of this software and associated documentation files (the "Software"), -* to deal in the Software without restriction, including without limitation -* the rights to use, copy, modify, merge, publish, distribute, sublicense, -* and/or sell copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following conditions: -* -* The above copyright notice and this permission notice shall be included in -* all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -* DEALINGS IN THE SOFTWARE. -*/ + * GStreamer + * Copyright (C) 2005 Thomas Vander Stichele + * Copyright (C) 2005 Ronald S. Bultje + * Copyright (C) 2017 <> + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + */ #ifndef __GST_KVS_SINK_H__ #define __GST_KVS_SINK_H__ @@ -45,17 +45,12 @@ G_BEGIN_DECLS #define KVS_GST_VERSION AWS_SDK_KVS_PRODUCER_VERSION_STRING -#define GST_TYPE_KVS_SINK \ - (gst_kvs_sink_get_type()) -#define GST_KVS_SINK(obj) \ - (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_KVS_SINK,GstKvsSink)) -#define GST_KVS_SINK_CLASS(klass) \ - (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_KVS_SINK,GstKvsSinkClass)) -#define GST_IS_KVS_SINK(obj) \ - (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_KVS_SINK)) -#define GST_IS_KVS_SINK_CLASS(klass) \ - (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_KVS_SINK)) -#define GST_KVS_SINK_CAST(obj) ((GstKvsSink *)obj) +#define GST_TYPE_KVS_SINK (gst_kvs_sink_get_type()) +#define GST_KVS_SINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_KVS_SINK, GstKvsSink)) +#define GST_KVS_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_KVS_SINK, GstKvsSinkClass)) +#define GST_IS_KVS_SINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_KVS_SINK)) +#define GST_IS_KVS_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_KVS_SINK)) +#define GST_KVS_SINK_CAST(obj) ((GstKvsSink*) obj) #ifdef CPP_VERSION_STRING #define KVSSINK_USER_AGENT_POSTFIX_VERSION CPP_VERSION_STRING @@ -70,17 +65,13 @@ typedef struct _KvsSinkMetric KvsSinkMetric; /* all information needed for one track */ typedef struct _GstKvsSinkTrackData { - GstCollectData collect; /* we extend the CollectData */ + GstCollectData collect; /* we extend the CollectData */ MKV_TRACK_INFO_TYPE 
track_type; - GstKvsSink *kvssink; + GstKvsSink* kvssink; guint track_id; } GstKvsSinkTrackData; -typedef enum _MediaType { - AUDIO_VIDEO, - VIDEO_ONLY, - AUDIO_ONLY -} MediaType; +typedef enum _MediaType { AUDIO_VIDEO, VIDEO_ONLY, AUDIO_ONLY } MediaType; /** * GstKvsSink: @@ -88,57 +79,55 @@ typedef enum _MediaType { * The opaque #GstKvsSink data structure. */ struct _GstKvsSink { - GstElement element; + GstElement element; - GstCollectPads *collect; + GstCollectPads* collect; // Stream definition - gchar *stream_name; - gchar *user_agent; - guint retention_period_hours; - gchar *kms_key_id; - STREAMING_TYPE streaming_type; - gchar *content_type; - guint max_latency_seconds; - guint fragment_duration_miliseconds; - guint timecode_scale_milliseconds; - gboolean key_frame_fragmentation; - gboolean frame_timecodes; - gboolean absolute_fragment_times; - gboolean fragment_acks; - gboolean restart_on_error; - gboolean recalculate_metrics; - gboolean allow_create_stream; - gboolean disable_buffer_clipping; - guint framerate; - guint avg_bandwidth_bps; - guint buffer_duration_seconds; - guint replay_duration_seconds; - guint connection_staleness_seconds; - gchar *codec_id; - gchar *track_name; - gchar *access_key; - gchar *secret_key; - gchar *session_token; - gchar *aws_region; - guint rotation_period; - gchar *log_config_path; - guint storage_size; - guint stop_stream_timeout; - guint service_connection_timeout; - guint service_completion_timeout; - gchar *credential_file_path; - GstStructure *iot_certificate; - GstStructure *stream_tags; - guint64 file_start_time; - MKV_TRACK_INFO_TYPE track_info_type; - gchar *audio_codec_id; - - - guint num_streams; - guint num_audio_streams; - guint num_video_streams; - + gchar* stream_name; + gchar* user_agent; + guint retention_period_hours; + gchar* kms_key_id; + STREAMING_TYPE streaming_type; + gchar* content_type; + guint max_latency_seconds; + guint fragment_duration_miliseconds; + guint timecode_scale_milliseconds; + gboolean 
key_frame_fragmentation; + gboolean frame_timecodes; + gboolean absolute_fragment_times; + gboolean fragment_acks; + gboolean restart_on_error; + gboolean recalculate_metrics; + gboolean allow_create_stream; + gboolean disable_buffer_clipping; + guint framerate; + guint avg_bandwidth_bps; + guint buffer_duration_seconds; + guint replay_duration_seconds; + guint connection_staleness_seconds; + gchar* codec_id; + gchar* track_name; + gchar* access_key; + gchar* secret_key; + gchar* session_token; + gchar* aws_region; + guint rotation_period; + gchar* log_config_path; + guint storage_size; + guint stop_stream_timeout; + guint service_connection_timeout; + guint service_completion_timeout; + gchar* credential_file_path; + GstStructure* iot_certificate; + GstStructure* stream_tags; + guint64 file_start_time; + MKV_TRACK_INFO_TYPE track_info_type; + gchar* audio_codec_id; + + guint num_streams; + guint num_audio_streams; + guint num_video_streams; std::unique_ptr credentials_; std::shared_ptr data; @@ -146,35 +135,27 @@ struct _GstKvsSink { struct _GstKvsSinkClass { GstElementClass parent_class; - void (*sink_fragment_ack) (GstKvsSink *kvssink, gpointer user_data); - void (*sink_stream_metric) (GstKvsSink *kvssink, gpointer user_data); - void (*sink_stream_error) (GstKvsSink *kvssink, gpointer user_data); + void (*sink_fragment_ack)(GstKvsSink* kvssink, gpointer user_data); + void (*sink_stream_metric)(GstKvsSink* kvssink, gpointer user_data); + void (*sink_stream_error)(GstKvsSink* kvssink, gpointer user_data); }; -GType gst_kvs_sink_get_type (void); +GType gst_kvs_sink_get_type(void); G_END_DECLS struct _KvsSinkCustomData { - - _KvsSinkCustomData(): - stream_status(STATUS_SUCCESS), - last_dts(0), - pts_base(0), - media_type(VIDEO_ONLY), - first_video_frame(true), - use_original_pts(false), - get_metrics(false), - on_first_frame(true), - frame_count(0), - first_pts(GST_CLOCK_TIME_NONE), - producer_start_time(GST_CLOCK_TIME_NONE), - streamingStopped(false) {} + 
_KvsSinkCustomData() + : stream_status(STATUS_SUCCESS), last_dts(0), pts_base(0), media_type(VIDEO_ONLY), first_video_frame(true), use_original_pts(false), + get_metrics(false), on_first_frame(true), frame_count(0), first_pts(GST_CLOCK_TIME_NONE), producer_start_time(GST_CLOCK_TIME_NONE), + streamingStopped(false) + { + } std::unique_ptr kinesis_video_producer; std::shared_ptr kinesis_video_stream; std::unordered_set track_cpd_received; - GstKvsSink *kvs_sink = nullptr; + GstKvsSink* kvs_sink = nullptr; MediaType media_type; bool first_video_frame; bool use_original_pts; @@ -193,14 +174,13 @@ struct _KvsSinkCustomData { guint err_signal_id = 0; guint ack_signal_id = 0; guint metric_signal_id = 0; - uint64_t start_time; // [nanoSeconds] + uint64_t start_time; // [nanoSeconds] }; struct _KvsSinkMetric { - _KvsSinkMetric(): - frame_pts(0), - on_first_frame(true) - {} + _KvsSinkMetric() : frame_pts(0), on_first_frame(true) + { + } KinesisVideoStreamMetrics stream_metrics = KinesisVideoStreamMetrics(); KinesisVideoProducerMetrics client_metrics = KinesisVideoProducerMetrics(); uint64_t frame_pts;