diff --git a/app/Controllers/User/education_controller.py b/app/Controllers/User/education_controller.py index 9751a55..3b4c490 100644 --- a/app/Controllers/User/education_controller.py +++ b/app/Controllers/User/education_controller.py @@ -4,7 +4,7 @@ from sqlmodel import Session from Entities.UserDTOs.education_entity import CreateEducation, UpdateEducation, ReadEducation -from Services.User.education_service import EducationService +from Services.User.education_service import EducationService, get_education_service_with_publisher from Settings.logging_config import setup_logging from db import get_session @@ -14,10 +14,9 @@ @router.post("/", response_model=ReadEducation) -def create_education(education_create: CreateEducation, session: Session = Depends(get_session)): - service = EducationService(session) +def create_education(education_create: CreateEducation, education_service: EducationService = Depends(get_education_service_with_publisher)): logger.info(f"Creating Education entry for profile_id={education_create.profile_id}") - education = service.create_education(education_create) + education = education_service.create_education(education_create) logger.info(f"Created Education with ID: {education.id}") return education @@ -53,19 +52,17 @@ def list_educations( @router.put("/{education_id}", response_model=ReadEducation) -def update_education(education_id: UUID, education_update: UpdateEducation, session: Session = Depends(get_session)): - service = EducationService(session) +def update_education(education_id: UUID, education_update: UpdateEducation, education_service: EducationService = Depends(get_education_service_with_publisher)): logger.info(f"Updating Education ID: {education_id} with data: {education_update.dict(exclude_unset=True)}") - education = service.update_education(education_id, education_update) + education = education_service.update_education(education_id, education_update) logger.info(f"Updated Education ID: {education.id}") return education @router.delete("/{education_id}") -def delete_education(education_id: UUID, session: Session = Depends(get_session)): - service = EducationService(session) +def delete_education(education_id: UUID, education_service: EducationService = Depends(get_education_service_with_publisher)): logger.info(f"Deleting Education ID: {education_id}") - message = service.delete_education(education_id) + message = education_service.delete_education(education_id) logger.info(message) return {"detail": message} diff --git a/app/Controllers/User/leetcode_controller.py b/app/Controllers/User/leetcode_controller.py index 7f0bda7..100153e 100644 --- a/app/Controllers/User/leetcode_controller.py +++ b/app/Controllers/User/leetcode_controller.py @@ -14,14 +14,15 @@ ReadLeetcodeTag, ) +from Services.User.leetcode_service import get_leetcode_service_with_publisher + logger = setup_logging() router = APIRouter(prefix="/Dijkstra/v1/leetcode", tags=["Leetcode"]) @router.post("/sync/{profile_id}/{lc_username}", response_model=ReadLeetcode) -def sync_leetcode(profile_id: UUID, lc_username: str, session: Session = Depends(get_session)): - service = LeetCodeService(session) +def sync_leetcode(profile_id: UUID, lc_username: str, service: LeetCodeService = Depends(get_leetcode_service_with_publisher)): logger.info(f"Syncing LeetCode data profile_id={profile_id} username={lc_username}") return service.create_or_update_from_api(profile_id, lc_username) @@ -55,8 +56,7 @@ def get_leetcode_by_profile(profile_id: UUID, session: Session = Depends(get_ses 
@router.delete("/{leetcode_id}", status_code=204) -def delete_leetcode(leetcode_id: UUID, session: Session = Depends(get_session)): - service = LeetCodeService(session) +def delete_leetcode(leetcode_id: UUID, service: LeetCodeService = Depends(get_leetcode_service_with_publisher)): service.delete(leetcode_id) return Response(status_code=204) diff --git a/app/Controllers/User/workexperience_controller.py b/app/Controllers/User/workexperience_controller.py index 5621465..9736eba 100644 --- a/app/Controllers/User/workexperience_controller.py +++ b/app/Controllers/User/workexperience_controller.py @@ -9,14 +9,15 @@ from db import get_session from Schema.SQL.Enums.enums import EmploymentType, WorkLocationType, Domain +from Services.User.workexperience_service import get_workexperience_service_with_publisher + logger = setup_logging() router = APIRouter(prefix="/Dijkstra/v1/wp", tags=["Work Experiences"]) @router.post("/", response_model=ReadWorkExperience) -def create_work_experience(work_experience_create: CreateWorkExperience, session: Session = Depends(get_session)): - service = WorkExperienceService(session) +def create_work_experience(work_experience_create: CreateWorkExperience, service: WorkExperienceService = Depends(get_workexperience_service_with_publisher)): logger.info(f"Creating Work Experience: {work_experience_create.title} at {work_experience_create.company_name}") work_experience = service.create_work_experience(work_experience_create) logger.info(f"Created Work Experience with ID: {work_experience.id}") @@ -111,17 +112,15 @@ def autocomplete_work_experiences( @router.put("/{work_experience_id}", response_model=ReadWorkExperience) def update_work_experience( - work_experience_id: UUID, work_experience_update: UpdateWorkExperience, session: Session = Depends(get_session) + work_experience_id: UUID, work_experience_update: UpdateWorkExperience, service: WorkExperienceService = Depends(get_workexperience_service_with_publisher) ): - service = WorkExperienceService(session) logger.info(f"Updating Work Experience ID: {work_experience_id} with data: {work_experience_update.dict(exclude_unset=True)}") work_experience = service.update_work_experience(work_experience_id, work_experience_update) logger.info(f"Updated Work Experience ID: {work_experience.id}") return work_experience @router.delete("/{work_experience_id}") -def delete_work_experience(work_experience_id: UUID, session: Session = Depends(get_session)): - service = WorkExperienceService(session) +def delete_work_experience(work_experience_id: UUID, service: WorkExperienceService = Depends(get_workexperience_service_with_publisher)): logger.info(f"Deleting Work Experience ID: {work_experience_id}") message = service.delete_work_experience(work_experience_id) logger.info(f"Deleted Work Experience ID: {work_experience_id}") diff --git a/app/Entities/UserDTOs/extended_entities.py b/app/Entities/UserDTOs/extended_entities.py index ee6929e..ad4e832 100644 --- a/app/Entities/UserDTOs/extended_entities.py +++ b/app/Entities/UserDTOs/extended_entities.py @@ -53,4 +53,3 @@ class ReadUserFull(ReadUser): class Config: from_attributes = True - diff --git a/app/Entities/UserDTOs/profile_entity_kafka_dto.py b/app/Entities/UserDTOs/profile_entity_kafka_dto.py new file mode 100644 index 0000000..d3ce275 --- /dev/null +++ b/app/Entities/UserDTOs/profile_entity_kafka_dto.py @@ -0,0 +1,80 @@ +from pydantic import BaseModel +from typing import Optional, List, Dict +from Schema.SQL.Models.models import Profile +from sqlalchemy import null + +from 
Schema.SQL.Enums.enums import SchoolType +from Utils.utility_functions import calculate_months_served + + +class WorkExperienceUpdateEventDTO(BaseModel): + """Work Experience Update Event DTO""" + salary: int + time_served_months: int + +class CgpaUpdateEventDTO(BaseModel): + """CGPA Update Event DTO""" + cgpa: float + +class DsaMetricsUpdateEventDTO(BaseModel): + """DSA Metrics Update Event DTO""" + contest_rating: int + global_rank: int + +class ProfileUpdateKafkaEvent(BaseModel): + """Kafka event payload for profile updates""" + user_id: str + work_experiences: Optional[List[WorkExperienceUpdateEventDTO]] = None + cgpa_metrics: Optional[CgpaUpdateEventDTO] = None + dsa_metrics: Optional[DsaMetricsUpdateEventDTO] = None + +def map_profile_to_kafka_event(profile: Profile) -> ProfileUpdateKafkaEvent: + """ + Convert a Profile ORM object into a ProfileUpdateKafkaEvent DTO. + """ + + # ---------------------------- + # Work Experiences Mapping + # ---------------------------- + work_experience_dtos = [] + for wx in profile.work_experience: + months_served = calculate_months_served( + wx.start_date_month, + wx.start_date_year, + wx.end_date_month, + wx.end_date_year + ) + work_experience_dtos.append( + WorkExperienceUpdateEventDTO( + salary=(wx.yearly_salary_rupees or 2000000) / 100000, #TODO: This should be 0. Have kept it to a large number for dev testing + time_served_months=months_served, + ) + ) + # ---------------------------- + # CGPA Mapping + # ---------------------------- + edu = next( + (e for e in profile.education + if e.school_type in {SchoolType.UNIVERSITY, SchoolType.COLLEGE}), + None + ) + #FIXME: Currently the CGPA is in the 4 scale while helios expects the 10 scale. + cgpa_dto = CgpaUpdateEventDTO(cgpa=edu.cgpa) if edu and edu.cgpa is not None else None + + # ---------------------------- + # DSA Metrics Mapping + # ---------------------------- + leetcode_profile = profile.leetcode + dsa_dto = DsaMetricsUpdateEventDTO(global_rank=leetcode_profile.global_ranking, contest_rating=leetcode_profile.competition_rating) if leetcode_profile is not None and leetcode_profile.competition_rating is not None else None + + # ---------------------------- + # Construct final event DTO + # ---------------------------- + event = ProfileUpdateKafkaEvent( + user_id=str(profile.user_rel.github_user_name), + work_experiences=work_experience_dtos, + cgpa_metrics=cgpa_dto, + dsa_metrics=dsa_dto, + ) + + return event \ No newline at end of file diff --git a/app/Entities/UserDTOs/workexperience_entity.py b/app/Entities/UserDTOs/workexperience_entity.py index cfb087d..1ccd326 100644 --- a/app/Entities/UserDTOs/workexperience_entity.py +++ b/app/Entities/UserDTOs/workexperience_entity.py @@ -154,6 +154,7 @@ class ReadWorkExperience(BaseModel): domain: Optional[List[Domain]] company_name: str company_logo: Optional[str] + yearly_salary_rupees: Optional[float] currently_working: bool location: Optional[ReadLocation] location_type: WorkLocationType diff --git a/app/Schema/SQL/Models/models.py b/app/Schema/SQL/Models/models.py index 117fba6..9638460 100644 --- a/app/Schema/SQL/Models/models.py +++ b/app/Schema/SQL/Models/models.py @@ -184,7 +184,7 @@ class WorkExperience(UUIDBaseTable, table=True): tools_used: Optional[List[Tools]] = Field( sa_column=Column(ARRAY(SQLEnum(Tools, name="TOOLS"))) ) - + yearly_salary_rupees: Optional[float] = None # Relationships profile_rel: Profile = Relationship(back_populates="work_experience") location_rel: Optional[Location] = Relationship(back_populates="work_experience") diff --git 
a/app/Services/User/education_service.py b/app/Services/User/education_service.py index 3479428..cb0f513 100644 --- a/app/Services/User/education_service.py +++ b/app/Services/User/education_service.py @@ -1,5 +1,6 @@ from uuid import UUID from typing import List, Optional +from fastapi.params import Depends from sqlmodel import Session from Repository.User.education_repository import EducationRepository @@ -8,13 +9,16 @@ from Entities.UserDTOs.location_entity import CreateLocation, ReadLocation from Schema.SQL.Models.models import Education, Profile, Location from Utils.Exceptions.user_exceptions import EducationNotFound, ProfileNotFound, LocationNotFound +from Services.Kafka.producer_service import KafkaProducerService, get_kafka_producer +from db import get_session class EducationService: - def __init__(self, session: Session): + def __init__(self, session: Session, kafka_producer: KafkaProducerService = None): self.repo = EducationRepository(session) self.location_repo = LocationRepository(session) self.session = session + self.kafka_producer = kafka_producer def _convert_to_read_dto(self, education: Education) -> ReadEducation: """Convert Education database model to ReadEducation DTO with populated location""" @@ -73,6 +77,7 @@ def create_education(self, education_create: CreateEducation) -> ReadEducation: education = Education(**education_data) created_education = self.repo.create(education) + self.publish_profile_on_education_persist(created_education.profile_id) return self._convert_to_read_dto(created_education) def get_education(self, education_id: UUID) -> ReadEducation: @@ -136,13 +141,16 @@ def update_education(self, education_id: UUID, education_update: UpdateEducation setattr(education, key, value) updated_education = self.repo.update(education) + self.publish_profile_on_education_persist(updated_education.profile_id) return self._convert_to_read_dto(updated_education) def delete_education(self, education_id: UUID) -> str: education = self.repo.get(education_id) if not education: raise EducationNotFound(education_id) + profile_id = education.profile_id self.repo.delete(education) + self.publish_profile_on_education_persist(profile_id) return f"Education {education_id} deleted successfully" def get_educations_by_profile_with_locations(self, profile_id: UUID) -> List[ReadEducation]: @@ -158,4 +166,18 @@ def get_educations_by_github_username(self, github_username: str) -> List[ReadEd profile_service = ProfileService(self.session) profile_id = profile_service.get_profile_id_by_github_username(github_username) - return self.get_educations_by_profile(profile_id) \ No newline at end of file + return self.get_educations_by_profile(profile_id) + + def publish_profile_on_education_persist(self, profile_id: UUID): + """Publish profile update when education is created or updated""" + from Services.User.profile_service import ProfileService + + profile_service = ProfileService(self.session, self.kafka_producer) + profile_service.publish_profile_update(profile_id) + + +def get_education_service_with_publisher( + session: Session = Depends(get_session), + kafka: KafkaProducerService = Depends(get_kafka_producer), +): + return EducationService(session=session, kafka_producer=kafka) \ No newline at end of file diff --git a/app/Services/User/leetcode_service.py b/app/Services/User/leetcode_service.py index 8be6355..1d64fab 100644 --- a/app/Services/User/leetcode_service.py +++ b/app/Services/User/leetcode_service.py @@ -1,6 +1,7 @@ from typing import Optional, List, Dict, Any from uuid import 
UUID import requests +from fastapi import Depends from sqlmodel import Session from Settings.logging_config import setup_logging @@ -27,16 +28,19 @@ LeetcodeTagNotFound, ) +from Services.Kafka.producer_service import KafkaProducerService, get_kafka_producer +from db import get_session + logger = setup_logging() class LeetCodeService: - def __init__(self, session: Session): + def __init__(self, session: Session, kafka_producer: KafkaProducerService = None): self.session = session self.repo = LeetcodeRepository(session) self.badge_repo = LeetcodeBadgeRepository(session) self.tag_repo = LeetcodeTagRepository(session) - + self.kafka_producer = kafka_producer # Basic read helpers (kept small to align with minimal service design) def get(self, leetcode_id: UUID) -> Optional[Leetcode]: return self.repo.get(leetcode_id) @@ -187,7 +191,10 @@ def diff_count(name: str) -> Optional[int]: top_percentage=contest.get("topPercentage"), competition_badge=(contest.get("badge") or {}).get("name") if isinstance(contest.get("badge"), dict) else None, ) - return self.repo.create(model) + + model = self.repo.create(model) + self.publish_profile_on_leetcode_persist(model.profile_id) + return model def _fetch_api(self, username: str) -> Dict[str, Any]: try: @@ -206,3 +213,15 @@ def _fetch_api(self, username: str) -> Dict[str, Any]: except Exception as e: logger.exception("LeetCode API fetch failed") return {"error": str(e)} + + def publish_profile_on_leetcode_persist(self, profile_id: UUID): + """Publish profile update when LeetCode data is created or updated""" + from Services.User.profile_service import ProfileService + profile_service = ProfileService(self.session, self.kafka_producer) + profile_service.publish_profile_update(profile_id) + +def get_leetcode_service_with_publisher( + session: Session = Depends(get_session), + kafka: KafkaProducerService = Depends(get_kafka_producer), +): + return LeetCodeService(session=session, kafka_producer=kafka) diff --git a/app/Services/User/profile_service.py b/app/Services/User/profile_service.py index f9f197b..f2f36d5 100644 --- a/app/Services/User/profile_service.py +++ b/app/Services/User/profile_service.py @@ -1,17 +1,24 @@ from uuid import UUID +from fastapi import Depends +from kafka import KafkaProducer from sqlmodel import Session, select from typing import List, Optional -from Entities.UserDTOs.profile_entity import CreateProfile, UpdateProfile +from Entities.UserDTOs.profile_entity import CreateProfile, ReadProfile, UpdateProfile from Schema.SQL.Models.models import Profile, User from Repository.User.profile_repository import ProfileRepository -from Utils.Exceptions.user_exceptions import GitHubUsernameNotFound, ProfileAlreadyExists, ProfileNotFound, ProfileNotFound, UserNotFound +from Utils.Exceptions.user_exceptions import ProfileAlreadyExists, ProfileNotFound, UserNotFound + +from Entities.UserDTOs.profile_entity_kafka_dto import map_profile_to_kafka_event +from Services.Kafka.producer_service import KafkaProducerService, get_kafka_producer +from db import get_session class ProfileService: - def __init__(self, session: Session): + def __init__(self, session: Session, kafka_producer: KafkaProducerService = None): self.repo = ProfileRepository(session) self.session = session + self.kafka_producer = kafka_producer def create_profile(self, profile_create: CreateProfile) -> Profile: # Check if user exists @@ -114,72 +121,124 @@ def get_profile_with_user_details(self, profile_id: UUID) -> Optional[Profile]: return None - def 
get_profile_full_data_by_user_id(self, user_id: UUID) -> dict: + def get_profile_by_profile_id(self, profile_id: UUID) -> Profile: """ Get full profile data with all nested relationships populated. Returns profile with education, work experience, certifications, publications, volunteering, projects, and leetcode (None for now). """ + + # Get the base profile + from Entities.UserDTOs.profile_entity import ReadProfile + profile = self.get_profile(profile_id) + return profile + + def populate_full_profile_data(self, profile: Profile, fields: list[str] | str = "all") -> dict: + """ + Populate full profile data with selected nested relationships. + + Args: + profile (Profile): Base profile object. + fields (list[str] | str): Sections to fetch. + Use "all" (default) to fetch everything. + Example: ["education", "projects", "certifications"] + + Returns: + dict: Profile dictionary with selected nested relations populated. + """ + + # If user passed "all" or omitted fields → fetch everything + fetch_all = fields == "all" + if not fetch_all and not isinstance(fields, list): + raise ValueError("fields must be 'all' or a list of field names") + + # Local import to avoid circular dependencies from Services.User.education_service import EducationService from Services.User.workexperience_service import WorkExperienceService from Services.User.certifications_service import CertificationService from Services.User.publication_service import PublicationService from Services.User.volunteering_service import VolunteeringService from Services.User.projects_service import ProjectsService - from Entities.UserDTOs.profile_entity import ReadProfile + from Entities.UserDTOs.certification_entity import ReadCertification from Entities.UserDTOs.publication_entity import ReadPublication from Entities.UserDTOs.volunteering_entity import ReadVolunteering - from Entities.UserDTOs.projects_entity import ReadProject - - # Get the base profile - profile = self.get_profile_by_user_id(user_id) - profile_dict = ReadProfile.model_validate(profile).model_dump() - - # Initialize all sub-services + + profile_id = profile.id + # Initialize sub-services (only once) education_service = EducationService(self.session) work_exp_service = WorkExperienceService(self.session) cert_service = CertificationService(self.session) pub_service = PublicationService(self.session) vol_service = VolunteeringService(self.session) proj_service = ProjectsService(self.session) - - # Get all related data - profile_dict['education'] = education_service.get_educations_by_profile_with_locations(profile.id) - profile_dict['work_experience'] = work_exp_service.get_work_experiences_by_profile_with_locations(profile.id) - - # Get certifications - try: - certifications = cert_service.get_certifications_by_profile(profile.id) - profile_dict['certifications'] = [ReadCertification.model_validate(cert).model_dump() for cert in certifications] - except: - profile_dict['certifications'] = [] - - # Get publications - try: - publications = pub_service.get_publications_by_profile_id(profile.id) - profile_dict['publications'] = [ReadPublication.model_validate(pub).model_dump() for pub in publications] - except: - profile_dict['publications'] = [] - - # Get volunteering - try: - volunteering = vol_service.get_volunteering_by_profile_id(profile.id) - profile_dict['volunteering'] = [ReadVolunteering.model_validate(vol).model_dump() for vol in volunteering] - except: - profile_dict['volunteering'] = [] - - # Get projects - try: - projects = 
proj_service.get_projects_by_profile(profile.id) - profile_dict['projects'] = [proj.model_dump() if hasattr(proj, 'model_dump') else proj for proj in projects] - except: - profile_dict['projects'] = [] - - # Leetcode excluded for now - profile_dict['leetcode'] = None - + profile_dict = ReadProfile.model_validate(profile).model_dump() + # Helper to check whether a field needs to be fetched + def need(field_name: str) -> bool: + return fetch_all or field_name in fields + + # ------------------------------- + # Conditional population + # ------------------------------- + if need("education"): + profile_dict["education"] = education_service.get_educations_by_profile_with_locations(profile_id) + + if need("work_experience"): + profile_dict["work_experience"] = work_exp_service.get_work_experiences_by_profile_with_locations(profile_id) + + if need("certifications"): + try: + certifications = cert_service.get_certifications_by_profile(profile_id) + profile_dict["certifications"] = [ + ReadCertification.model_validate(cert).model_dump() for cert in certifications + ] + except Exception: + profile_dict["certifications"] = [] + + if need("publications"): + try: + publications = pub_service.get_publications_by_profile_id(profile_id) + profile_dict["publications"] = [ + ReadPublication.model_validate(pub).model_dump() for pub in publications + ] + except Exception: + profile_dict["publications"] = [] + + if need("volunteering"): + try: + vols = vol_service.get_volunteering_by_profile_id(profile_id) + profile_dict["volunteering"] = [ + ReadVolunteering.model_validate(vol).model_dump() for vol in vols + ] + except Exception: + profile_dict["volunteering"] = [] + + if need("projects"): + try: + projects = proj_service.get_projects_by_profile(profile_id) + # Some DTOs use model_dump, others do not + profile_dict["projects"] = [ + proj.model_dump() if hasattr(proj, "model_dump") else proj for proj in projects + ] + except Exception: + profile_dict["projects"] = [] + + # Leetcode (placeholder) + if need("leetcode"): + profile_dict["leetcode"] = None + return profile_dict + + def get_profile_full_data_by_user_id(self, user_id: UUID) -> dict: + """ + Get full profile data with all nested relationships populated. + Returns profile with education, work experience, certifications, + publications, volunteering, projects, and leetcode (None for now). + """ + from Entities.UserDTOs.profile_entity import ReadProfile + # Get the base profile + profile = self.get_profile_by_user_id(user_id) + return self.populate_full_profile_data(profile) def get_profile_full_data_by_github_username(self, github_username: str) -> dict: @@ -238,4 +297,29 @@ def delete_profile_by_github_username(self, github_username: str) -> str: profile = self.get_profile_by_user_id(user_id) # Delete the profile using the existing method - return self.delete_profile(profile.id) \ No newline at end of file + return self.delete_profile(profile.id) + + def publish_profile_update(self, profile_id: UUID): + """ + Publish profile update event. + This is a placeholder for the actual implementation of publishing + profile updates to a message broker or notification system. 
+ """ + profile = self.get_profile_by_profile_id(profile_id) + event = { + "eventType": "DataForgeUpdateEvent", + "data": map_profile_to_kafka_event(profile).model_dump(), + } + try: + self.kafka_producer.publish("dm_user_metrics", key=str(profile.user_rel.github_user_name), value=event) + except Exception as ex: + # Production: log and/or use DLQ + print(f"Kafka publish failed: {ex}") + raise + + +def get_profile_service_with_publisher( + session: Session = Depends(get_session), + kafka: KafkaProducerService = Depends(get_kafka_producer), +): + return ProfileService(session=session, kafka_producer=kafka) \ No newline at end of file diff --git a/app/Services/User/workexperience_service.py b/app/Services/User/workexperience_service.py index a66ecd6..2e1acbd 100644 --- a/app/Services/User/workexperience_service.py +++ b/app/Services/User/workexperience_service.py @@ -1,7 +1,8 @@ from uuid import UUID + +from fastapi import Depends from sqlmodel import Session from typing import List, Optional -from datetime import date from Schema.SQL.Models.models import WorkExperience, Profile, Location from Schema.SQL.Enums.enums import EmploymentType, WorkLocationType, Domain, Tools @@ -10,12 +11,16 @@ from Entities.UserDTOs.workexperience_entity import CreateWorkExperience, UpdateWorkExperience, ReadWorkExperience from Entities.UserDTOs.location_entity import CreateLocation, ReadLocation from Utils.Exceptions.user_exceptions import LocationNotFound, ProfileNotFound, WorkExperienceNotFound +from Services.Kafka.producer_service import KafkaProducerService, get_kafka_producer +from db import get_session class WorkExperienceService: - def __init__(self, session: Session): + def __init__(self, session: Session, kafka_producer: KafkaProducerService = None): self.repo = WorkExperienceRepository(session) self.location_repo = LocationRepository(session) self.session = session + self.kafka_producer = kafka_producer + def _convert_to_read_dto(self, work_experience: WorkExperience) -> ReadWorkExperience: """Convert WorkExperience database model to ReadWorkExperience DTO with populated location""" @@ -74,6 +79,7 @@ def create_work_experience(self, work_experience_create: CreateWorkExperience) - work_experience = WorkExperience(**work_experience_data) created_work_experience = self.repo.create(work_experience) + self.publish_profile_on_work_experience_persist(created_work_experience.profile_id) return self._convert_to_read_dto(created_work_experience) def get_work_experience(self, work_experience_id: UUID) -> Optional[ReadWorkExperience]: @@ -182,6 +188,7 @@ def update_work_experience(self, work_experience_id: UUID, work_experience_updat for key, value in update_data.items(): setattr(work_experience, key, value) updated_work_experience = self.repo.update(work_experience) + self.publish_profile_on_work_experience_persist(updated_work_experience.profile_id) return self._convert_to_read_dto(updated_work_experience) if updated_work_experience else None def delete_work_experience(self, work_experience_id: UUID) -> Optional[str]: @@ -189,6 +196,7 @@ def delete_work_experience(self, work_experience_id: UUID) -> Optional[str]: if not work_experience: raise WorkExperienceNotFound(work_experience_id) self.repo.delete(work_experience) + self.publish_profile_on_work_experience_persist(work_experience.profile_id) return f"Work Experience {work_experience_id} deleted successfully" def get_work_experiences_by_profile_with_locations(self, profile_id: UUID) -> List[ReadWorkExperience]: @@ -208,4 +216,17 @@ def 
get_work_experiences_by_github_username(self, github_username: str) -> List[ def get_work_experiences_by_github_username_with_locations(self, github_username: str) -> List[ReadWorkExperience]: """Get all work experiences with locations by GitHub username""" - return self.get_work_experiences_by_github_username(github_username) \ No newline at end of file + return self.get_work_experiences_by_github_username(github_username) + + def publish_profile_on_work_experience_persist(self, profile_id: UUID): + """Publish profile update when work experience is created or updated""" + from Services.User.profile_service import ProfileService + profile_service = ProfileService(self.session, self.kafka_producer) + profile_service.publish_profile_update(profile_id) + + +def get_workexperience_service_with_publisher( + session: Session = Depends(get_session), + kafka: KafkaProducerService = Depends(get_kafka_producer), +): + return WorkExperienceService(session=session, kafka_producer=kafka) \ No newline at end of file diff --git a/app/Utils/utility_functions.py b/app/Utils/utility_functions.py new file mode 100644 index 0000000..851836e --- /dev/null +++ b/app/Utils/utility_functions.py @@ -0,0 +1,25 @@ +from typing import Optional +from datetime import date + + +def calculate_months_served( + start_month: int, + start_year: int, + end_month: Optional[int], + end_year: Optional[int] +) -> int: + + # If no end date, assume ongoing → use today + if end_month is None or end_year is None: + today = date.today() + end_month = today.month + end_year = today.year + + # Convert dates into total months + start_total = start_year * 12 + start_month + end_total = end_year * 12 + end_month + + months = end_total - start_total + + # Ensure non-negative result (if data is wrong) + return max(months, 0) \ No newline at end of file diff --git a/app/db.py b/app/db.py index 5fddb4a..1ae17dd 100644 --- a/app/db.py +++ b/app/db.py @@ -2,7 +2,6 @@ import os from dotenv import load_dotenv from sqlmodel import SQLModel, create_engine, Session -from contextlib import contextmanager load_dotenv() @@ -17,4 +16,4 @@ def init_db(): # Dependency for FastAPI def get_session(): with Session(engine) as session: - yield session + yield session \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index ada4331..07eb129 100644 Binary files a/requirements.txt and b/requirements.txt differ
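
Reviewer note (not part of the patch): the new dependency factories (get_education_service_with_publisher, get_leetcode_service_with_publisher, get_workexperience_service_with_publisher) and ProfileService.publish_profile_update all rely on app/Services/Kafka/producer_service.py, which is not included in this diff. The sketch below shows only the interface the new code appears to assume — a KafkaProducerService exposing publish(topic, key, value) plus a get_kafka_producer FastAPI dependency — written here against kafka-python; the internals, env var name, and serializers are assumptions, not the actual module.

# Hypothetical sketch of app/Services/Kafka/producer_service.py — the real module may differ.
import json
import os

from kafka import KafkaProducer


class KafkaProducerService:
    def __init__(self, bootstrap_servers: str):
        # Keys go out as UTF-8 strings, values as JSON, matching the
        # publish(topic, key=str, value=dict) call sites in this diff.
        self._producer = KafkaProducer(
            bootstrap_servers=bootstrap_servers,
            key_serializer=lambda k: k.encode("utf-8") if k is not None else None,
            value_serializer=lambda v: json.dumps(v).encode("utf-8"),
        )

    def publish(self, topic: str, key: str, value: dict) -> None:
        # Send and flush so the event leaves the client buffer before the request returns.
        self._producer.send(topic, key=key, value=value)
        self._producer.flush()


def get_kafka_producer() -> KafkaProducerService:
    # FastAPI dependency consumed by the *_with_publisher factories above.
    return KafkaProducerService(os.getenv("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092"))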
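For review context, here is an illustrative message as it would land on the dm_user_metrics topic after this change. The envelope ("eventType" plus "data") and field names come from the diff; the message key is the profile's github_user_name; all values below are made up, salary is in lakhs per the mapping, and the CGPA is still on the 4-point scale flagged in the FIXME.

{
  "eventType": "DataForgeUpdateEvent",
  "data": {
    "user_id": "octocat",
    "work_experiences": [
      {"salary": 20, "time_served_months": 18}
    ],
    "cgpa_metrics": {"cgpa": 3.6},
    "dsa_metrics": {"contest_rating": 1724, "global_rank": 35210}
  }
}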