63 changes: 63 additions & 0 deletions backend/openedx_ai_extensions/admin.py
@@ -9,10 +9,73 @@
from django.utils.html import format_html
from django.utils.safestring import mark_safe

from openedx_ai_extensions.models import PromptTemplate
from openedx_ai_extensions.workflows.models import AIWorkflowProfile, AIWorkflowScope, AIWorkflowSession
from openedx_ai_extensions.workflows.template_utils import discover_templates, parse_json5_string


@admin.register(PromptTemplate)
class PromptTemplateAdmin(admin.ModelAdmin):
"""
Admin interface for Prompt Templates - one big textbox for easy editing.
"""

list_display = ('slug', 'body_preview', 'updated_at')
list_filter = ('created_at', 'updated_at')
search_fields = ('slug', 'body')
readonly_fields = ('id', 'created_at', 'updated_at')

    def get_fieldsets(self, request, obj=None):
        """Return dynamic fieldsets, with a UUID usage example when editing an existing object."""
        if obj and obj.pk:
            # Editing an existing template - show a UUID example.
            # Pass the values as format_html() arguments; calling format_html()
            # on a pre-interpolated string is deprecated in Django 4.2.
            identification_description = format_html(
                'Slug is human-readable, ID is the stable UUID reference. <br/>'
                'Use in profile: <code>"prompt_template": "{}"</code> or '
                '<code>"prompt_template": "{}"</code>',
                obj.pk,
                obj.slug,
            )
        else:
            # Creating a new template - no HTML needed, a plain string suffices.
            identification_description = 'Slug is human-readable, ID will be generated automatically.'

        return (
            ('Identification', {
                'fields': ('slug', 'id'),
                'description': identification_description
}),
('Prompt Content', {
'fields': ('body',),
'description': 'The prompt template text - edit in the big textbox below.'
}),
('Timestamps', {
'fields': ('created_at', 'updated_at'),
'classes': ('collapse',)
}),
)

def get_form(self, request, obj=None, change=False, **kwargs):
"""Customize the form to use a large textarea for body."""
form = super().get_form(request, obj, change=change, **kwargs)
if 'body' in form.base_fields:
form.base_fields['body'].widget = forms.Textarea(attrs={
'rows': 25,
'cols': 120,
'class': 'vLargeTextField',
'style': 'font-family: monospace; font-size: 14px;'
})
return form

def body_preview(self, obj):
"""Show truncated body text."""
if obj.body:
preview = obj.body[:80].replace('\n', ' ')
return preview + ('...' if len(obj.body) > 80 else '')
return '-'
body_preview.short_description = 'Prompt Preview'

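For reference, the profile-side usage described in the fieldset help text above would look roughly like the sketch below. The surrounding profile structure is an assumption made for illustration; only the "prompt_template" key, taking either a slug or the template's UUID, comes from the code above.

# Hypothetical fragment of a workflow profile configuration (illustrative only).
profile_config_fragment = {
    # Reference a PromptTemplate by its slug, or equivalently by its stable
    # UUID string (str(template.id)).
    "prompt_template": "eli5",
}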

class AIWorkflowProfileAdminForm(forms.ModelForm):
"""Custom form for AIWorkflowProfile with template selection."""

28 changes: 28 additions & 0 deletions backend/openedx_ai_extensions/migrations/0005_prompttemplate.py
@@ -0,0 +1,28 @@
# Generated by Django 4.2.20 on 2025-12-24 03:04

from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

dependencies = [
('openedx_ai_extensions', '0004_aiworkflowsession_profile_aiworkflowscope_and_more'),
]

operations = [
migrations.CreateModel(
name='PromptTemplate',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, help_text='Stable UUID for referencing this template', primary_key=True, serialize=False)),
('slug', models.SlugField(help_text="Human-readable identifier (e.g., 'eli5', 'summarize_unit')", max_length=100, unique=True)),
('body', models.TextField(help_text='The prompt template text')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
options={
'ordering': ['slug'],
},
)
]
96 changes: 95 additions & 1 deletion backend/openedx_ai_extensions/models.py
@@ -1,6 +1,100 @@
"""
Database models for openedx_ai_extensions.
"""
import logging
import re
from uuid import uuid4

from django.db import models

# TODO: your models here
logger = logging.getLogger(__name__)


class PromptTemplate(models.Model):
"""
Reusable prompt templates for AI workflows.

This is the source for reusable prompt text. Profiles can reference
templates by slug (human-readable) or UUID (stable).

Examples:
- slug: "eli5", "summarize_unit", "explain_concept"
- body: "You are a helpful AI that explains things simply..."

.. no_pii:
"""

id = models.UUIDField(
primary_key=True,
default=uuid4,
editable=False,
help_text="Stable UUID for referencing this template"
)
slug = models.SlugField(
max_length=100,
unique=True,
help_text="Human-readable identifier (e.g., 'eli5', 'summarize_unit')"
)
body = models.TextField(
help_text="The prompt template text"
)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)

class Meta:
"""Model metadata."""

ordering = ['slug']

def __str__(self):
"""Return string representation."""
return f"{self.slug}"

def __repr__(self):
"""Return detailed string representation."""
return f"<PromptTemplate: {self.slug}>"

@classmethod
def load_prompt(cls, template_identifier):
"""
Load prompt text by slug or UUID.

Uses regex to detect UUID format and query accordingly for efficiency.

Args:
template_identifier: Either a slug (str) or UUID string

Returns:
str or None: The prompt body, or None if not found
"""
if not template_identifier:
return None

# UUID pattern: 32 hex digits with or without dashes
uuid_pattern = re.compile(
r'^[a-f\d]{8}-?([a-f\d]{4}-?){3}[a-f\d]{12}$',
re.IGNORECASE
)
if uuid_pattern.match(str(template_identifier)):
try:
template = cls.objects.get(id=template_identifier)
logger.info(f"Loaded prompt template by UUID: {template_identifier}")
return template.body
except cls.DoesNotExist:
logger.warning(f"PromptTemplate with UUID '{template_identifier}' not found")
return None
except Exception as e: # pylint: disable=broad-exception-caught
logger.warning(f"Error loading PromptTemplate by UUID '{template_identifier}': {e}")
return None

# Otherwise, try as slug
try:
template = cls.objects.get(slug=template_identifier)
logger.info(f"Loaded prompt template by slug: {template_identifier}")
return template.body
except cls.DoesNotExist:
logger.warning(f"PromptTemplate with slug '{template_identifier}' not found")
return None
except Exception as e: # pylint: disable=broad-exception-caught
logger.warning(f"Error loading PromptTemplate by slug '{template_identifier}': {e}")
return None
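As a quick illustration of the lookup behaviour above, a minimal usage sketch, intended for a Django shell with this app installed; the slug and body values are invented for the example:

from openedx_ai_extensions.models import PromptTemplate

# Hypothetical template, created only for this example.
template = PromptTemplate.objects.create(
    slug="eli5",
    body="You are a helpful AI that explains things simply...",
)

# load_prompt() accepts either the slug or the UUID and returns the body text.
assert PromptTemplate.load_prompt("eli5") == template.body
assert PromptTemplate.load_prompt(str(template.id)) == template.body

# Unknown identifiers are logged and return None rather than raising.
assert PromptTemplate.load_prompt("does-not-exist") is None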
@@ -46,7 +46,7 @@ def __init__(self, config=None, user_session=None):
)

self.provider = model.split("/")[0]
self.custom_prompt = self.config.get("prompt", None)
self.custom_prompt = self._load_prompt()
self.stream = self.config.get("stream", False)

enabled_tools = self.config.get("enabled_tools", [])
@@ -80,6 +80,29 @@ def __init__(self, config=None, user_session=None):
for key, value in self.mcp_configs.items()
]

def _load_prompt(self):
"""
Load prompt from PromptTemplate model or inline config.

Priority:
1. prompt_template: Load by slug or UUID (unified key)
2. prompt: Use inline prompt (backwards compatibility)
3. None: No custom prompt

Returns:
str or None: The prompt text
"""
from openedx_ai_extensions.models import PromptTemplate # pylint: disable=import-outside-toplevel

# Try loading from PromptTemplate (handles both slug and UUID)
template_id = self.config.get("prompt_template")
if template_id:
prompt = PromptTemplate.load_prompt(template_id)
if prompt:
return prompt
# Fall back to inline prompt (backwards compatibility)
return self.config.get("prompt")

def process(self, *args, **kwargs):
"""Process based on configured function - must be implemented by subclasses"""
raise NotImplementedError("Subclasses must implement process method")
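To make the prompt-resolution priority concrete, here is a minimal sketch of two processor configs. The "model" and "stream" keys mirror the config shape used in __init__ above; the example values themselves are assumptions.

# Assumed example configs; _load_prompt() only reads "prompt_template" and "prompt".
config_with_template = {
    "model": "openai/gpt-4o",       # provider/model, split on "/" in __init__
    "prompt_template": "eli5",      # resolved via PromptTemplate.load_prompt()
    "stream": False,
}
config_with_inline_prompt = {
    "model": "openai/gpt-4o",
    "prompt": "Explain this unit simply.",  # used only when no template resolves
    "stream": False,
}
# With both keys present, a resolvable "prompt_template" wins; if it cannot be
# loaded, the inline "prompt" is the fallback, and with neither key the
# processor runs without a custom prompt.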