@@ -91,12 +91,13 @@ async def get_suggestions(
9191 return await self ._tier1_profile_based (profile , field_context , form_context , previous_answers , form_intent )
9292 else :
9393 # STRICT: No profile = No suggestions.
94- logger .warning ( "⛔ [Lifecycle] No Profile found. Skipping Tier 3 fallback (returning empty) ." )
95- return []
94+ logger .info ( "🌱 [Lifecycle] No Profile found. Attempting Tier 0: Cold-Start suggestions ." )
95+ return await self . _tier0_cold_start ( field_context , form_context , previous_answers , form_intent )
9696
9797 except Exception as e :
9898 logger .error (f"❌ [Lifecycle] CRITICAL ERROR: { str (e )} " , exc_info = True )
9999 return []
100+
100101
101102 async def _tier1_profile_based (
102103 self ,
@@ -121,6 +122,28 @@ async def _tier1_profile_based(
121122 except Exception as e :
122123 logger .error (f"❌ [Lifecycle] Tier 1: LLM Failed ({ str (e )} )" )
123124 return [] # STRICT: Return empty instead of fallback
125+
126+ def _format_profile_for_prompt (self , profile : Any ) -> str :
127+ """Extract and structure profile data for better LLM consumption."""
128+ profile_text = getattr (profile , 'profile_text' , None )
129+
130+ if not profile_text :
131+ return str (profile )
132+
133+ try :
134+ parsed = json .loads (profile_text ) if isinstance (profile_text , str ) else profile_text
135+
136+ # If it's already structured JSON, format it clearly
137+ if isinstance (parsed , dict ):
138+ sections = []
139+ for key , value in parsed .items ():
140+ label = key .replace ("_" , " " ).title ()
141+ sections .append (f"- { label } : { value } " )
142+ return "\n " .join (sections )
143+ except (json .JSONDecodeError , TypeError ):
144+ pass
145+
146+ return str (profile_text )
124147
125148 async def _generate_llm_suggestions (
126149 self ,
@@ -142,7 +165,18 @@ async def _generate_llm_suggestions(
142165 return None
143166
144167 # Extract profile text safely
145- profile_text = getattr (profile , 'profile_text' , str (profile ))
168+ profile_text = self ._format_profile_for_prompt (profile )
169+
170+ # ADD ↓
171+ form_count = getattr (profile , 'form_count' , 1 )
172+ try :
173+ metadata = json .loads (getattr (profile , 'metadata_json' , '{}' ) or '{}' )
174+ except Exception :
175+ metadata = {}
176+ forms_history = metadata .get ('forms_analyzed' , [])
177+ history_str = ", " .join (forms_history [- 5 :]) if forms_history else "None"
178+ maturity_hint = "mature — trust it heavily" if form_count >= 5 else "early stage — use as a hint, stay flexible"
179+
146180
147181 # Context extraction
148182 field_name = field_context .get ("name" , "unknown" )
@@ -189,6 +223,8 @@ async def _generate_llm_suggestions(
1892234. **Guardrail:** NEVER describe the user in the third person (e.g., "User exhibits...") unless the form_type is explicitly 'diagnostic_report'.
190224
1912255. **Output:** Return a JSON object with a list of 1-3 suggestions and your reasoning. The reasoning MUST mention the detected Form Intent.
226+ 6. **Profile Maturity:** The user has filled {form_count} forms — profile is {maturity_hint}. Weight suggestions accordingly.
227+ 7. **Past Forms:** They've previously filled: {forms_history}. Use this to infer domain or recurring needs.
192228
193229FORMAT:
194230{{
@@ -212,6 +248,9 @@ async def _generate_llm_suggestions(
212248 "field_name" : field_name ,
213249 "persona" : persona ,
214250 "previous_answers_context" : previous_answers_str ,
251+ "form_count" : form_count ,
252+ "maturity_hint" : maturity_hint ,
253+ "forms_history" : history_str ,
215254 })
216255
217256 duration = (datetime .now () - start_time ).total_seconds ()
@@ -259,6 +298,83 @@ def _tier3_pattern_only(
259298 # DISABLED as per request
260299 logger .info ("🧩 [Lifecycle] Tier 3 requested but DISABLED." )
261300 return []
async def _tier0_cold_start(
    self,
    field_context: Dict[str, Any],
    form_context: Dict[str, Any],
    previous_answers: Dict[str, str],
    form_intent: Optional[FormIntent]
) -> List[IntelligentSuggestion]:
    """
    Tier 0: Cold-start suggestions for users with no profile.

    Uses only form intent + field semantics to generate contextual
    placeholders via the LLM. Returns an empty list when no LLM is
    available or when generation fails; suggestions carry a reduced
    confidence (0.55) because there is no profile backing them.
    """
    # Guard: no LLM service configured means no Tier 0 at all.
    gemini = get_gemini_service()
    if not gemini or not gemini.llm:
        return []

    # Derive the minimal context we can offer the model. Fall back to
    # generic values when the intent classifier produced nothing.
    field_label = field_context.get("label", field_context.get("name", "unknown"))
    form_purpose = form_intent.intent if form_intent else form_context.get("purpose", "General Form")
    persona = form_intent.persona if form_intent else "Customer"

    # Summarise already-filled fields so suggestions stay consistent.
    if previous_answers:
        filled = [f"- {k}: {v}" for k, v in previous_answers.items() if v]
        previous_answers_str = "\n".join(filled)
    else:
        previous_answers_str = "None"

    prompt = ChatPromptTemplate.from_messages([
        ("system", """You are a smart form-filling assistant helping a first-time user.
You have NO prior information about this user. Generate helpful, realistic example suggestions
for the field based ONLY on the form's purpose and previously filled fields.

CONTEXT:
- Form Intent: {form_intent}
- Persona: {persona}
- Field: "{field_label}"
- Previously Filled Fields:
{previous_answers_context}

INSTRUCTIONS:
1. Generate 2-3 realistic, generic-but-useful example values a typical {persona} would enter.
2. Use the form intent to tailor suggestions (e.g., for "Job Application" + "Skills" field → "Python, FastAPI, SQL").
3. Use previous answers to stay consistent (e.g., if Role = "Designer", suggest design-related skills).
4. Keep suggestions short, realistic, and immediately usable.
5. Do NOT say "example" or "placeholder" - write as if the user would actually submit this.

FORMAT:
{{
    "suggestions": ["Value 1", "Value 2"],
    "reasoning": "Based on the form intent '{form_intent}', these are typical values a {persona} would provide."
}}
""")
    ])

    parser = JsonOutputParser(pydantic_object=SuggestionResponse)
    chain = prompt | gemini.llm | parser

    try:
        result = await chain.ainvoke({
            "form_intent": form_purpose,
            "persona": persona,
            "field_label": field_label,
            "previous_answers_context": previous_answers_str,
        })

        if result and result.get("suggestions"):
            suggestions = []
            for val in result["suggestions"]:
                suggestions.append(IntelligentSuggestion(
                    value=val,
                    confidence=0.55,  # Lower confidence - no profile backing
                    tier=SuggestionTier.PATTERN_ONLY,
                    reasoning=result.get("reasoning", "Cold-start suggestion based on form intent"),
                    behavioral_match="cold_start_intent"
                ))
            return suggestions
    except Exception as e:
        # Swallow LLM/parse failures deliberately: Tier 0 is best-effort.
        logger.error(f"❌ [Lifecycle] Tier 0 Cold Start Failed: {str(e)}")

    return []
377+
262378
263379
264380# Singleton instance
0 commit comments