[2ff88f42] Finalize SuperOffice Integration: Enhanced Persona model with Influencer role, switched Matrix Generator to Gemini, implemented Noise Reduction for Webhooks, and added E2E test scenarios.
This commit is contained in:
@@ -1,17 +1,18 @@
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import argparse
|
||||
from typing import List
|
||||
import google.generativeai as genai
|
||||
|
||||
# Setup Environment
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))
|
||||
|
||||
from backend.database import SessionLocal, Industry, Persona, MarketingMatrix
|
||||
from backend.config import settings
|
||||
|
||||
# --- Configuration ---
# Gemini model used for matrix copy generation; the old OpenAI constant
# (MODEL = "gpt-4o") was removed when the generator switched providers.
MODEL_NAME = "gemini-1.5-pro-latest"  # High quality copy
|
||||
|
||||
def generate_prompt(industry: Industry, persona: Persona) -> str:
|
||||
"""
|
||||
@@ -54,6 +55,8 @@ Tonalität: Professionell, lösungsorientiert, auf den Punkt. Keine Marketing-Fl
|
||||
3. "social_proof": Ein Satz, der Vertrauen aufbaut. Nenne generische Erfolge (z.B. "Unternehmen in der {industry.name} senken so ihre Kosten um 15%"), da wir noch keine spezifischen Logos nennen dürfen.
|
||||
|
||||
--- FORMAT ---
|
||||
Respond ONLY with a valid JSON object. Do not add markdown formatting like ```json ... ```.
|
||||
Format:
|
||||
{{
|
||||
"subject": "...",
|
||||
"intro": "...",
|
||||
@@ -62,7 +65,7 @@ Tonalität: Professionell, lösungsorientiert, auf den Punkt. Keine Marketing-Fl
|
||||
"""
|
||||
return prompt
|
||||
|
||||
def mock_openai_call(prompt: str):
|
||||
def mock_call(prompt: str):
|
||||
"""Simulates an API call for dry runs."""
|
||||
print(f"\n--- [MOCK] GENERATING PROMPT ---\n{prompt[:300]}...\n--------------------------------")
|
||||
return {
|
||||
@@ -71,23 +74,40 @@ def mock_openai_call(prompt: str):
|
||||
"social_proof": "[MOCK] Ähnliche Betriebe sparten 20% Kosten."
|
||||
}
|
||||
|
||||
def real_openai_call(prompt: str):
|
||||
# This would link to the actual OpenAI client
|
||||
# For now, we keep it simple or import from a lib
|
||||
import openai
|
||||
from backend.config import settings
|
||||
|
||||
if not settings.OPENAI_API_KEY:
|
||||
raise ValueError("OPENAI_API_KEY not set")
|
||||
def real_gemini_call(prompt: str):
    """Send ``prompt`` to the Gemini API and return the parsed JSON reply.

    Args:
        prompt: Fully rendered generation prompt (see ``generate_prompt``).

    Returns:
        dict: The JSON object produced by the model.

    Raises:
        ValueError: If ``GEMINI_API_KEY`` is not configured.
        Exception: Re-raises any JSON parse error after logging the raw
            model response for debugging.
    """
    if not settings.GEMINI_API_KEY:
        raise ValueError("GEMINI_API_KEY not set in config/env")

    genai.configure(api_key=settings.GEMINI_API_KEY)

    # Configure Model: response_mime_type asks Gemini to emit raw JSON
    # (no markdown), matching the "Respond ONLY with a valid JSON object"
    # instruction in the prompt.
    generation_config = {
        "temperature": 0.7,
        "top_p": 0.95,
        "top_k": 64,
        "max_output_tokens": 1024,
        "response_mime_type": "application/json",
    }

    model = genai.GenerativeModel(
        model_name=MODEL_NAME,
        generation_config=generation_config,
    )

    response = model.generate_content(prompt)

    try:
        # Clean response if necessary (Gemini usually returns clean JSON with
        # mime_type set, but strip markdown fences defensively).
        # NOTE: the previous slicing (text[7:-3] / text[3:-3]) unconditionally
        # dropped the last 3 characters even when no closing fence was
        # present; strip the opening and closing fences independently instead.
        text = response.text.strip()
        if text.startswith("```json"):
            text = text[len("```json"):]
        elif text.startswith("```"):
            text = text[len("```"):]
        if text.endswith("```"):
            text = text[:-len("```")]
        text = text.strip()

        return json.loads(text)
    except Exception as e:
        print(f"JSON Parse Error: {e}. Raw Response: {response.text}")
        raise
|
||||
|
||||
def run_matrix_generation(dry_run: bool = True, force: bool = False):
|
||||
db = SessionLocal()
|
||||
@@ -96,7 +116,7 @@ def run_matrix_generation(dry_run: bool = True, force: bool = False):
|
||||
personas = db.query(Persona).all()
|
||||
|
||||
print(f"Found {len(industries)} Industries and {len(personas)} Personas.")
|
||||
print(f"Mode: {'DRY RUN (No API calls, no DB writes)' if dry_run else 'LIVE'}")
|
||||
print(f"Mode: {'DRY RUN (No API calls, no DB writes)' if dry_run else 'LIVE - GEMINI GENERATION'}")
|
||||
|
||||
total_combinations = len(industries) * len(personas)
|
||||
processed = 0
|
||||
@@ -120,10 +140,15 @@ def run_matrix_generation(dry_run: bool = True, force: bool = False):
|
||||
prompt = generate_prompt(ind, pers)
|
||||
|
||||
if dry_run:
|
||||
result = mock_openai_call(prompt)
|
||||
result = mock_call(prompt)
|
||||
else:
|
||||
try:
|
||||
result = real_openai_call(prompt)
|
||||
result = real_gemini_call(prompt)
|
||||
# Basic Validation
|
||||
if not result.get("subject") or not result.get("intro"):
|
||||
print(" -> Invalid result structure. Skipping.")
|
||||
continue
|
||||
|
||||
except Exception as e:
|
||||
print(f" -> API ERROR: {e}")
|
||||
continue
|
||||
@@ -155,8 +180,8 @@ def run_matrix_generation(dry_run: bool = True, force: bool = False):
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: defaults to a dry run; --live performs real Gemini
    # calls and writes results to the database.
    cli = argparse.ArgumentParser()
    cli.add_argument(
        "--live",
        action="store_true",
        help="Actually call Gemini and write to DB",
    )
    cli.add_argument(
        "--force",
        action="store_true",
        help="Overwrite existing matrix entries",
    )
    options = cli.parse_args()

    run_matrix_generation(dry_run=not options.live, force=options.force)
|
||||
Reference in New Issue
Block a user