diff --git a/content-engine/Dockerfile b/content-engine/Dockerfile
new file mode 100644
index 00000000..e1c26379
--- /dev/null
+++ b/content-engine/Dockerfile
@@ -0,0 +1,45 @@
+# syntax=docker/dockerfile:1
+FROM node:20-slim AS frontend-build
+WORKDIR /app/frontend
+# Correct path relative to build context (root)
+COPY content-engine/frontend/package*.json ./
+# NOTE: switch to "npm ci" once a package-lock.json is committed
+RUN npm install
+COPY content-engine/frontend/ ./
+RUN npm run build
+
+FROM python:3.11-slim
+WORKDIR /app
+
+# System deps: Node.js is required at runtime for the Express bridge (server.cjs)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    ca-certificates \
+    curl \
+    gnupg \
+    && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
+    && apt-get install -y --no-install-recommends nodejs \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Python dependencies
+COPY content-engine/requirements.txt ./
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Install Backend Node dependencies (production only: express)
+COPY content-engine/package.json ./
+RUN npm install --omit=dev
+
+# Copy backend files
+COPY content-engine/*.py ./
+COPY content-engine/server.cjs ./
+# Helpers and Config from root
+COPY helpers.py ./
+COPY config.py ./
+
+# Copy built frontend
+COPY --from=frontend-build /app/frontend/dist ./dist
+
+# Key-file placeholders; real keys are volume-mounted at runtime (never baked in)
+RUN touch gemini_api_key.txt serpapikey.txt
+
+EXPOSE 3006
+CMD ["node", "server.cjs"]
diff --git a/content-engine/README.md b/content-engine/README.md
index 1af6f97d..9045b12d 100644
--- a/content-engine/README.md
+++ b/content-engine/README.md
@@ -1,11 +1,12 @@
# Content Engine (v1.0 - MVP)
-**Status:** Planning / Initial Setup
+**Status:** Live / MVP Implemented
**Date:** Jan 20, 2026
+**URL:** `/content/`
## 1. Vision & Purpose
The **Content Engine** acts as the execution arm ("The Mouth") for the strategies developed in the GTM Architect ("The Brain").
-It is a **Content Generation Dashboard** designed to produce high-quality, SEO-optimized, and sales-ready marketing assets.
+It is a **Content Generation Dashboard** designed to produce high-quality, SEO-optimized, and sales-ready marketing assets based on the strategic foundation of the GTM Architect.
**Core Philosophy:**
* **SEO First:** Keywords guide the structure, not just metadata.
@@ -17,43 +18,49 @@ It is a **Content Generation Dashboard** designed to produce high-quality, SEO-o
### Data Layer
* **Persistence:** A dedicated SQLite database (`content_engine.db`) stores all content projects, SEO strategies, and drafts.
-* **Integration:** Read-only access to `gtm_projects.db` to import strategy baselines.
+* **Integration:** Read-only access to `gtm_projects.db` via Docker volume mounts to import strategy baselines.
### The Stack
-* **Frontend:** React (Vite + TypeScript) - Focus on "Writer UI" (Split Screen: Config vs. Editor).
-* **Backend:** Python (Flask/Process-based Orchestrator) - Utilizing `helpers.py` for AI interaction.
-* **Container:** Dockerized service, integrated into the existing Marketing Hub network.
+* **Frontend:** React (Vite + TypeScript + Tailwind CSS).
+* **Backend:** Node.js Bridge (`server.cjs`, Express) communicating with a Python Orchestrator (`content_orchestrator.py`).
+* **Container:** Dockerized service (`content-app`), integrated into the central Nginx Gateway.
-## 3. Workflow (MVP Scope: Website & SEO)
+## 3. Implemented Features (MVP)
### Phase 1: Project Setup & Import
-1. Select a source project from GTM Architect (e.g., "PUMA M20").
-2. Import core data: Product Category, Hybrid Logic, Pain Points per ICP.
-3. **Competitor Scan:** Optional input of competitor URLs to analyze their tone and position against it.
+* [x] **GTM Bridge:** Lists and imports strategies directly from GTM Architect.
+* [x] **Context Loading:** Automatically extracts Product Category, ICPs, and Core Value Propositions.
### Phase 2: SEO Strategy
-1. **Seed Generation:** AI suggests seed keywords based on GTM data.
-2. **Expansion & Validation:** Use Google Search/Suggest (via Gemini Tools) to find real-world query patterns.
-3. **Selection:** User selects Primary and Secondary Keywords.
+* [x] **AI Brainstorming:** Generates 15 strategic Seed Keywords (Short & Long Tail) based on the imported strategy.
+* [x] **Persistence:** Saves the chosen SEO strategy to the database.
-### Phase 3: Structure & Copy Generation
-1. **Sitemap:** AI proposes a site structure (Home, Use Case Pages, Tech Specs).
-2. **Section Generation:**
- * **Hero:** Headline (Keyword-focused) + Subline.
- * **Value Prop:** "Pain vs. Solution" (from GTM Phase 4).
- * **Features:** "Feature-to-Value" (from GTM Phase 9).
- * **Proof:** FAQ & Objections (from GTM Phase 6 - Battlecards).
-3. **Refinement:** "Re-Roll" buttons for specific sections (e.g., "Make it punchier", "More focus on Compliance").
+### Phase 3: Website Copy Generator
+* [x] **Section Generator:** Generates structured copy for:
+ * **Hero Section** (Headline, Subline, CTA)
+ * **Challenger Story** (Problem/Agitation)
+ * **Value Proposition** (Hybrid Solution Logic)
+ * **Feature-to-Value** (Tech Deep Dive)
+* [x] **Editor UI:** Integrated Markdown editor for manual refinement.
+* [x] **Copy-to-Clipboard:** Quick export for deployment.
-### Phase 4: Export
-* Copy to Clipboard (Markdown/HTML).
-* PDF Export.
+## 4. Lessons Learned (Development Log)
-## 4. Future Modules (Post-MVP)
+### Docker & Networking
+* **Volume Mounts:** Never mount a local folder over a container folder that contains build artifacts (like `node_modules` or `dist`). *Solution:* Build frontend inside Docker and serve via Node/Express static files, or be extremely precise with volume mounts.
+* **Nginx Routing:** Frontend fetch calls must use **relative paths** (e.g., `api/import` instead of `/api/import`) to respect the reverse proxy path (`/content/`). Absolute paths lead to 404/502 errors because Nginx tries to route them to the root.
+* **502 Bad Gateway:** Often caused by the Node server crashing immediately on startup. *Common cause:* Missing backend dependencies (like `express`) in the Docker image because `package.json` wasn't copied/installed for the backend context.
+
+### Frontend (Vite/React)
+* **TypeScript Configuration:** `tsc` requires a valid `tsconfig.json`. Without it, `npm run build` fails silently or with obscure errors.
+* **Linting vs. Prototyping:** Strict linting (`noUnusedLocals: true`) is good for production but blocks rapid prototyping. *Solution:* Relax rules in `tsconfig.json` during MVP phase.
+* **ES Modules vs. CommonJS:** When `package.json` has `"type": "module"`, configuration files like `postcss.config.js` MUST be renamed to `.cjs` if they use `module.exports`.
+
+### Python & Backend
+* **Standard Libs:** Do NOT include standard libraries like `sqlite3` in `requirements.txt`. Pip will fail.
+* **Strings in Prompts:** ALWAYS use `r"""..."""` (Raw Strings) for prompts to avoid syntax errors with curly braces in JSON templates.
+
+## 5. Roadmap
* **LinkedIn Matrix:** Generate posts for (Persona x Content Type).
* **Outbound Email:** Cold outreach sequences.
-* **Press Kit:** Automated PR generation.
-
-## 5. Quick Actions ("Türen öffnen")
-A dashboard feature to bypass the full project flow:
-* *"Write a LinkedIn post about [Feature] for [Role]."*
+* **Press Kit:** Automated PR generation.
\ No newline at end of file
diff --git a/content-engine/content_db_manager.py b/content-engine/content_db_manager.py
new file mode 100644
index 00000000..bf2cbad4
--- /dev/null
+++ b/content-engine/content_db_manager.py
@@ -0,0 +1,179 @@
+import sqlite3
+import json
+import os
+import logging
+from datetime import datetime
+
+DB_PATH = os.environ.get('DB_PATH', 'content_engine.db')
+GTM_DB_PATH = os.environ.get('GTM_DB_PATH', 'gtm_projects.db')
+
+def get_db_connection(path=DB_PATH):
+ conn = sqlite3.connect(path)
+ conn.row_factory = sqlite3.Row
+ return conn
+
+def init_db():
+ conn = get_db_connection()
+ cursor = conn.cursor()
+
+ # Projects table
+ cursor.execute('''
+ CREATE TABLE IF NOT EXISTS content_projects (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL,
+ gtm_project_id TEXT,
+ category TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ gtm_data_snapshot TEXT, -- Full JSON snapshot of GTM data at import
+ seo_strategy TEXT, -- JSON blob
+ site_structure TEXT, -- JSON blob
+ metadata TEXT -- JSON blob
+ )
+ ''')
+
+ # Content Assets table
+ cursor.execute('''
+ CREATE TABLE IF NOT EXISTS content_assets (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ project_id INTEGER,
+ asset_type TEXT NOT NULL, -- 'website_section', 'linkedin', 'email', 'pr'
+ section_key TEXT, -- e.g., 'hero', 'features', 'faq'
+ title TEXT,
+ content TEXT, -- Markdown content
+ status TEXT DEFAULT 'draft',
+ keywords TEXT, -- JSON list of used keywords
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (project_id) REFERENCES content_projects (id) ON DELETE CASCADE
+ )
+ ''')
+
+ conn.commit()
+ conn.close()
+ logging.info(f"Database initialized at {DB_PATH}")
+
+# --- GTM READ ACCESS ---
+
+def get_all_gtm_projects():
+ """Lists all available GTM projects."""
+ if not os.path.exists(GTM_DB_PATH):
+ logging.warning(f"GTM DB not found at {GTM_DB_PATH}")
+ return []
+
+ conn = get_db_connection(GTM_DB_PATH)
+ try:
+ query = """
+ SELECT
+ id,
+ name,
+ updated_at,
+ json_extract(data, '$.phases.phase1_result.category') AS productCategory
+ FROM gtm_projects
+ ORDER BY updated_at DESC
+ """
+ projects = [dict(row) for row in conn.execute(query).fetchall()]
+ return projects
+ finally:
+ conn.close()
+
+def get_gtm_project_data(gtm_id):
+ """Retrieves full data for a GTM project."""
+ conn = get_db_connection(GTM_DB_PATH)
+ try:
+ row = conn.execute("SELECT data FROM gtm_projects WHERE id = ?", (gtm_id,)).fetchone()
+ return json.loads(row['data']) if row else None
+ finally:
+ conn.close()
+
+# --- CONTENT ENGINE OPERATIONS ---
+
+def import_gtm_project(gtm_id):
+ """Imports a GTM project as a new Content Engine project."""
+ gtm_data = get_gtm_project_data(gtm_id)
+ if not gtm_data:
+ return None
+
+ name = gtm_data.get('name', 'Imported Project')
+ # Phase 1 has the category
+ phase1 = gtm_data.get('phases', {}).get('phase1_result', {})
+ if isinstance(phase1, str): phase1 = json.loads(phase1)
+ category = phase1.get('category', 'Unknown')
+
+ conn = get_db_connection()
+ cursor = conn.cursor()
+ cursor.execute(
+ "INSERT INTO content_projects (name, gtm_project_id, category, gtm_data_snapshot) VALUES (?, ?, ?, ?)",
+ (name, gtm_id, category, json.dumps(gtm_data))
+ )
+ project_id = cursor.lastrowid
+ conn.commit()
+ conn.close()
+ return {"id": project_id, "name": name, "category": category}
+
+def get_all_content_projects():
+ conn = get_db_connection()
+ cursor = conn.cursor()
+ cursor.execute("SELECT id, name, category, created_at, gtm_project_id FROM content_projects ORDER BY updated_at DESC")
+ projects = [dict(row) for row in cursor.fetchall()]
+ conn.close()
+ return projects
+
+def get_content_project(project_id):
+ conn = get_db_connection()
+ row = conn.execute("SELECT * FROM content_projects WHERE id = ?", (project_id,)).fetchone()
+ conn.close()
+ if row:
+ d = dict(row)
+ if d['gtm_data_snapshot']: d['gtm_data_snapshot'] = json.loads(d['gtm_data_snapshot'])
+ if d['seo_strategy']: d['seo_strategy'] = json.loads(d['seo_strategy'])
+ return d
+ return None
+
+def save_seo_strategy(project_id, strategy_dict):
+ conn = get_db_connection()
+ cursor = conn.cursor()
+ cursor.execute(
+ "UPDATE content_projects SET seo_strategy = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?",
+ (json.dumps(strategy_dict), project_id)
+ )
+ conn.commit()
+ conn.close()
+
+def save_content_asset(project_id, asset_type, section_key, title, content, keywords=None):
+ conn = get_db_connection()
+ cursor = conn.cursor()
+ # Check if exists (upsert logic for sections)
+ cursor.execute(
+ "SELECT id FROM content_assets WHERE project_id = ? AND asset_type = ? AND section_key = ?",
+ (project_id, asset_type, section_key)
+ )
+ existing = cursor.fetchone()
+
+ if existing:
+ cursor.execute(
+ "UPDATE content_assets SET title = ?, content = ?, keywords = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?",
+ (title, content, json.dumps(keywords) if keywords else None, existing['id'])
+ )
+ asset_id = existing['id']
+ else:
+ cursor.execute(
+ "INSERT INTO content_assets (project_id, asset_type, section_key, title, content, keywords) VALUES (?, ?, ?, ?, ?, ?)",
+ (project_id, asset_type, section_key, title, content, json.dumps(keywords) if keywords else None)
+ )
+ asset_id = cursor.lastrowid
+
+ conn.commit()
+ conn.close()
+ return asset_id
+
+def get_project_assets(project_id):
+ conn = get_db_connection()
+ cursor = conn.cursor()
+ cursor.execute("SELECT * FROM content_assets WHERE project_id = ?", (project_id,))
+ assets = [dict(row) for row in cursor.fetchall()]
+ conn.close()
+ return assets
+
+if __name__ == "__main__":
+ init_db()
\ No newline at end of file
diff --git a/content-engine/content_orchestrator.py b/content-engine/content_orchestrator.py
new file mode 100644
index 00000000..dbe5ec19
--- /dev/null
+++ b/content-engine/content_orchestrator.py
@@ -0,0 +1,181 @@
+
+import argparse
+import base64
+import json
+import logging
+import sys
+import os
+from datetime import datetime
+import content_db_manager as db_manager
+
+# Ensure helper path is correct
+sys.path.append(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
+
+from helpers import call_gemini_flash, scrape_website_details
+from config import Config
+
+LOG_DIR = "Log_from_docker"
+if not os.path.exists(LOG_DIR):
+ os.makedirs(LOG_DIR)
+
+run_timestamp = datetime.now().strftime("%y-%m-%d_%H-%M-%S")
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+Config.load_api_keys()
+
+def get_copywriter_instruction(lang='de'):
+ return r"""
+ Du bist ein Senior Copywriter und SEO-Experte. Deine Spezialität ist der 'Challenger Sale'.
+ Du schreibst Texte, die fachlich tief fundiert, professionell und leicht aggressiv/fordernd sind.
+
+ DEIN STIL:
+ - Keine Buzzwords ohne Substanz.
+ - Fokus auf den 'Cost of Inaction' (Was kostet es den Kunden, wenn er NICHT handelt?).
+ - Übersetzung von Technik in geschäftlichen Nutzen.
+ - SEO-Integration: Baue Keywords natürlich aber präsent ein.
+ """
+
+# --- MODES ---
+
+def list_gtm_projects(payload):
+ projects = db_manager.get_all_gtm_projects()
+ return {"projects": projects}
+
+def import_project(payload):
+ gtm_id = payload.get('gtmProjectId')
+ if not gtm_id:
+ return {"error": "Missing gtmProjectId"}
+
+ result = db_manager.import_gtm_project(gtm_id)
+ if not result:
+ return {"error": "GTM Project not found or import failed"}
+
+ return result
+
+def list_content_projects(payload):
+ projects = db_manager.get_all_content_projects()
+ return {"projects": projects}
+
+def load_project_details(payload):
+ project_id = payload.get('projectId')
+ project = db_manager.get_content_project(project_id)
+ if not project:
+ return {"error": "Project not found"}
+
+ assets = db_manager.get_project_assets(project_id)
+ project['assets'] = assets
+ return project
+
+def seo_brainstorming(payload):
+ project_id = payload.get('projectId')
+ lang = payload.get('lang', 'de')
+
+ project = db_manager.get_content_project(project_id)
+ if not project:
+ return {"error": "Project context not found"}
+
+ gtm_data = project.get('gtm_data_snapshot', {})
+
+ # GOLDEN RULE: Use Raw Quotes and .format()
+ prompt = r"""
+ Basierend auf folgendem GTM-Kontext (Strategie für ein technisches Produkt):
+ {gtm_context}
+
+ AUFGABE:
+ Generiere eine strategische Liste von 15 SEO-Keywords.
+ 1. 5 Short-Tail Fokus-Keywords (z.B. Produktkategorie + 'kaufen/mieten').
+ 2. 10 Long-Tail Keywords, die spezifische Pain Points oder Usecases adressieren (z.B. 'Kostenreduktion bei Sicherheitsrundgängen').
+
+ Die Keywords müssen für Entscheider relevant sein (CFO, Head of Security, Operations Manager).
+
+ Output NUR als JSON Liste von Strings.
+ """.format(gtm_context=json.dumps(gtm_data))
+
+ response = call_gemini_flash(prompt, system_instruction=get_copywriter_instruction(lang), json_mode=True)
+ keywords = json.loads(response)
+
+ db_manager.save_seo_strategy(project_id, {"seed_keywords": keywords})
+ return {"keywords": keywords}
+
+def generate_section(payload):
+ project_id = payload.get('projectId')
+ section_key = payload.get('sectionKey') # e.g., 'hero', 'problem', 'features'
+ manual_content = payload.get('manualContent')
+ lang = payload.get('lang', 'de')
+ keywords = payload.get('keywords', [])
+
+ if manual_content:
+ # User is saving their manual edits
+ db_manager.save_content_asset(project_id, 'website_section', section_key, f"Section: {section_key}", manual_content, keywords)
+ return {"status": "saved", "sectionKey": section_key}
+
+ project = db_manager.get_content_project(project_id)
+ if not project:
+ return {"error": "Project context not found"}
+
+ gtm_data = project.get('gtm_data_snapshot', {})
+
+ # Context extraction
+ category = project.get('category')
+
+ prompt = r"""
+ Erstelle den Website-Inhalt für die Sektion '{section}' eines Produkts in der Kategorie '{cat}'.
+
+ STRATEGIE-KONTEXT:
+ {gtm_context}
+
+ SEO-KEYWORDS ZU NUTZEN:
+ {kws}
+
+ ANFORDERUNG:
+ - Schreibe im Stil eines Senior Copywriters (fachlich fundiert, Challenger Sale).
+ - Format: Markdown.
+ - Die Sektion muss den Nutzer zur nächsten Aktion (CTA) führen.
+ """.format(
+ section=section_key,
+ cat=category,
+ gtm_context=json.dumps(gtm_data),
+ kws=json.dumps(keywords)
+ )
+
+ content = call_gemini_flash(prompt, system_instruction=get_copywriter_instruction(lang), json_mode=False)
+
+ # Save as asset
+ db_manager.save_content_asset(project_id, 'website_section', section_key, f"Section: {section_key}", content, keywords)
+
+ return {"content": content, "sectionKey": section_key}
+
+def main():
+ parser = argparse.ArgumentParser(description="Content Engine Orchestrator")
+ parser.add_argument("--mode", required=True)
+ parser.add_argument("--payload_file", help="Path to JSON payload")
+
+ args = parser.parse_args()
+
+ payload = {}
+ if args.payload_file:
+ with open(args.payload_file, 'r') as f:
+ payload = json.load(f)
+
+ modes = {
+ "list_gtm_projects": list_gtm_projects,
+ "import_project": import_project,
+ "list_content_projects": list_content_projects,
+ "load_project": load_project_details,
+ "seo_brainstorming": seo_brainstorming,
+ "generate_section": generate_section,
+ }
+
+ if args.mode in modes:
+ try:
+ result = modes[args.mode](payload)
+ print(json.dumps(result, ensure_ascii=False))
+ except Exception as e:
+ logging.error(f"Error in mode {args.mode}: {str(e)}")
+ print(json.dumps({"error": str(e)}))
+ else:
+ print(json.dumps({"error": f"Unknown mode: {args.mode}"}))
+
+if __name__ == "__main__":
+ main()
diff --git a/content-engine/frontend/index.html b/content-engine/frontend/index.html
new file mode 100644
index 00000000..7868bda1
--- /dev/null
+++ b/content-engine/frontend/index.html
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+ Content Engine
+
+
+
+
+
+
diff --git a/content-engine/frontend/package.json b/content-engine/frontend/package.json
new file mode 100644
index 00000000..36e993b0
--- /dev/null
+++ b/content-engine/frontend/package.json
@@ -0,0 +1,28 @@
+{
+  "name": "content-engine-frontend",
+  "private": true,
+  "version": "0.1.0",
+  "type": "module",
+  "scripts": {
+    "dev": "vite",
+    "build": "tsc && vite build",
+    "preview": "vite preview"
+  },
+  "dependencies": {
+    "react": "^18.2.0",
+    "react-dom": "^18.2.0",
+    "lucide-react": "^0.263.1",
+    "clsx": "^2.0.0",
+    "tailwind-merge": "^1.14.0"
+  },
+  "devDependencies": {
+    "@types/react": "^18.2.15",
+    "@types/react-dom": "^18.2.7",
+    "@vitejs/plugin-react": "^4.0.3",
+    "typescript": "^5.0.2",
+    "vite": "^4.4.5",
+    "autoprefixer": "^10.4.14",
+    "postcss": "^8.4.27",
+    "tailwindcss": "^3.3.3"
+  }
+}
diff --git a/content-engine/frontend/postcss.config.cjs b/content-engine/frontend/postcss.config.cjs
new file mode 100644
index 00000000..3b3933fb
--- /dev/null
+++ b/content-engine/frontend/postcss.config.cjs
@@ -0,0 +1,7 @@
+
+module.exports = {
+ plugins: {
+ tailwindcss: {},
+ autoprefixer: {},
+ },
+}
diff --git a/content-engine/frontend/src/App.tsx b/content-engine/frontend/src/App.tsx
new file mode 100644
index 00000000..4e064740
--- /dev/null
+++ b/content-engine/frontend/src/App.tsx
@@ -0,0 +1,509 @@
+
+import { useState, useEffect } from 'react';
+import {
+ Rocket,
+ Search,
+ FileText,
+ ArrowRight,
+ ChevronLeft,
+ Database,
+ Plus,
+ RefreshCw,
+ Edit3
+} from 'lucide-react';
+
+// --- TYPES ---
+
+interface GTMProject {
+ id: string;
+ name: string;
+ productCategory: string;
+}
+
+interface ContentProject {
+ id: number;
+ name: string;
+ category: string;
+ gtm_project_id: string;
+ created_at: string;
+ seo_strategy?: { seed_keywords?: string[] };
+ assets?: ContentAsset[];
+}
+
+interface ContentAsset {
+ id: number;
+ section_key: string;
+ content: string;
+ status: string;
+}
+
+// --- SUB-COMPONENTS ---
+
+function SEOPlanner({ project, setLoading }: { project: ContentProject, setLoading: (b: boolean) => void }) {
+ const [keywords, setKeywords] = useState(project.seo_strategy?.seed_keywords || []);
+
+ const generateKeywords = async () => {
+ setLoading(true);
+ try {
+ // FIX: Relative path
+ const res = await fetch('api/seo_brainstorming', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ projectId: project.id })
+ });
+ const data = await res.json();
+ setKeywords(data.keywords || []);
+ } catch (err) { console.error(err); }
+ setLoading(false);
+ };
+
+ return (
+
+
+
+
SEO Strategy
+
Define the keywords that drive your content structure.
+
+
+
+
+ {keywords.length > 0 ? (
+
+ {keywords.map((kw, i) => (
+
+ {String(i+1).padStart(2, '0')}
+ {kw}
+
+ ))}
+
+ ) : (
+
+
No keywords generated yet. Start here!
+
+ )}
+
+ );
+}
+
+function WebsiteBuilder({ project, setLoading }: { project: ContentProject, setLoading: (b: boolean) => void }) {
+ const [sections, setSections] = useState(project.assets || []);
+ const [editingContent, setEditingContent] = useState<{ [key: string]: string }>({});
+
+ useEffect(() => {
+ const newEditing: { [key: string]: string } = {};
+ if (sections) {
+ sections.forEach(s => {
+ newEditing[s.section_key] = s.content;
+ });
+ }
+ setEditingContent(newEditing);
+ }, [sections]);
+
+ const generateSection = async (key: string) => {
+ setLoading(true);
+ try {
+ // FIX: Relative path
+ const res = await fetch('api/generate_section', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ projectId: project.id,
+ sectionKey: key,
+ keywords: project.seo_strategy?.seed_keywords || []
+ })
+ });
+ const data = await res.json();
+ setSections(prev => {
+ const other = prev.filter(s => s.section_key !== key);
+ return [...other, { id: Date.now(), section_key: key, content: data.content, status: 'draft' }];
+ });
+ } catch (err) { console.error(err); }
+ setLoading(false);
+ };
+
+ const handleEditChange = (key: string, val: string) => {
+ setEditingContent(prev => ({ ...prev, [key]: val }));
+ };
+
+ const saveEdit = async (key: string) => {
+ const content = editingContent[key];
+ if (!content) return;
+
+ setLoading(true);
+ try {
+ // FIX: Relative path
+ await fetch('api/generate_section', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ projectId: project.id,
+ sectionKey: key,
+ manualContent: content
+ })
+ });
+ alert("Saved successfully!");
+ } catch (err) { console.error(err); }
+ setLoading(false);
+ };
+
+ const copyToClipboard = (val: string) => {
+ navigator.clipboard.writeText(val);
+ alert('Copied to clipboard!');
+ };
+
+ return (
+
+
+
+
Website Copy Sections
+
Generate and refine high-converting blocks based on your strategy.
+
+
+
+
+ {[
+ { key: 'hero', label: 'Hero Section', desc: 'Headline, Subline & CTA' },
+ { key: 'problem', label: 'The Challenger Story', desc: 'Pain Points & Consequences' },
+ { key: 'value_prop', label: 'Hybrid Solution', desc: 'Symbiosis of Machine & Human' },
+ { key: 'features', label: 'Feature-to-Value', desc: 'Benefit-driven Tech Deep Dive' }
+ ].map(s => {
+ const hasContent = editingContent[s.key] !== undefined;
+ return (
+
+
+
+
+ {hasContent && (
+
+ )}
+
+
+
+
+ {hasContent ? (
+
+ ) : (
+
+ )}
+
+
+ );
+ })}
+
+
+ );
+}
+
+function ProjectDashboard({ project, onBack, setLoading }: { project: ContentProject, onBack: () => void, setLoading: (b: boolean) => void }) {
+ const [activeTab, setActiveTab] = useState<'SEO' | 'WEBSITE' | 'SOCIAL'>('SEO');
+
+ return (
+
+
+
+ ID: {project.id}
+
+
+
+
+
+
{project.name}
+
+
+ Category: {project.category}
+
+
+
+
+ {[
+ { id: 'SEO', label: 'SEO Plan', icon: Search },
+ { id: 'WEBSITE', label: 'Website Copy', icon: FileText },
+ { id: 'SOCIAL', label: 'LinkedIn', icon: Edit3 },
+ ].map(tab => (
+
+ ))}
+
+
+
+ {/* Tab Content */}
+
+ {activeTab === 'SEO' &&
}
+ {activeTab === 'WEBSITE' &&
}
+ {activeTab === 'SOCIAL' && (
+
+
+
LinkedIn Content Matrix coming soon...
+
+ )}
+
+
+
+ );
+}
+
+// --- MAIN APP ---
+
+export default function App() {
+ const [view, setView] = useState<'LIST' | 'IMPORT' | 'DETAILS'>('LIST');
+ const [contentProjects, setContentProjects] = useState([]);
+ const [gtmProjects, setGtmProjects] = useState([]);
+ const [selectedProject, setSelectedProject] = useState(null);
+ const [loading, setLoading] = useState(false);
+
+ useEffect(() => {
+ fetchContentProjects();
+ }, []);
+
+ const fetchContentProjects = async () => {
+ setLoading(true);
+ try {
+ // FIX: Relative path
+ const res = await fetch('api/list_content_projects', { method: 'POST', body: '{}', headers: {'Content-Type': 'application/json'} });
+ const data = await res.json();
+ setContentProjects(data.projects || []);
+ } catch (err) { console.error(err); }
+ setLoading(false);
+ };
+
+ const fetchGtmProjects = async () => {
+ setLoading(true);
+ try {
+ // FIX: Relative path
+ const res = await fetch('api/list_gtm_projects', { method: 'POST', body: '{}', headers: {'Content-Type': 'application/json'} });
+ const data = await res.json();
+ setGtmProjects(data.projects || []);
+ setView('IMPORT');
+ } catch (err) { console.error(err); }
+ setLoading(false);
+ };
+
+ const handleImport = async (gtmId: string) => {
+ setLoading(true);
+ try {
+ // FIX: Relative path
+ const res = await fetch('api/import_project', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ gtmProjectId: gtmId })
+ });
+ const data = await res.json();
+ if (data.id) {
+ await fetchContentProjects();
+ setView('LIST');
+ }
+ } catch (err) { console.error(err); }
+ setLoading(false);
+ };
+
+ const loadProject = async (id: number) => {
+ setLoading(true);
+ try {
+ // FIX: Relative path
+ const res = await fetch('api/load_project', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ projectId: id })
+ });
+ const data = await res.json();
+ setSelectedProject(data);
+ setView('DETAILS');
+ } catch (err) { console.error(err); }
+ setLoading(false);
+ };
+
+ return (
+
+ {/* Header */}
+
+
+
setView('LIST')}>
+
+
+
+
Content Engine v1.0
+
+
+
+ {loading && (
+
+
+ Processing...
+
+ )}
+
+
+
+
+
+
+
+ {view === 'LIST' && (
+
+
+ Active Campaigns
+
+
+ {contentProjects.length === 0 ? (
+
+
+
+
+
No active campaigns yet
+
Start by importing a strategy from the GTM Architect to turn your plan into actionable content.
+
+
+ ) : (
+
+ {contentProjects.map(p => (
+
loadProject(p.id)}
+ className="bg-slate-800 border border-slate-700 p-6 rounded-2xl hover:border-blue-500 hover:shadow-xl hover:shadow-blue-900/10 transition-all cursor-pointer group relative overflow-hidden"
+ >
+
+
+
+
+
+
+ {p.category}
+
+
+
{p.name}
+
Started: {new Date(p.created_at).toLocaleDateString()}
+
+
+
+ ))}
+
+ )}
+
+ )}
+
+ {view === 'IMPORT' && (
+
+
+
+
+
+
+
+
Import GTM Strategy
+
Select a validated strategy from the GTM Architect to build your content engine.
+
+
+ {gtmProjects.length === 0 ? (
+
+ ) : (
+
+ {gtmProjects.map(p => (
+
+
+
+
+
+
+
{p.name}
+
+
+ {p.productCategory}
+
+ ID: {p.id.split('-')[0]}...
+
+
+
+
+
+ ))}
+
+ )}
+
+
+ )}
+
+ {view === 'DETAILS' && selectedProject && (
+ setView('LIST')}
+ setLoading={setLoading}
+ />
+ )}
+
+
+ );
+}
diff --git a/content-engine/frontend/src/index.css b/content-engine/frontend/src/index.css
new file mode 100644
index 00000000..0f4f6ada
--- /dev/null
+++ b/content-engine/frontend/src/index.css
@@ -0,0 +1,8 @@
+
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+body {
+ @apply bg-slate-900 text-slate-100;
+}
diff --git a/content-engine/frontend/src/main.tsx b/content-engine/frontend/src/main.tsx
new file mode 100644
index 00000000..b0445aea
--- /dev/null
+++ b/content-engine/frontend/src/main.tsx
@@ -0,0 +1,11 @@
+
+import React from 'react'
+import ReactDOM from 'react-dom/client'
+import App from './App.tsx'
+import './index.css'
+
+ReactDOM.createRoot(document.getElementById('root')!).render(
+
+
+ ,
+)
diff --git a/content-engine/frontend/tailwind.config.cjs b/content-engine/frontend/tailwind.config.cjs
new file mode 100644
index 00000000..c82f10a0
--- /dev/null
+++ b/content-engine/frontend/tailwind.config.cjs
@@ -0,0 +1,12 @@
+
+/** @type {import('tailwindcss').Config} */
+export default {
+ content: [
+ "./index.html",
+ "./src/**/*.{js,ts,jsx,tsx}",
+ ],
+ theme: {
+ extend: {},
+ },
+ plugins: [],
+}
diff --git a/content-engine/frontend/vite.config.ts b/content-engine/frontend/vite.config.ts
new file mode 100644
index 00000000..0c91bdaf
--- /dev/null
+++ b/content-engine/frontend/vite.config.ts
@@ -0,0 +1,16 @@
+import { defineConfig } from 'vite'
+import react from '@vitejs/plugin-react'
+
+// https://vitejs.dev/config/
+export default defineConfig({
+ plugins: [react()],
+ base: './', // CRITICAL for sub-path deployment
+ server: {
+ proxy: {
+ '/api': {
+ target: 'http://localhost:3006',
+ changeOrigin: true,
+ }
+ }
+ }
+})
\ No newline at end of file
diff --git a/content-engine/package.json b/content-engine/package.json
new file mode 100644
index 00000000..825c4f30
--- /dev/null
+++ b/content-engine/package.json
@@ -0,0 +1,14 @@
+
+{
+ "name": "content-engine-backend",
+ "version": "1.0.0",
+ "description": "Backend bridge for Content Engine",
+ "main": "server.cjs",
+ "type": "commonjs",
+ "dependencies": {
+ "express": "^4.18.2"
+ },
+ "scripts": {
+ "start": "node server.cjs"
+ }
+}
diff --git a/content-engine/requirements.txt b/content-engine/requirements.txt
new file mode 100644
index 00000000..6c30aa4d
--- /dev/null
+++ b/content-engine/requirements.txt
@@ -0,0 +1,5 @@
+
+requests
+beautifulsoup4
+google-generativeai
+google-genai
diff --git a/content-engine/server.cjs b/content-engine/server.cjs
new file mode 100644
index 00000000..2c4adbcf
--- /dev/null
+++ b/content-engine/server.cjs
@@ -0,0 +1,68 @@
+const express = require('express');
+const { spawn } = require('child_process');
+const path = require('path');
+const fs = require('fs');
+
+const app = express();
+const port = process.env.PORT || 3006;
+
+app.use(express.json({ limit: '50mb' }));
+
+// INITIALIZE DATABASE ON START
+const dbScript = path.join(__dirname, 'content_db_manager.py');
+console.log("Initializing database...");
+spawn('python3', [dbScript]);
+
+// Helper to run python commands
+function runPython(mode, payload) {
+ return new Promise((resolve, reject) => {
+ const payloadFile = path.join(__dirname, `payload_${Date.now()}.json`);
+ fs.writeFileSync(payloadFile, JSON.stringify(payload));
+
+ const pythonProcess = spawn('python3', [
+ path.join(__dirname, 'content_orchestrator.py'),
+ '--mode', mode,
+ '--payload_file', payloadFile
+ ]);
+
+ let stdout = '';
+ let stderr = '';
+
+ pythonProcess.stdout.on('data', (data) => stdout += data.toString());
+ pythonProcess.stderr.on('data', (data) => stderr += data.toString());
+
+ pythonProcess.on('close', (code) => {
+ if (fs.existsSync(payloadFile)) fs.unlinkSync(payloadFile);
+ if (code !== 0) {
+ console.error(`Python error (code ${code}):`, stderr);
+ return reject(stderr);
+ }
+ try {
+ resolve(JSON.parse(stdout));
+ } catch (e) {
+ reject("Failed to parse Python output: " + stdout);
+ }
+ });
+ });
+}
+
+app.post('/api/:mode', async (req, res) => {
+ try {
+ const result = await runPython(req.params.mode, req.body);
+ res.json(result);
+ } catch (error) {
+ res.status(500).json({ error: error.toString() });
+ }
+});
+
+// Serve static assets from build (for production)
+if (fs.existsSync(path.join(__dirname, 'dist'))) {
+ app.use(express.static(path.join(__dirname, 'dist')));
+ app.get('*', (req, res) => {
+ res.sendFile(path.join(__dirname, 'dist', 'index.html'));
+ });
+}
+
+app.listen(port, () => {
+ console.log(`Content Engine Server running on port ${port}`);
+});
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index aa542678..17ac2b9d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -18,6 +18,33 @@ services:
- market-frontend
- company-explorer
- competitor-analysis
+ - content-app
+
+ # ... [existing services] ...
+
+ content-app:
+ build:
+ context: .
+ dockerfile: content-engine/Dockerfile
+ container_name: content-app
+ restart: unless-stopped
+ volumes:
+ - ./content-engine:/app/content-engine
+ - ./content-engine/server.cjs:/app/server.cjs
+ - ./content-engine/content_orchestrator.py:/app/content_orchestrator.py
+ - ./content-engine/content_db_manager.py:/app/content_db_manager.py
+ - ./content_engine.db:/app/content_engine.db
+ - ./helpers.py:/app/helpers.py
+ - ./config.py:/app/config.py
+ - ./gtm_projects.db:/app/gtm_projects.db
+ - ./Log_from_docker:/app/Log_from_docker
+ - ./gemini_api_key.txt:/app/gemini_api_key.txt
+ - ./serpapikey.txt:/app/serpapikey.txt
+ environment:
+ - PYTHONUNBUFFERED=1
+ - DB_PATH=/app/content_engine.db
+ - GTM_DB_PATH=/app/gtm_projects.db
+
# --- DASHBOARD (Landing Page) ---
dashboard:
diff --git a/nginx-proxy.conf b/nginx-proxy.conf
index c160fb87..6d039a5a 100644
--- a/nginx-proxy.conf
+++ b/nginx-proxy.conf
@@ -73,6 +73,20 @@ http {
proxy_send_timeout 1200s;
}
+ location /content/ {
+ # Content Engine
+ # Der Trailing Slash am Ende ist wichtig!
+ proxy_pass http://content-app:3006/;
+ proxy_set_header Host $host;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection "upgrade";
+
+ # Explicit timeouts for this location
+ proxy_read_timeout 1200s;
+ proxy_connect_timeout 1200s;
+ proxy_send_timeout 1200s;
+ }
+
location /ce/ {
# Company Explorer (Robotics Edition)
# Der Trailing Slash am Ende ist wichtig!