refactor: implement unified binary architecture
🏗️ **Major Architecture Refactoring: Separate CLI + Server → Unified Binary**

**Key Changes:**
- ✅ **Unified Binary**: single `insertr` binary with subcommands (`enhance`, `serve`)
- ✅ **Preserved Database Architecture**: maintained the existing sqlc multi-database setup
- ✅ **Smart Configuration**: Viper + YAML config with CLI flag precedence
- ✅ **Updated Build System**: unified justfile, Air, and npm scripts

**Command Structure:**
- `insertr enhance [input-dir]` - build-time content injection
- `insertr serve` - HTTP API server (dev + production modes)
- `insertr --config insertr.yaml` - YAML configuration support

**Architecture Benefits:**
- **Shared Database Layer**: single source of truth for content models
- **Flexible Workflows**: local DB for dev, remote API for production
- **Simple Deployment**: one binary for all use cases
- **Better UX**: consistent configuration across build and runtime

**Preserved Features:**
- Multi-database support (SQLite + PostgreSQL)
- sqlc code generation and type safety
- Version control system with rollback
- Professional API endpoints
- Content enhancement pipeline

**Development Workflow:**
- `just dev` - full-stack development (API server + demo site)
- `just serve` - API server only
- `just enhance` - build-time content injection
- `air` - hot reload of the unified binary

**Migration:** consolidated `insertr-cli/` and `insertr-server/` into a unified root structure.
**.air.toml** (new file)

@@ -0,0 +1,46 @@
```toml
root = "."
testdata_dir = "testdata"
tmp_dir = "tmp"

[build]
args_bin = []
bin = "./tmp/insertr"
cmd = "go build -o ./tmp/insertr ."
delay = 1000
exclude_dir = ["tmp", "vendor", "testdata", "node_modules", "dist", "insertr-cli", "insertr-server"]
exclude_file = []
exclude_regex = ["_test.go"]
exclude_unchanged = false
follow_symlink = false
full_bin = "./tmp/insertr serve --dev-mode --db ./dev.db"
include_dir = ["cmd", "internal", "lib/src"]
include_ext = ["go", "tpl", "tmpl", "html", "js"]
include_file = []
kill_delay = "0s"
log = "build-errors.log"
poll = false
poll_interval = 0
post_cmd = []
pre_cmd = ["cd lib && npm run build"]
rerun = false
rerun_delay = 500
send_interrupt = false
stop_on_root = false

[color]
app = ""
build = "yellow"
main = "magenta"
runner = "green"
watcher = "cyan"

[log]
main_only = false
time = false

[misc]
clean_on_exit = true

[screen]
clear_on_rebuild = false
keep_scroll = true
```
**cmd/enhance.go** (new file)

@@ -0,0 +1,82 @@
```go
package cmd

import (
    "fmt"
    "log"
    "os"

    "github.com/spf13/cobra"
    "github.com/spf13/viper"

    "github.com/insertr/insertr/internal/content"
)

var enhanceCmd = &cobra.Command{
    Use:   "enhance [input-dir]",
    Short: "Enhance HTML files by injecting content from database",
    Long: `Enhance processes HTML files and injects latest content from the database
while adding editing capabilities. This is the core build-time enhancement
process that transforms static HTML into an editable CMS.`,
    Args: cobra.ExactArgs(1),
    Run:  runEnhance,
}

var (
    outputDir   string
    mockContent bool
)

func init() {
    enhanceCmd.Flags().StringVarP(&outputDir, "output", "o", "./dist", "Output directory for enhanced files")
    enhanceCmd.Flags().BoolVar(&mockContent, "mock", true, "Use mock content for development")

    // Bind flags to viper
    viper.BindPFlag("build.output", enhanceCmd.Flags().Lookup("output"))
    viper.BindPFlag("mock_content", enhanceCmd.Flags().Lookup("mock"))
}

func runEnhance(cmd *cobra.Command, args []string) {
    inputDir := args[0]

    // Validate input directory
    if _, err := os.Stat(inputDir); os.IsNotExist(err) {
        log.Fatalf("Input directory does not exist: %s", inputDir)
    }

    // Get configuration values
    dbPath := viper.GetString("database.path")
    apiURL := viper.GetString("api.url")
    apiKey := viper.GetString("api.key")
    siteID := viper.GetString("site_id")
    mockContent := viper.GetBool("mock_content")

    // Create content client
    var client content.ContentClient
    if mockContent || (apiURL == "" && dbPath == "") {
        fmt.Printf("🧪 Using mock content for development\n")
        client = content.NewMockClient()
    } else if apiURL != "" {
        fmt.Printf("🌐 Using content API: %s\n", apiURL)
        client = content.NewHTTPClient(apiURL, apiKey)
    } else {
        fmt.Printf("🗄️ Using database: %s\n", dbPath)
        // TODO: Implement database client for direct DB access
        fmt.Printf("⚠️ Direct database access not yet implemented, using mock content\n")
        client = content.NewMockClient()
    }

    // Create enhancer
    enhancer := content.NewEnhancer(client, siteID)

    fmt.Printf("🚀 Starting enhancement process...\n")
    fmt.Printf("📁 Input: %s\n", inputDir)
    fmt.Printf("📁 Output: %s\n", outputDir)
    fmt.Printf("🏷️ Site ID: %s\n\n", siteID)

    // Enhance directory
    if err := enhancer.EnhanceDirectory(inputDir, outputDir); err != nil {
        log.Fatalf("Enhancement failed: %v", err)
    }

    fmt.Printf("\n✅ Enhancement complete! Enhanced files available in: %s\n", outputDir)
}
```
**cmd/root.go** (new file)

@@ -0,0 +1,74 @@
```go
package cmd

import (
    "fmt"
    "os"

    "github.com/spf13/cobra"
    "github.com/spf13/viper"
)

var (
    cfgFile string
    dbPath  string
    apiURL  string
    apiKey  string
    siteID  string
)

var rootCmd = &cobra.Command{
    Use:   "insertr",
    Short: "Insertr - The Tailwind of CMS",
    Long: `Insertr adds editing capabilities to static HTML sites by detecting
editable elements and injecting content management functionality.

The unified tool handles both build-time content injection (enhance command)
and runtime API server (serve command) for complete CMS functionality.`,
    Version: "0.1.0",
}

func Execute() {
    if err := rootCmd.Execute(); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
}

func init() {
    cobra.OnInitialize(initConfig)

    // Global flags
    rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is ./insertr.yaml)")
    rootCmd.PersistentFlags().StringVar(&dbPath, "db", "./insertr.db", "database path (SQLite file or PostgreSQL connection string)")
    rootCmd.PersistentFlags().StringVar(&apiURL, "api-url", "", "content API URL")
    rootCmd.PersistentFlags().StringVar(&apiKey, "api-key", "", "API key for authentication")
    rootCmd.PersistentFlags().StringVarP(&siteID, "site-id", "s", "demo", "site ID for content lookup")

    // Bind flags to viper
    viper.BindPFlag("database.path", rootCmd.PersistentFlags().Lookup("db"))
    viper.BindPFlag("api.url", rootCmd.PersistentFlags().Lookup("api-url"))
    viper.BindPFlag("api.key", rootCmd.PersistentFlags().Lookup("api-key"))
    viper.BindPFlag("site_id", rootCmd.PersistentFlags().Lookup("site-id"))

    rootCmd.AddCommand(enhanceCmd)
    rootCmd.AddCommand(serveCmd)
}

func initConfig() {
    if cfgFile != "" {
        viper.SetConfigFile(cfgFile)
    } else {
        viper.AddConfigPath(".")
        viper.SetConfigName("insertr")
        viper.SetConfigType("yaml")
    }

    // Environment variables
    viper.SetEnvPrefix("INSERTR")
    viper.AutomaticEnv()

    // Read config file
    if err := viper.ReadInConfig(); err == nil {
        fmt.Fprintln(os.Stderr, "Using config file:", viper.ConfigFileUsed())
    }
}
```
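cmd/root.go exports `Execute()`, but no `main.go` appears in this hunk listing even though Air builds the binary from the repository root (`go build -o ./tmp/insertr .`). A minimal entrypoint consistent with that build command would look like the sketch below; the actual file may differ.

```go
// Hypothetical main.go at the repository root; a sketch, not part of this diff.
package main

import "github.com/insertr/insertr/cmd"

func main() {
    // Delegate everything to the Cobra root command defined in cmd/root.go.
    cmd.Execute()
}
```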
**cmd/serve.go** (new file)

@@ -0,0 +1,134 @@
```go
package cmd

import (
    "fmt"
    "log"
    "net/http"
    "os"
    "os/signal"
    "syscall"

    "github.com/gorilla/mux"
    "github.com/spf13/cobra"
    "github.com/spf13/viper"

    "github.com/insertr/insertr/internal/api"
    "github.com/insertr/insertr/internal/db"
)

var serveCmd = &cobra.Command{
    Use:   "serve",
    Short: "Start the content API server",
    Long: `Start the HTTP API server that provides content storage and retrieval.
Supports both development and production modes with SQLite or PostgreSQL databases.`,
    Run: runServe,
}

var (
    port    int
    devMode bool
)

func init() {
    serveCmd.Flags().IntVarP(&port, "port", "p", 8080, "Server port")
    serveCmd.Flags().BoolVar(&devMode, "dev-mode", false, "Enable development mode features")

    // Bind flags to viper
    viper.BindPFlag("server.port", serveCmd.Flags().Lookup("port"))
    viper.BindPFlag("server.dev_mode", serveCmd.Flags().Lookup("dev-mode"))
}

func runServe(cmd *cobra.Command, args []string) {
    // Get configuration values
    port := viper.GetInt("server.port")
    dbPath := viper.GetString("database.path")
    devMode := viper.GetBool("server.dev_mode")

    // Initialize database
    database, err := db.NewDatabase(dbPath)
    if err != nil {
        log.Fatalf("Failed to initialize database: %v", err)
    }
    defer database.Close()

    // Initialize handlers
    contentHandler := api.NewContentHandler(database)

    // Setup router
    router := mux.NewRouter()

    // Add middleware
    router.Use(api.CORSMiddleware)
    router.Use(api.LoggingMiddleware)
    router.Use(api.ContentTypeMiddleware)

    // Health check endpoint
    router.HandleFunc("/health", api.HealthMiddleware())

    // API routes
    apiRouter := router.PathPrefix("/api/content").Subrouter()

    // Content endpoints matching the expected API contract
    apiRouter.HandleFunc("/bulk", contentHandler.GetBulkContent).Methods("GET")
    apiRouter.HandleFunc("/{id}", contentHandler.GetContent).Methods("GET")
    apiRouter.HandleFunc("/{id}", contentHandler.UpdateContent).Methods("PUT")
    apiRouter.HandleFunc("", contentHandler.GetAllContent).Methods("GET")
    apiRouter.HandleFunc("", contentHandler.CreateContent).Methods("POST")

    // Version control endpoints
    apiRouter.HandleFunc("/{id}/versions", contentHandler.GetContentVersions).Methods("GET")
    apiRouter.HandleFunc("/{id}/rollback", contentHandler.RollbackContent).Methods("POST")

    // Handle CORS preflight requests explicitly
    apiRouter.HandleFunc("/{id}", api.CORSPreflightHandler).Methods("OPTIONS")
    apiRouter.HandleFunc("", api.CORSPreflightHandler).Methods("OPTIONS")
    apiRouter.HandleFunc("/bulk", api.CORSPreflightHandler).Methods("OPTIONS")
    apiRouter.HandleFunc("/{id}/versions", api.CORSPreflightHandler).Methods("OPTIONS")
    apiRouter.HandleFunc("/{id}/rollback", api.CORSPreflightHandler).Methods("OPTIONS")

    // Start server
    addr := fmt.Sprintf(":%d", port)
    mode := "production"
    if devMode {
        mode = "development"
    }

    fmt.Printf("🚀 Insertr Content Server starting (%s mode)...\n", mode)
    fmt.Printf("📁 Database: %s\n", dbPath)
    fmt.Printf("🌐 Server running at: http://localhost%s\n", addr)
    fmt.Printf("💚 Health check: http://localhost%s/health\n", addr)
    fmt.Printf("📊 API endpoints:\n")
    fmt.Printf("   GET    /api/content?site_id={site}\n")
    fmt.Printf("   GET    /api/content/{id}?site_id={site}\n")
    fmt.Printf("   GET    /api/content/bulk?site_id={site}&ids[]={id1}&ids[]={id2}\n")
    fmt.Printf("   POST   /api/content\n")
    fmt.Printf("   PUT    /api/content/{id}\n")
    fmt.Printf("   GET    /api/content/{id}/versions?site_id={site}\n")
    fmt.Printf("   POST   /api/content/{id}/rollback\n")
    fmt.Printf("\n🔄 Press Ctrl+C to shutdown gracefully\n\n")

    // Setup graceful shutdown
    server := &http.Server{
        Addr:    addr,
        Handler: router,
    }

    // Start server in a goroutine
    go func() {
        if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
            log.Fatalf("Server failed to start: %v", err)
        }
    }()

    // Wait for interrupt signal
    quit := make(chan os.Signal, 1)
    signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
    <-quit

    fmt.Println("\n🛑 Shutting down server...")
    if err := server.Close(); err != nil {
        log.Fatalf("Server forced to shutdown: %v", err)
    }

    fmt.Println("✅ Server shutdown complete")
}
```
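Once `insertr serve` is running, the endpoints it prints can be exercised directly. A minimal client-side check, assuming the default port 8080, the demo `site_id`, and a hypothetical content ID `hero-title`:

```go
// Quick manual check against a locally running `insertr serve`.
// "hero-title" is a made-up content ID used only for illustration.
package main

import (
    "fmt"
    "io"
    "log"
    "net/http"
)

func main() {
    resp, err := http.Get("http://localhost:8080/api/content/hero-title?site_id=demo")
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        log.Fatal(err)
    }
    // Prints the JSON content item on success, or the handler's error text otherwise.
    fmt.Printf("%s %s\n", resp.Status, body)
}
```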
**db/postgresql/schema.sql** (new file)

@@ -0,0 +1,42 @@
```sql
-- PostgreSQL-specific schema with BIGINT UNIX timestamps
-- Main content table (current versions only)
CREATE TABLE content (
    id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
    created_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()) NOT NULL,
    updated_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()) NOT NULL,
    last_edited_by TEXT DEFAULT 'system' NOT NULL,
    PRIMARY KEY (id, site_id)
);

-- Version history table for rollback functionality
CREATE TABLE content_versions (
    version_id SERIAL PRIMARY KEY,
    content_id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL,
    created_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()) NOT NULL,
    created_by TEXT DEFAULT 'system' NOT NULL
);

-- Indexes for performance
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);

-- Function and trigger to automatically update updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = EXTRACT(EPOCH FROM NOW());
    RETURN NEW;
END;
$$ language 'plpgsql';

CREATE TRIGGER update_content_updated_at
    BEFORE UPDATE ON content
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();
```
**db/postgresql/setup.sql** (new file)

@@ -0,0 +1,47 @@
```sql
-- name: InitializeSchema :exec
CREATE TABLE IF NOT EXISTS content (
    id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
    created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
    updated_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
    last_edited_by TEXT DEFAULT 'system' NOT NULL,
    PRIMARY KEY (id, site_id)
);

-- name: InitializeVersionsTable :exec
CREATE TABLE IF NOT EXISTS content_versions (
    version_id SERIAL PRIMARY KEY,
    content_id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL,
    created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
    created_by TEXT DEFAULT 'system' NOT NULL
);

-- name: CreateContentSiteIndex :exec
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);

-- name: CreateContentUpdatedAtIndex :exec
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);

-- name: CreateVersionsLookupIndex :exec
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);

-- name: CreateUpdateFunction :exec
CREATE OR REPLACE FUNCTION update_content_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = EXTRACT(EPOCH FROM NOW());
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- name: CreateUpdateTrigger :exec
DROP TRIGGER IF EXISTS update_content_updated_at ON content;
CREATE TRIGGER update_content_updated_at
    BEFORE UPDATE ON content
    FOR EACH ROW
    EXECUTE FUNCTION update_content_timestamp();
```
**db/queries/content.sql** (new file)

@@ -0,0 +1,30 @@
```sql
-- name: GetContent :one
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
FROM content
WHERE id = sqlc.arg(id) AND site_id = sqlc.arg(site_id);

-- name: GetAllContent :many
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
FROM content
WHERE site_id = sqlc.arg(site_id)
ORDER BY updated_at DESC;

-- name: GetBulkContent :many
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
FROM content
WHERE site_id = sqlc.arg(site_id) AND id IN (sqlc.slice('ids'));

-- name: CreateContent :one
INSERT INTO content (id, site_id, value, type, last_edited_by)
VALUES (sqlc.arg(id), sqlc.arg(site_id), sqlc.arg(value), sqlc.arg(type), sqlc.arg(last_edited_by))
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by;

-- name: UpdateContent :one
UPDATE content
SET value = sqlc.arg(value), type = sqlc.arg(type), last_edited_by = sqlc.arg(last_edited_by)
WHERE id = sqlc.arg(id) AND site_id = sqlc.arg(site_id)
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by;

-- name: DeleteContent :exec
DELETE FROM content
WHERE id = sqlc.arg(id) AND site_id = sqlc.arg(site_id);
```
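sqlc compiles these annotated queries into typed Go methods; the generated packages (internal/db/sqlite and internal/db/postgresql) are what internal/api/handlers.go calls further down. A minimal standalone sketch of using the generated SQLite package, assuming sqlc's standard `New` constructor and a made-up content ID:

```go
// Sketch only: exercises the sqlc-generated SQLite queries directly.
// "hero-title" is a hypothetical content ID; the real wiring goes through internal/db.
package main

import (
    "context"
    "database/sql"
    "log"

    _ "github.com/mattn/go-sqlite3"

    "github.com/insertr/insertr/internal/db/sqlite"
)

func main() {
    conn, err := sql.Open("sqlite3", "./insertr.db")
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    queries := sqlite.New(conn) // standard sqlc-generated constructor
    item, err := queries.GetContent(context.Background(), sqlite.GetContentParams{
        ID:     "hero-title",
        SiteID: "demo",
    })
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("%s (%s): %q", item.ID, item.Type, item.Value)
}
```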
**db/queries/versions.sql** (new file)

@@ -0,0 +1,29 @@
```sql
-- name: CreateContentVersion :exec
INSERT INTO content_versions (content_id, site_id, value, type, created_by)
VALUES (sqlc.arg(content_id), sqlc.arg(site_id), sqlc.arg(value), sqlc.arg(type), sqlc.arg(created_by));

-- name: GetContentVersionHistory :many
SELECT version_id, content_id, site_id, value, type, created_at, created_by
FROM content_versions
WHERE content_id = sqlc.arg(content_id) AND site_id = sqlc.arg(site_id)
ORDER BY created_at DESC
LIMIT sqlc.arg(limit_count);

-- name: GetContentVersion :one
SELECT version_id, content_id, site_id, value, type, created_at, created_by
FROM content_versions
WHERE version_id = sqlc.arg(version_id);

-- name: GetAllVersionsForSite :many
SELECT
    cv.version_id, cv.content_id, cv.site_id, cv.value, cv.type, cv.created_at, cv.created_by,
    c.value as current_value
FROM content_versions cv
LEFT JOIN content c ON cv.content_id = c.id AND cv.site_id = c.site_id
WHERE cv.site_id = sqlc.arg(site_id)
ORDER BY cv.created_at DESC
LIMIT sqlc.arg(limit_count);

-- name: DeleteOldVersions :exec
DELETE FROM content_versions
WHERE created_at < sqlc.arg(created_before) AND site_id = sqlc.arg(site_id);
```
**db/sqlite/schema.sql** (new file)

@@ -0,0 +1,36 @@
```sql
-- SQLite-specific schema with INTEGER timestamps
-- Main content table (current versions only)
CREATE TABLE content (
    id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
    created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    updated_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    last_edited_by TEXT DEFAULT 'system' NOT NULL,
    PRIMARY KEY (id, site_id)
);

-- Version history table for rollback functionality
CREATE TABLE content_versions (
    version_id INTEGER PRIMARY KEY AUTOINCREMENT,
    content_id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL,
    created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    created_by TEXT DEFAULT 'system' NOT NULL
);

-- Indexes for performance
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);

-- Trigger to automatically update updated_at timestamp
CREATE TRIGGER IF NOT EXISTS update_content_updated_at
    AFTER UPDATE ON content
    FOR EACH ROW
BEGIN
    UPDATE content SET updated_at = strftime('%s', 'now') WHERE id = NEW.id AND site_id = NEW.site_id;
END;
```
**db/sqlite/setup.sql** (new file)

@@ -0,0 +1,39 @@
```sql
-- name: InitializeSchema :exec
CREATE TABLE IF NOT EXISTS content (
    id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
    created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    updated_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    last_edited_by TEXT DEFAULT 'system' NOT NULL,
    PRIMARY KEY (id, site_id)
);

-- name: InitializeVersionsTable :exec
CREATE TABLE IF NOT EXISTS content_versions (
    version_id INTEGER PRIMARY KEY AUTOINCREMENT,
    content_id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL,
    created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    created_by TEXT DEFAULT 'system' NOT NULL
);

-- name: CreateContentSiteIndex :exec
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);

-- name: CreateContentUpdatedAtIndex :exec
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);

-- name: CreateVersionsLookupIndex :exec
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);

-- name: CreateUpdateTrigger :exec
CREATE TRIGGER IF NOT EXISTS update_content_updated_at
    AFTER UPDATE ON content
    FOR EACH ROW
BEGIN
    UPDATE content SET updated_at = strftime('%s', 'now') WHERE id = NEW.id AND site_id = NEW.site_id;
END;
```
**go.mod** (new file)

@@ -0,0 +1,35 @@
```
module github.com/insertr/insertr

go 1.24.6

require (
	github.com/gorilla/mux v1.8.1
	github.com/lib/pq v1.10.9
	github.com/mattn/go-sqlite3 v1.14.32
	github.com/spf13/cobra v1.8.0
	github.com/spf13/viper v1.18.2
	golang.org/x/net v0.43.0
)

require (
	github.com/fsnotify/fsnotify v1.7.0 // indirect
	github.com/hashicorp/hcl v1.0.0 // indirect
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/magiconair/properties v1.8.7 // indirect
	github.com/mitchellh/mapstructure v1.5.0 // indirect
	github.com/pelletier/go-toml/v2 v2.1.0 // indirect
	github.com/sagikazarmark/locafero v0.4.0 // indirect
	github.com/sagikazarmark/slog-shim v0.1.0 // indirect
	github.com/sourcegraph/conc v0.3.0 // indirect
	github.com/spf13/afero v1.11.0 // indirect
	github.com/spf13/cast v1.6.0 // indirect
	github.com/spf13/pflag v1.0.5 // indirect
	github.com/subosito/gotenv v1.6.0 // indirect
	go.uber.org/atomic v1.9.0 // indirect
	go.uber.org/multierr v1.9.0 // indirect
	golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
	golang.org/x/sys v0.35.0 // indirect
	golang.org/x/text v0.28.0 // indirect
	gopkg.in/ini.v1 v1.67.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)
```
**go.sum** (new file)

@@ -0,0 +1,83 @@
```
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ=
github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
```
**insertr.yaml** (new file)

@@ -0,0 +1,25 @@
```yaml
# Insertr Configuration File
# This file provides default configuration for the unified insertr binary

# Database configuration
database:
  path: "./insertr.db"  # SQLite file path or PostgreSQL connection string

# API configuration (for remote content API)
api:
  url: ""  # Content API URL (leave empty to use local database)
  key: ""  # API authentication key

# Server configuration
server:
  port: 8080       # HTTP server port
  dev_mode: false  # Enable development mode features

# Build configuration
build:
  input: "./src"    # Default input directory for enhancement
  output: "./dist"  # Default output directory for enhanced files

# Global settings
site_id: "demo"      # Default site ID for content lookup
mock_content: false  # Use mock content instead of real data
```
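The commit message states that CLI flags take precedence over this YAML file. That follows from how cmd/root.go binds flags to the same Viper keys via `viper.BindPFlag`; the small standalone illustration below shows the same pattern outside of insertr, using a hypothetical `--port` flag only.

```go
// Minimal demonstration of flag-over-config precedence with viper + pflag.
package main

import (
    "fmt"

    "github.com/spf13/pflag"
    "github.com/spf13/viper"
)

func main() {
    // Stand-in for a value that would normally come from insertr.yaml.
    viper.SetDefault("server.port", 8080)

    // Bind a flag to the same key, mirroring what cmd/serve.go does.
    pflag.Int("port", 8080, "Server port")
    viper.BindPFlag("server.port", pflag.Lookup("port"))
    pflag.Parse()

    // Run with `--port 9090` and the flag value wins; without it,
    // the configured (here: default) value is returned.
    fmt.Println("server.port =", viper.GetInt("server.port"))
}
```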
**internal/api/handlers.go** (new file)

@@ -0,0 +1,668 @@
```go
package api

import (
    "context"
    "database/sql"
    "encoding/json"
    "fmt"
    "net/http"
    "strconv"
    "strings"
    "time"

    "github.com/gorilla/mux"
    "github.com/insertr/insertr/internal/db"
    "github.com/insertr/insertr/internal/db/postgresql"
    "github.com/insertr/insertr/internal/db/sqlite"
)

// ContentHandler handles all content-related HTTP requests
type ContentHandler struct {
    database *db.Database
}

// NewContentHandler creates a new content handler
func NewContentHandler(database *db.Database) *ContentHandler {
    return &ContentHandler{
        database: database,
    }
}

// GetContent handles GET /api/content/{id}
func (h *ContentHandler) GetContent(w http.ResponseWriter, r *http.Request) {
    vars := mux.Vars(r)
    contentID := vars["id"]
    siteID := r.URL.Query().Get("site_id")

    if siteID == "" {
        http.Error(w, "site_id parameter is required", http.StatusBadRequest)
        return
    }

    var content interface{}
    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        content, err = h.database.GetSQLiteQueries().GetContent(context.Background(), sqlite.GetContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    case "postgresql":
        content, err = h.database.GetPostgreSQLQueries().GetContent(context.Background(), postgresql.GetContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        if err == sql.ErrNoRows {
            http.Error(w, "Content not found", http.StatusNotFound)
            return
        }
        http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
        return
    }

    item := h.convertToAPIContent(content)

    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(item)
}

// GetAllContent handles GET /api/content
func (h *ContentHandler) GetAllContent(w http.ResponseWriter, r *http.Request) {
    siteID := r.URL.Query().Get("site_id")
    if siteID == "" {
        http.Error(w, "site_id parameter is required", http.StatusBadRequest)
        return
    }

    var dbContent interface{}
    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        dbContent, err = h.database.GetSQLiteQueries().GetAllContent(context.Background(), siteID)
    case "postgresql":
        dbContent, err = h.database.GetPostgreSQLQueries().GetAllContent(context.Background(), siteID)
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
        return
    }

    items := h.convertToAPIContentList(dbContent)
    response := ContentResponse{Content: items}

    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(response)
}

// GetBulkContent handles GET /api/content/bulk
func (h *ContentHandler) GetBulkContent(w http.ResponseWriter, r *http.Request) {
    siteID := r.URL.Query().Get("site_id")
    if siteID == "" {
        http.Error(w, "site_id parameter is required", http.StatusBadRequest)
        return
    }

    // Parse ids parameter
    idsParam := r.URL.Query()["ids[]"]
    if len(idsParam) == 0 {
        // Try single ids parameter
        idsStr := r.URL.Query().Get("ids")
        if idsStr == "" {
            http.Error(w, "ids parameter is required", http.StatusBadRequest)
            return
        }
        idsParam = strings.Split(idsStr, ",")
    }

    var dbContent interface{}
    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        dbContent, err = h.database.GetSQLiteQueries().GetBulkContent(context.Background(), sqlite.GetBulkContentParams{
            SiteID: siteID,
            Ids:    idsParam,
        })
    case "postgresql":
        dbContent, err = h.database.GetPostgreSQLQueries().GetBulkContent(context.Background(), postgresql.GetBulkContentParams{
            SiteID: siteID,
            Ids:    idsParam,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
        return
    }

    items := h.convertToAPIContentList(dbContent)
    response := ContentResponse{Content: items}

    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(response)
}

// CreateContent handles POST /api/content
func (h *ContentHandler) CreateContent(w http.ResponseWriter, r *http.Request) {
    var req CreateContentRequest
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        http.Error(w, "Invalid JSON", http.StatusBadRequest)
        return
    }

    siteID := r.URL.Query().Get("site_id")
    if siteID == "" {
        siteID = req.SiteID // fallback to request body
    }
    if siteID == "" {
        siteID = "default" // final fallback
    }

    // Extract user from request (for now, use X-User-ID header or fallback)
    userID := r.Header.Get("X-User-ID")
    if userID == "" && req.CreatedBy != "" {
        userID = req.CreatedBy
    }
    if userID == "" {
        userID = "anonymous"
    }

    var content interface{}
    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        content, err = h.database.GetSQLiteQueries().CreateContent(context.Background(), sqlite.CreateContentParams{
            ID:           req.ID,
            SiteID:       siteID,
            Value:        req.Value,
            Type:         req.Type,
            LastEditedBy: userID,
        })
    case "postgresql":
        content, err = h.database.GetPostgreSQLQueries().CreateContent(context.Background(), postgresql.CreateContentParams{
            ID:           req.ID,
            SiteID:       siteID,
            Value:        req.Value,
            Type:         req.Type,
            LastEditedBy: userID,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Failed to create content: %v", err), http.StatusInternalServerError)
        return
    }

    item := h.convertToAPIContent(content)

    w.Header().Set("Content-Type", "application/json")
    w.WriteHeader(http.StatusCreated)
    json.NewEncoder(w).Encode(item)
}

// UpdateContent handles PUT /api/content/{id}
func (h *ContentHandler) UpdateContent(w http.ResponseWriter, r *http.Request) {
    vars := mux.Vars(r)
    contentID := vars["id"]
    siteID := r.URL.Query().Get("site_id")

    if siteID == "" {
        http.Error(w, "site_id parameter is required", http.StatusBadRequest)
        return
    }

    var req UpdateContentRequest
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        http.Error(w, "Invalid JSON", http.StatusBadRequest)
        return
    }

    // Extract user from request
    userID := r.Header.Get("X-User-ID")
    if userID == "" && req.UpdatedBy != "" {
        userID = req.UpdatedBy
    }
    if userID == "" {
        userID = "anonymous"
    }

    // Get current content for version history and type preservation
    var currentContent interface{}
    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        currentContent, err = h.database.GetSQLiteQueries().GetContent(context.Background(), sqlite.GetContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    case "postgresql":
        currentContent, err = h.database.GetPostgreSQLQueries().GetContent(context.Background(), postgresql.GetContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        if err == sql.ErrNoRows {
            http.Error(w, "Content not found", http.StatusNotFound)
            return
        }
        http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
        return
    }

    // Archive current version before updating
    err = h.createContentVersion(currentContent)
    if err != nil {
        http.Error(w, fmt.Sprintf("Failed to create version: %v", err), http.StatusInternalServerError)
        return
    }

    // Determine content type
    contentType := req.Type
    if contentType == "" {
        contentType = h.getContentType(currentContent) // preserve existing type if not specified
    }

    // Update the content
    var updatedContent interface{}

    switch h.database.GetDBType() {
    case "sqlite3":
        updatedContent, err = h.database.GetSQLiteQueries().UpdateContent(context.Background(), sqlite.UpdateContentParams{
            Value:        req.Value,
            Type:         contentType,
            LastEditedBy: userID,
            ID:           contentID,
            SiteID:       siteID,
        })
    case "postgresql":
        updatedContent, err = h.database.GetPostgreSQLQueries().UpdateContent(context.Background(), postgresql.UpdateContentParams{
            Value:        req.Value,
            Type:         contentType,
            LastEditedBy: userID,
            ID:           contentID,
            SiteID:       siteID,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Failed to update content: %v", err), http.StatusInternalServerError)
        return
    }

    item := h.convertToAPIContent(updatedContent)

    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(item)
}

// DeleteContent handles DELETE /api/content/{id}
func (h *ContentHandler) DeleteContent(w http.ResponseWriter, r *http.Request) {
    vars := mux.Vars(r)
    contentID := vars["id"]
    siteID := r.URL.Query().Get("site_id")

    if siteID == "" {
        http.Error(w, "site_id parameter is required", http.StatusBadRequest)
        return
    }

    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        err = h.database.GetSQLiteQueries().DeleteContent(context.Background(), sqlite.DeleteContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    case "postgresql":
        err = h.database.GetPostgreSQLQueries().DeleteContent(context.Background(), postgresql.DeleteContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Failed to delete content: %v", err), http.StatusInternalServerError)
        return
    }

    w.WriteHeader(http.StatusNoContent)
}

// GetContentVersions handles GET /api/content/{id}/versions
func (h *ContentHandler) GetContentVersions(w http.ResponseWriter, r *http.Request) {
    vars := mux.Vars(r)
    contentID := vars["id"]
    siteID := r.URL.Query().Get("site_id")

    if siteID == "" {
        http.Error(w, "site_id parameter is required", http.StatusBadRequest)
        return
    }

    // Parse limit parameter (default to 10)
    limit := int64(10)
    if limitStr := r.URL.Query().Get("limit"); limitStr != "" {
        if parsedLimit, err := strconv.ParseInt(limitStr, 10, 64); err == nil {
            limit = parsedLimit
        }
    }

    var dbVersions interface{}
    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        dbVersions, err = h.database.GetSQLiteQueries().GetContentVersionHistory(context.Background(), sqlite.GetContentVersionHistoryParams{
            ContentID:  contentID,
            SiteID:     siteID,
            LimitCount: limit,
        })
    case "postgresql":
        // Note: PostgreSQL uses different parameter names due to int32 vs int64
        dbVersions, err = h.database.GetPostgreSQLQueries().GetContentVersionHistory(context.Background(), postgresql.GetContentVersionHistoryParams{
            ContentID:  contentID,
            SiteID:     siteID,
            LimitCount: int32(limit),
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
        return
    }

    versions := h.convertToAPIVersionList(dbVersions)
    response := ContentVersionsResponse{Versions: versions}

    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(response)
}

// RollbackContent handles POST /api/content/{id}/rollback
func (h *ContentHandler) RollbackContent(w http.ResponseWriter, r *http.Request) {
    vars := mux.Vars(r)
    contentID := vars["id"]
    siteID := r.URL.Query().Get("site_id")

    if siteID == "" {
        http.Error(w, "site_id parameter is required", http.StatusBadRequest)
        return
    }

    var req RollbackContentRequest
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        http.Error(w, "Invalid JSON", http.StatusBadRequest)
        return
    }

    // Get the target version
    var targetVersion interface{}
    var err error

    switch h.database.GetDBType() {
    case "sqlite3":
        targetVersion, err = h.database.GetSQLiteQueries().GetContentVersion(context.Background(), req.VersionID)
    case "postgresql":
        targetVersion, err = h.database.GetPostgreSQLQueries().GetContentVersion(context.Background(), int32(req.VersionID))
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        if err == sql.ErrNoRows {
            http.Error(w, "Version not found", http.StatusNotFound)
            return
        }
        http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
        return
    }

    // Verify the version belongs to the correct content
    if !h.versionMatches(targetVersion, contentID, siteID) {
        http.Error(w, "Version does not match content", http.StatusBadRequest)
        return
    }

    // Extract user from request
    userID := r.Header.Get("X-User-ID")
    if userID == "" && req.RolledBackBy != "" {
        userID = req.RolledBackBy
    }
    if userID == "" {
        userID = "anonymous"
    }

    // Archive current version before rollback
    var currentContent interface{}

    switch h.database.GetDBType() {
    case "sqlite3":
        currentContent, err = h.database.GetSQLiteQueries().GetContent(context.Background(), sqlite.GetContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    case "postgresql":
        currentContent, err = h.database.GetPostgreSQLQueries().GetContent(context.Background(), postgresql.GetContentParams{
            ID:     contentID,
            SiteID: siteID,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Failed to get current content: %v", err), http.StatusInternalServerError)
        return
    }

    err = h.createContentVersion(currentContent)
    if err != nil {
        http.Error(w, fmt.Sprintf("Failed to create version: %v", err), http.StatusInternalServerError)
        return
    }

    // Rollback to target version
    var updatedContent interface{}

    switch h.database.GetDBType() {
    case "sqlite3":
        sqliteVersion := targetVersion.(sqlite.ContentVersion)
        updatedContent, err = h.database.GetSQLiteQueries().UpdateContent(context.Background(), sqlite.UpdateContentParams{
            Value:        sqliteVersion.Value,
            Type:         sqliteVersion.Type,
            LastEditedBy: userID,
            ID:           contentID,
            SiteID:       siteID,
        })
    case "postgresql":
        pgVersion := targetVersion.(postgresql.ContentVersion)
        updatedContent, err = h.database.GetPostgreSQLQueries().UpdateContent(context.Background(), postgresql.UpdateContentParams{
            Value:        pgVersion.Value,
            Type:         pgVersion.Type,
            LastEditedBy: userID,
            ID:           contentID,
            SiteID:       siteID,
        })
    default:
        http.Error(w, "Unsupported database type", http.StatusInternalServerError)
        return
    }

    if err != nil {
        http.Error(w, fmt.Sprintf("Failed to rollback content: %v", err), http.StatusInternalServerError)
        return
    }

    item := h.convertToAPIContent(updatedContent)

    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(item)
}

// Helper functions for type conversion
func (h *ContentHandler) convertToAPIContent(content interface{}) ContentItem {
    switch h.database.GetDBType() {
    case "sqlite3":
        c := content.(sqlite.Content)
        return ContentItem{
            ID:           c.ID,
            SiteID:       c.SiteID,
            Value:        c.Value,
            Type:         c.Type,
            CreatedAt:    time.Unix(c.CreatedAt, 0),
            UpdatedAt:    time.Unix(c.UpdatedAt, 0),
            LastEditedBy: c.LastEditedBy,
        }
    case "postgresql":
        c := content.(postgresql.Content)
        return ContentItem{
            ID:           c.ID,
            SiteID:       c.SiteID,
            Value:        c.Value,
            Type:         c.Type,
            CreatedAt:    time.Unix(c.CreatedAt, 0),
            UpdatedAt:    time.Unix(c.UpdatedAt, 0),
            LastEditedBy: c.LastEditedBy,
        }
    }
    return ContentItem{} // Should never happen
}

func (h *ContentHandler) convertToAPIContentList(contentList interface{}) []ContentItem {
    switch h.database.GetDBType() {
    case "sqlite3":
        list := contentList.([]sqlite.Content)
        items := make([]ContentItem, len(list))
        for i, content := range list {
            items[i] = h.convertToAPIContent(content)
        }
        return items
    case "postgresql":
        list := contentList.([]postgresql.Content)
        items := make([]ContentItem, len(list))
        for i, content := range list {
            items[i] = h.convertToAPIContent(content)
        }
        return items
    }
    return []ContentItem{} // Should never happen
}

func (h *ContentHandler) convertToAPIVersionList(versionList interface{}) []ContentVersion {
    switch h.database.GetDBType() {
    case "sqlite3":
        list := versionList.([]sqlite.ContentVersion)
        versions := make([]ContentVersion, len(list))
        for i, version := range list {
            versions[i] = ContentVersion{
                VersionID: version.VersionID,
                ContentID: version.ContentID,
                SiteID:    version.SiteID,
                Value:     version.Value,
                Type:      version.Type,
                CreatedAt: time.Unix(version.CreatedAt, 0),
                CreatedBy: version.CreatedBy,
            }
        }
        return versions
    case "postgresql":
        list := versionList.([]postgresql.ContentVersion)
        versions := make([]ContentVersion, len(list))
        for i, version := range list {
            versions[i] = ContentVersion{
                VersionID: int64(version.VersionID),
                ContentID: version.ContentID,
                SiteID:    version.SiteID,
                Value:     version.Value,
                Type:      version.Type,
                CreatedAt: time.Unix(version.CreatedAt, 0),
                CreatedBy: version.CreatedBy,
            }
        }
        return versions
    }
    return []ContentVersion{} // Should never happen
}

func (h *ContentHandler) createContentVersion(content interface{}) error {
    switch h.database.GetDBType() {
    case "sqlite3":
        c := content.(sqlite.Content)
        return h.database.GetSQLiteQueries().CreateContentVersion(context.Background(), sqlite.CreateContentVersionParams{
            ContentID: c.ID,
            SiteID:    c.SiteID,
            Value:     c.Value,
            Type:      c.Type,
            CreatedBy: c.LastEditedBy,
        })
    case "postgresql":
        c := content.(postgresql.Content)
        return h.database.GetPostgreSQLQueries().CreateContentVersion(context.Background(), postgresql.CreateContentVersionParams{
```
|
||||||
|
ContentID: c.ID,
|
||||||
|
SiteID: c.SiteID,
|
||||||
|
Value: c.Value,
|
||||||
|
Type: c.Type,
|
||||||
|
CreatedBy: c.LastEditedBy,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return fmt.Errorf("unsupported database type")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *ContentHandler) getContentType(content interface{}) string {
|
||||||
|
switch h.database.GetDBType() {
|
||||||
|
case "sqlite3":
|
||||||
|
return content.(sqlite.Content).Type
|
||||||
|
case "postgresql":
|
||||||
|
return content.(postgresql.Content).Type
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *ContentHandler) versionMatches(version interface{}, contentID, siteID string) bool {
|
||||||
|
switch h.database.GetDBType() {
|
||||||
|
case "sqlite3":
|
||||||
|
v := version.(sqlite.ContentVersion)
|
||||||
|
return v.ContentID == contentID && v.SiteID == siteID
|
||||||
|
case "postgresql":
|
||||||
|
v := version.(postgresql.ContentVersion)
|
||||||
|
return v.ContentID == contentID && v.SiteID == siteID
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
127
internal/api/middleware.go
Normal file
@@ -0,0 +1,127 @@
package api

import (
	"log"
	"net/http"
	"time"
)

// CORSMiddleware adds CORS headers to enable browser requests
func CORSMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		origin := r.Header.Get("Origin")

		// Allow localhost and 127.0.0.1 on common development ports
		allowedOrigins := []string{
			"http://localhost:3000",
			"http://127.0.0.1:3000",
			"http://localhost:8080",
			"http://127.0.0.1:8080",
		}

		// Check if origin is allowed
		originAllowed := false
		for _, allowed := range allowedOrigins {
			if origin == allowed {
				originAllowed = true
				break
			}
		}

		if originAllowed {
			w.Header().Set("Access-Control-Allow-Origin", origin)
		} else {
			// Fallback to wildcard for development (can be restricted in production)
			w.Header().Set("Access-Control-Allow-Origin", "*")
		}

		w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
		w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
		w.Header().Set("Access-Control-Allow-Credentials", "true")

		// Note: Explicit OPTIONS handling is done via routes, not here
		next.ServeHTTP(w, r)
	})
}

// LoggingMiddleware logs HTTP requests
func LoggingMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		start := time.Now()

		// Create a response writer wrapper to capture status code
		wrapper := &responseWriter{ResponseWriter: w, statusCode: http.StatusOK}

		next.ServeHTTP(wrapper, r)

		log.Printf("%s %s %d %v", r.Method, r.URL.Path, wrapper.statusCode, time.Since(start))
	})
}

// responseWriter wraps http.ResponseWriter to capture status code
type responseWriter struct {
	http.ResponseWriter
	statusCode int
}

func (rw *responseWriter) WriteHeader(code int) {
	rw.statusCode = code
	rw.ResponseWriter.WriteHeader(code)
}

// ContentTypeMiddleware ensures JSON responses have proper content type
func ContentTypeMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Set default content type for API responses
		if r.URL.Path != "/" && (r.Method == "GET" || r.Method == "POST" || r.Method == "PUT") {
			w.Header().Set("Content-Type", "application/json")
		}

		next.ServeHTTP(w, r)
	})
}

// HealthMiddleware provides a simple health check endpoint
func HealthMiddleware() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(`{"status":"healthy","service":"insertr-server"}`))
	}
}

// CORSPreflightHandler handles CORS preflight requests (OPTIONS)
func CORSPreflightHandler(w http.ResponseWriter, r *http.Request) {
	origin := r.Header.Get("Origin")

	// Allow localhost and 127.0.0.1 on common development ports
	allowedOrigins := []string{
		"http://localhost:3000",
		"http://127.0.0.1:3000",
		"http://localhost:8080",
		"http://127.0.0.1:8080",
	}

	// Check if origin is allowed
	originAllowed := false
	for _, allowed := range allowedOrigins {
		if origin == allowed {
			originAllowed = true
			break
		}
	}

	if originAllowed {
		w.Header().Set("Access-Control-Allow-Origin", origin)
	} else {
		// Fallback to wildcard for development
		w.Header().Set("Access-Control-Allow-Origin", "*")
	}

	w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
	w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
	w.Header().Set("Access-Control-Allow-Credentials", "true")
	w.Header().Set("Access-Control-Max-Age", "86400") // Cache preflight for 24 hours

	w.WriteHeader(http.StatusOK)
}
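For reference, a minimal sketch of how these middleware could be chained around a mux. The route paths, port, and handler body here are assumptions for illustration, not the actual server wiring (which lives in the serve command).

```go
// Sketch only: routes and port are hypothetical.
package main

import (
	"net/http"

	"github.com/insertr/insertr/internal/api"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/health", api.HealthMiddleware())
	mux.HandleFunc("/api/content", func(w http.ResponseWriter, r *http.Request) {
		// OPTIONS is routed explicitly, as the CORS middleware comment notes.
		if r.Method == http.MethodOptions {
			api.CORSPreflightHandler(w, r)
			return
		}
		// ... content handlers would be dispatched here ...
	})

	// Outermost first: logging wraps CORS wraps content-type wraps the mux.
	handler := api.LoggingMiddleware(api.CORSMiddleware(api.ContentTypeMiddleware(mux)))
	http.ListenAndServe(":8080", handler)
}
```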
52
internal/api/models.go
Normal file
@@ -0,0 +1,52 @@
package api

import "time"

// API request/response models
type ContentItem struct {
	ID           string    `json:"id"`
	SiteID       string    `json:"site_id"`
	Value        string    `json:"value"`
	Type         string    `json:"type"`
	CreatedAt    time.Time `json:"created_at"`
	UpdatedAt    time.Time `json:"updated_at"`
	LastEditedBy string    `json:"last_edited_by"`
}

type ContentVersion struct {
	VersionID int64     `json:"version_id"`
	ContentID string    `json:"content_id"`
	SiteID    string    `json:"site_id"`
	Value     string    `json:"value"`
	Type      string    `json:"type"`
	CreatedAt time.Time `json:"created_at"`
	CreatedBy string    `json:"created_by"`
}

type ContentResponse struct {
	Content []ContentItem `json:"content"`
}

type ContentVersionsResponse struct {
	Versions []ContentVersion `json:"versions"`
}

// Request models
type CreateContentRequest struct {
	ID        string `json:"id"`
	SiteID    string `json:"site_id,omitempty"`
	Value     string `json:"value"`
	Type      string `json:"type"`
	CreatedBy string `json:"created_by,omitempty"`
}

type UpdateContentRequest struct {
	Value     string `json:"value"`
	Type      string `json:"type,omitempty"`
	UpdatedBy string `json:"updated_by,omitempty"`
}

type RollbackContentRequest struct {
	VersionID    int64  `json:"version_id"`
	RolledBackBy string `json:"rolled_back_by,omitempty"`
}
164
internal/content/client.go
Normal file
@@ -0,0 +1,164 @@
package content

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strings"
	"time"
)

// HTTPClient implements ContentClient for HTTP API access
type HTTPClient struct {
	BaseURL    string
	APIKey     string
	HTTPClient *http.Client
}

// NewHTTPClient creates a new HTTP content client
func NewHTTPClient(baseURL, apiKey string) *HTTPClient {
	return &HTTPClient{
		BaseURL: strings.TrimSuffix(baseURL, "/"),
		APIKey:  apiKey,
		HTTPClient: &http.Client{
			Timeout: 30 * time.Second,
		},
	}
}

// GetContent fetches a single content item by ID
func (c *HTTPClient) GetContent(siteID, contentID string) (*ContentItem, error) {
	url := fmt.Sprintf("%s/api/content/%s?site_id=%s", c.BaseURL, contentID, siteID)

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("creating request: %w", err)
	}

	if c.APIKey != "" {
		req.Header.Set("Authorization", "Bearer "+c.APIKey)
	}

	resp, err := c.HTTPClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("making request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode == 404 {
		return nil, nil // Content not found, return nil without error
	}

	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("API error: %s", resp.Status)
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("reading response: %w", err)
	}

	var item ContentItem
	if err := json.Unmarshal(body, &item); err != nil {
		return nil, fmt.Errorf("parsing response: %w", err)
	}

	return &item, nil
}

// GetBulkContent fetches multiple content items by IDs
func (c *HTTPClient) GetBulkContent(siteID string, contentIDs []string) (map[string]ContentItem, error) {
	if len(contentIDs) == 0 {
		return make(map[string]ContentItem), nil
	}

	// Build query parameters
	params := url.Values{}
	params.Set("site_id", siteID)
	for _, id := range contentIDs {
		params.Add("ids", id)
	}

	url := fmt.Sprintf("%s/api/content/bulk?%s", c.BaseURL, params.Encode())

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("creating request: %w", err)
	}

	if c.APIKey != "" {
		req.Header.Set("Authorization", "Bearer "+c.APIKey)
	}

	resp, err := c.HTTPClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("making request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("API error: %s", resp.Status)
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("reading response: %w", err)
	}

	var response ContentResponse
	if err := json.Unmarshal(body, &response); err != nil {
		return nil, fmt.Errorf("parsing response: %w", err)
	}

	// Convert slice to map for easy lookup
	result := make(map[string]ContentItem)
	for _, item := range response.Content {
		result[item.ID] = item
	}

	return result, nil
}

// GetAllContent fetches all content for a site
func (c *HTTPClient) GetAllContent(siteID string) (map[string]ContentItem, error) {
	url := fmt.Sprintf("%s/api/content?site_id=%s", c.BaseURL, siteID)

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("creating request: %w", err)
	}

	if c.APIKey != "" {
		req.Header.Set("Authorization", "Bearer "+c.APIKey)
	}

	resp, err := c.HTTPClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("making request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != 200 {
		return nil, fmt.Errorf("API error: %s", resp.Status)
	}

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("reading response: %w", err)
	}

	var response ContentResponse
	if err := json.Unmarshal(body, &response); err != nil {
		return nil, fmt.Errorf("parsing response: %w", err)
	}

	// Convert slice to map for easy lookup
	result := make(map[string]ContentItem)
	for _, item := range response.Content {
		result[item.ID] = item
	}

	return result, nil
}
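A minimal usage sketch for the HTTP client during build-time enhancement. The base URL is an assumption; the site and content IDs match the demo/mock data elsewhere in this commit.

```go
// Sketch only: base URL is assumed.
package main

import (
	"log"

	"github.com/insertr/insertr/internal/content"
)

func main() {
	client := content.NewHTTPClient("http://localhost:8080", "")

	item, err := client.GetContent("demo", "hero-title-7cfeea")
	if err != nil {
		log.Fatalf("fetching content: %v", err)
	}
	if item == nil {
		log.Println("content not found; the enhancer keeps the original HTML")
		return
	}
	log.Printf("%s = %q (%s)", item.ID, item.Value, item.Type)
}
```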
216
internal/content/enhancer.go
Normal file
@@ -0,0 +1,216 @@
package content

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"golang.org/x/net/html"

	"github.com/insertr/insertr/internal/parser"
)

// Enhancer combines parsing and content injection
type Enhancer struct {
	parser   *parser.Parser
	injector *Injector
}

// NewEnhancer creates a new HTML enhancer
func NewEnhancer(client ContentClient, siteID string) *Enhancer {
	return &Enhancer{
		parser:   parser.New(),
		injector: NewInjector(client, siteID),
	}
}

// EnhanceFile processes an HTML file and injects content
func (e *Enhancer) EnhanceFile(inputPath, outputPath string) error {
	// Use parser to get elements from file
	result, err := e.parser.ParseDirectory(filepath.Dir(inputPath))
	if err != nil {
		return fmt.Errorf("parsing file: %w", err)
	}

	// Filter elements for this specific file
	var fileElements []parser.Element
	inputBaseName := filepath.Base(inputPath)
	for _, elem := range result.Elements {
		elemBaseName := filepath.Base(elem.FilePath)
		if elemBaseName == inputBaseName {
			fileElements = append(fileElements, elem)
		}
	}

	if len(fileElements) == 0 {
		// No insertr elements found, copy file as-is
		return e.copyFile(inputPath, outputPath)
	}

	// Read and parse HTML for modification
	htmlContent, err := os.ReadFile(inputPath)
	if err != nil {
		return fmt.Errorf("reading file %s: %w", inputPath, err)
	}

	doc, err := html.Parse(strings.NewReader(string(htmlContent)))
	if err != nil {
		return fmt.Errorf("parsing HTML: %w", err)
	}

	// Find and inject content for each element
	for _, elem := range fileElements {
		// Find the node in the parsed document
		// Note: This is a simplified approach - in production we'd need more robust node matching
		if err := e.injectElementContent(doc, elem); err != nil {
			fmt.Printf("⚠️ Warning: failed to inject content for %s: %v\n", elem.ContentID, err)
		}
	}

	// Inject editor assets for development
	libraryScript := GetLibraryScript(false) // Use non-minified for development debugging
	e.injector.InjectEditorAssets(doc, true, libraryScript)

	// Write enhanced HTML
	if err := e.writeHTML(doc, outputPath); err != nil {
		return fmt.Errorf("writing enhanced HTML: %w", err)
	}

	fmt.Printf("✅ Enhanced: %s → %s (%d elements)\n",
		filepath.Base(inputPath),
		filepath.Base(outputPath),
		len(fileElements))

	return nil
}

// injectElementContent finds and injects content for a specific element
func (e *Enhancer) injectElementContent(doc *html.Node, elem parser.Element) error {
	// Fetch content from database
	contentItem, err := e.injector.client.GetContent(e.injector.siteID, elem.ContentID)
	if err != nil {
		return fmt.Errorf("fetching content: %w", err)
	}

	// Find nodes with insertr class and inject content
	e.findAndInjectNodes(doc, elem, contentItem)
	return nil
}

// findAndInjectNodes recursively finds nodes and injects content
func (e *Enhancer) findAndInjectNodes(node *html.Node, elem parser.Element, contentItem *ContentItem) {
	if node.Type == html.ElementNode {
		// Check if this node matches our element criteria
		classes := getClasses(node)
		if containsClass(classes, "insertr") && node.Data == elem.Tag {
			// This might be our target node - inject content
			e.injector.addContentAttributes(node, elem.ContentID, string(elem.Type))

			if contentItem != nil {
				switch elem.Type {
				case parser.ContentText:
					e.injector.injectTextContent(node, contentItem.Value)
				case parser.ContentMarkdown:
					e.injector.injectMarkdownContent(node, contentItem.Value)
				case parser.ContentLink:
					e.injector.injectLinkContent(node, contentItem.Value)
				}
			}
		}
	}

	// Recursively process children
	for child := node.FirstChild; child != nil; child = child.NextSibling {
		e.findAndInjectNodes(child, elem, contentItem)
	}
}

// Helper functions from parser package
func getClasses(node *html.Node) []string {
	for _, attr := range node.Attr {
		if attr.Key == "class" {
			return strings.Fields(attr.Val)
		}
	}
	return []string{}
}

func containsClass(classes []string, target string) bool {
	for _, class := range classes {
		if class == target {
			return true
		}
	}
	return false
}

// EnhanceDirectory processes all HTML files in a directory
func (e *Enhancer) EnhanceDirectory(inputDir, outputDir string) error {
	// Create output directory
	if err := os.MkdirAll(outputDir, 0755); err != nil {
		return fmt.Errorf("creating output directory: %w", err)
	}

	// Walk input directory
	return filepath.Walk(inputDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}

		// Calculate relative path and output path
		relPath, err := filepath.Rel(inputDir, path)
		if err != nil {
			return err
		}
		outputPath := filepath.Join(outputDir, relPath)

		// Handle directories
		if info.IsDir() {
			return os.MkdirAll(outputPath, info.Mode())
		}

		// Handle HTML files
		if strings.HasSuffix(strings.ToLower(path), ".html") {
			return e.EnhanceFile(path, outputPath)
		}

		// Copy other files as-is
		return e.copyFile(path, outputPath)
	})
}

// copyFile copies a file from src to dst
func (e *Enhancer) copyFile(src, dst string) error {
	// Create directory for destination
	if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
		return err
	}

	// Read source
	data, err := os.ReadFile(src)
	if err != nil {
		return err
	}

	// Write destination
	return os.WriteFile(dst, data, 0644)
}

// writeHTML writes an HTML document to a file
func (e *Enhancer) writeHTML(doc *html.Node, outputPath string) error {
	// Create directory for output
	if err := os.MkdirAll(filepath.Dir(outputPath), 0755); err != nil {
		return err
	}

	// Create output file
	file, err := os.Create(outputPath)
	if err != nil {
		return err
	}
	defer file.Close()

	// Write HTML
	return html.Render(file, doc)
}
236
internal/content/injector.go
Normal file
@@ -0,0 +1,236 @@
package content

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

// Injector handles content injection into HTML elements
type Injector struct {
	client ContentClient
	siteID string
}

// NewInjector creates a new content injector
func NewInjector(client ContentClient, siteID string) *Injector {
	return &Injector{
		client: client,
		siteID: siteID,
	}
}

// InjectContent replaces element content with database values and adds content IDs
func (i *Injector) InjectContent(element *Element, contentID string) error {
	// Fetch content from database/API
	contentItem, err := i.client.GetContent(i.siteID, contentID)
	if err != nil {
		return fmt.Errorf("fetching content for %s: %w", contentID, err)
	}

	// If no content found, keep original content but add data attributes
	if contentItem == nil {
		i.addContentAttributes(element.Node, contentID, element.Type)
		return nil
	}

	// Replace element content based on type
	switch element.Type {
	case "text":
		i.injectTextContent(element.Node, contentItem.Value)
	case "markdown":
		i.injectMarkdownContent(element.Node, contentItem.Value)
	case "link":
		i.injectLinkContent(element.Node, contentItem.Value)
	default:
		i.injectTextContent(element.Node, contentItem.Value)
	}

	// Add data attributes for editor functionality
	i.addContentAttributes(element.Node, contentID, element.Type)

	return nil
}

// InjectBulkContent efficiently injects multiple content items
func (i *Injector) InjectBulkContent(elements []ElementWithID) error {
	// Extract content IDs for bulk fetch
	contentIDs := make([]string, len(elements))
	for idx, elem := range elements {
		contentIDs[idx] = elem.ContentID
	}

	// Bulk fetch content
	contentMap, err := i.client.GetBulkContent(i.siteID, contentIDs)
	if err != nil {
		return fmt.Errorf("bulk fetching content: %w", err)
	}

	// Inject each element
	for _, elem := range elements {
		contentItem, exists := contentMap[elem.ContentID]

		// Add content attributes regardless
		i.addContentAttributes(elem.Element.Node, elem.ContentID, elem.Element.Type)

		if !exists {
			// Keep original content if not found in database
			continue
		}

		// Replace content based on type
		switch elem.Element.Type {
		case "text":
			i.injectTextContent(elem.Element.Node, contentItem.Value)
		case "markdown":
			i.injectMarkdownContent(elem.Element.Node, contentItem.Value)
		case "link":
			i.injectLinkContent(elem.Element.Node, contentItem.Value)
		default:
			i.injectTextContent(elem.Element.Node, contentItem.Value)
		}
	}

	return nil
}

// injectTextContent replaces text content in an element
func (i *Injector) injectTextContent(node *html.Node, content string) {
	// Remove all child nodes
	for child := node.FirstChild; child != nil; {
		next := child.NextSibling
		node.RemoveChild(child)
		child = next
	}

	// Add new text content
	textNode := &html.Node{
		Type: html.TextNode,
		Data: content,
	}
	node.AppendChild(textNode)
}

// injectMarkdownContent handles markdown content (for now, just as text)
func (i *Injector) injectMarkdownContent(node *html.Node, content string) {
	// For now, treat markdown as text content
	// TODO: Implement markdown to HTML conversion
	i.injectTextContent(node, content)
}

// injectLinkContent handles link/button content with URL extraction
func (i *Injector) injectLinkContent(node *html.Node, content string) {
	// For now, just inject the text content
	// TODO: Parse content for URL and text components
	i.injectTextContent(node, content)
}

// addContentAttributes adds necessary data attributes and insertr class for editor functionality
func (i *Injector) addContentAttributes(node *html.Node, contentID string, contentType string) {
	i.setAttribute(node, "data-content-id", contentID)
	i.setAttribute(node, "data-content-type", contentType)
	i.addClass(node, "insertr")
}

// InjectEditorAssets adds editor JavaScript to HTML document
func (i *Injector) InjectEditorAssets(doc *html.Node, isDevelopment bool, libraryScript string) {
	// TODO: Implement script injection strategy when we have CDN hosting
	// For now, script injection is disabled since HTML files should include their own script tags
	// Future options:
	// 1. Inject CDN script tag: <script src="https://cdn.jsdelivr.net/npm/@insertr/lib@1.0.0/dist/insertr.js"></script>
	// 2. Inject local script tag for development: <script src="/insertr/insertr.js"></script>
	// 3. Continue with inline injection for certain use cases

	// Currently disabled to avoid duplicate scripts
	return
}

// findHeadElement finds the <head> element in the document
func (i *Injector) findHeadElement(node *html.Node) *html.Node {
	if node.Type == html.ElementNode && node.Data == "head" {
		return node
	}

	for child := node.FirstChild; child != nil; child = child.NextSibling {
		if result := i.findHeadElement(child); result != nil {
			return result
		}
	}

	return nil
}

// setAttribute safely sets an attribute on an HTML node
func (i *Injector) setAttribute(node *html.Node, key, value string) {
	// Remove existing attribute if present
	for idx, attr := range node.Attr {
		if attr.Key == key {
			node.Attr = append(node.Attr[:idx], node.Attr[idx+1:]...)
			break
		}
	}

	// Add new attribute
	node.Attr = append(node.Attr, html.Attribute{
		Key: key,
		Val: value,
	})
}

// addClass safely adds a class to an HTML node
func (i *Injector) addClass(node *html.Node, className string) {
	var classAttr *html.Attribute
	var classIndex int = -1

	// Find existing class attribute
	for idx, attr := range node.Attr {
		if attr.Key == "class" {
			classAttr = &attr
			classIndex = idx
			break
		}
	}

	var classes []string
	if classAttr != nil {
		classes = strings.Fields(classAttr.Val)
	}

	// Check if class already exists
	for _, class := range classes {
		if class == className {
			return // Class already exists
		}
	}

	// Add new class
	classes = append(classes, className)
	newClassValue := strings.Join(classes, " ")

	if classIndex >= 0 {
		// Update existing class attribute
		node.Attr[classIndex].Val = newClassValue
	} else {
		// Add new class attribute
		node.Attr = append(node.Attr, html.Attribute{
			Key: "class",
			Val: newClassValue,
		})
	}
}

// Element represents a parsed HTML element with metadata
type Element struct {
	Node    *html.Node
	Type    string
	Tag     string
	Classes []string
	Content string
}

// ElementWithID combines an element with its generated content ID
type ElementWithID struct {
	Element   *Element
	ContentID string
}
50
internal/content/library.go
Normal file
@@ -0,0 +1,50 @@
package content

import (
	_ "embed"
	"fmt"
)

// Embedded library assets
//
//go:embed assets/insertr.min.js
var libraryMinJS string

//go:embed assets/insertr.js
var libraryJS string

// GetLibraryScript returns the appropriate library version
func GetLibraryScript(minified bool) string {
	if minified {
		return libraryMinJS
	}
	return libraryJS
}

// GetLibraryVersion returns the current embedded library version
func GetLibraryVersion() string {
	return "1.0.0"
}

// GetLibraryURL returns the appropriate library URL for script injection
func GetLibraryURL(minified bool, isDevelopment bool) string {
	if isDevelopment {
		// Local development URLs - relative to served content
		if minified {
			return "/insertr/insertr.min.js"
		}
		return "/insertr/insertr.js"
	}

	// Production URLs - use CDN
	return GetLibraryCDNURL(minified)
}

// GetLibraryCDNURL returns the CDN URL for production use
func GetLibraryCDNURL(minified bool) string {
	version := GetLibraryVersion()
	if minified {
		return fmt.Sprintf("https://cdn.jsdelivr.net/npm/@insertr/lib@%s/dist/insertr.min.js", version)
	}
	return fmt.Sprintf("https://cdn.jsdelivr.net/npm/@insertr/lib@%s/dist/insertr.js", version)
}
138
internal/content/mock.go
Normal file
@@ -0,0 +1,138 @@
package content

import (
	"time"
)

// MockClient implements ContentClient with mock data for development
type MockClient struct {
	data map[string]ContentItem
}

// NewMockClient creates a new mock content client with sample data
func NewMockClient() *MockClient {
	// Generate realistic mock content based on actual generated IDs
	data := map[string]ContentItem{
		// Navigation (index.html has collision suffix)
		"navbar-logo-2b10ad": {
			ID:        "navbar-logo-2b10ad",
			SiteID:    "demo",
			Value:     "Acme Consulting Solutions",
			Type:      "text",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},
		"navbar-logo-2b10ad-a44bad": {
			ID:        "navbar-logo-2b10ad-a44bad",
			SiteID:    "demo",
			Value:     "Acme Business Advisors",
			Type:      "text",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},

		// Hero Section - index.html (updated with actual IDs)
		"hero-title-7cfeea": {
			ID:        "hero-title-7cfeea",
			SiteID:    "demo",
			Value:     "Transform Your Business with Strategic Expertise",
			Type:      "text",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},
		"hero-lead-e47475": {
			ID:        "hero-lead-e47475",
			SiteID:    "demo",
			Value:     "We help **ambitious businesses** grow through strategic planning, process optimization, and digital transformation. Our team brings 20+ years of experience to accelerate your success.",
			Type:      "markdown",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},
		"hero-link-76c620": {
			ID:        "hero-link-76c620",
			SiteID:    "demo",
			Value:     "Schedule Free Consultation",
			Type:      "link",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},

		// Hero Section - about.html
		"hero-title-c70343": {
			ID:        "hero-title-c70343",
			SiteID:    "demo",
			Value:     "About Our Consulting Expertise",
			Type:      "text",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},
		"hero-lead-673026": {
			ID:        "hero-lead-673026",
			SiteID:    "demo",
			Value:     "We're a team of **experienced consultants** dedicated to helping small businesses thrive in today's competitive marketplace through proven strategies.",
			Type:      "markdown",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},

		// Services Section
		"services-subtitle-c8927c": {
			ID:        "services-subtitle-c8927c",
			SiteID:    "demo",
			Value:     "Our Story",
			Type:      "text",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},
		"services-text-0d96da": {
			ID:        "services-text-0d96da",
			SiteID:    "demo",
			Value:     "**Founded in 2020**, Acme Consulting emerged from a simple observation: small businesses needed access to the same high-quality strategic advice that large corporations receive, but in a format that was accessible, affordable, and actionable.",
			Type:      "markdown",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},

		// Default fallback for any missing content
		"default": {
			ID:        "default",
			SiteID:    "demo",
			Value:     "[Enhanced Content]",
			Type:      "text",
			UpdatedAt: time.Now().Format(time.RFC3339),
		},
	}

	return &MockClient{data: data}
}

// GetContent fetches a single content item by ID
func (m *MockClient) GetContent(siteID, contentID string) (*ContentItem, error) {
	if item, exists := m.data[contentID]; exists && item.SiteID == siteID {
		return &item, nil
	}

	// Return nil for missing content - this will preserve original HTML content
	return nil, nil
}

// GetBulkContent fetches multiple content items by IDs
func (m *MockClient) GetBulkContent(siteID string, contentIDs []string) (map[string]ContentItem, error) {
	result := make(map[string]ContentItem)

	for _, id := range contentIDs {
		item, err := m.GetContent(siteID, id)
		if err != nil {
			return nil, err
		}
		if item != nil {
			result[id] = *item
		}
	}

	return result, nil
}

// GetAllContent fetches all content for a site
func (m *MockClient) GetAllContent(siteID string) (map[string]ContentItem, error) {
	result := make(map[string]ContentItem)

	for _, item := range m.data {
		if item.SiteID == siteID {
			result[item.ID] = item
		}
	}

	return result, nil
}
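A minimal sketch of wiring the mock client into the enhancer for offline development; the input and output paths are placeholders.

```go
// Sketch only: paths are placeholders.
package main

import (
	"log"

	"github.com/insertr/insertr/internal/content"
)

func main() {
	client := content.NewMockClient()
	enhancer := content.NewEnhancer(client, "demo")

	// Injects the mock values into every insertr-tagged element under ./demo-site.
	if err := enhancer.EnhanceDirectory("./demo-site", "./dist"); err != nil {
		log.Fatalf("enhance failed: %v", err)
	}
}
```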
28
internal/content/types.go
Normal file
@@ -0,0 +1,28 @@
package content

// ContentItem represents a piece of content from the database
type ContentItem struct {
	ID        string `json:"id"`
	SiteID    string `json:"site_id"`
	Value     string `json:"value"`
	Type      string `json:"type"`
	UpdatedAt string `json:"updated_at"`
}

// ContentResponse represents the API response structure
type ContentResponse struct {
	Content []ContentItem `json:"content"`
	Error   string        `json:"error,omitempty"`
}

// ContentClient interface for content retrieval
type ContentClient interface {
	// GetContent fetches content by ID
	GetContent(siteID, contentID string) (*ContentItem, error)

	// GetBulkContent fetches multiple content items by IDs
	GetBulkContent(siteID string, contentIDs []string) (map[string]ContentItem, error)

	// GetAllContent fetches all content for a site
	GetAllContent(siteID string) (map[string]ContentItem, error)
}
184
internal/db/database.go
Normal file
@@ -0,0 +1,184 @@
package db

import (
	"context"
	"database/sql"
	"fmt"
	"strings"

	_ "github.com/lib/pq"
	_ "github.com/mattn/go-sqlite3"

	"github.com/insertr/insertr/internal/db/postgresql"
	"github.com/insertr/insertr/internal/db/sqlite"
)

// Database wraps the database connection and queries
type Database struct {
	conn   *sql.DB
	dbType string

	// Type-specific query interfaces
	sqliteQueries     *sqlite.Queries
	postgresqlQueries *postgresql.Queries
}

// NewDatabase creates a new database connection
func NewDatabase(dbPath string) (*Database, error) {
	var conn *sql.DB
	var dbType string
	var err error

	// Determine database type from connection string
	if strings.Contains(dbPath, "postgres://") || strings.Contains(dbPath, "postgresql://") {
		dbType = "postgresql"
		conn, err = sql.Open("postgres", dbPath)
	} else {
		dbType = "sqlite3"
		conn, err = sql.Open("sqlite3", dbPath)
	}

	if err != nil {
		return nil, fmt.Errorf("failed to open database: %w", err)
	}

	// Test connection
	if err := conn.Ping(); err != nil {
		conn.Close()
		return nil, fmt.Errorf("failed to ping database: %w", err)
	}

	// Initialize the appropriate queries
	db := &Database{
		conn:   conn,
		dbType: dbType,
	}

	switch dbType {
	case "sqlite3":
		// Initialize SQLite schema using generated functions
		db.sqliteQueries = sqlite.New(conn)
		if err := db.initializeSQLiteSchema(); err != nil {
			conn.Close()
			return nil, fmt.Errorf("failed to initialize SQLite schema: %w", err)
		}
	case "postgresql":
		// Initialize PostgreSQL schema using generated functions
		db.postgresqlQueries = postgresql.New(conn)
		if err := db.initializePostgreSQLSchema(); err != nil {
			conn.Close()
			return nil, fmt.Errorf("failed to initialize PostgreSQL schema: %w", err)
		}
	default:
		return nil, fmt.Errorf("unsupported database type: %s", dbType)
	}

	return db, nil
}

// Close closes the database connection
func (db *Database) Close() error {
	return db.conn.Close()
}

// GetQueries returns the appropriate query interface
func (db *Database) GetSQLiteQueries() *sqlite.Queries {
	return db.sqliteQueries
}

func (db *Database) GetPostgreSQLQueries() *postgresql.Queries {
	return db.postgresqlQueries
}

// GetDBType returns the database type
func (db *Database) GetDBType() string {
	return db.dbType
}

// initializeSQLiteSchema sets up the SQLite database schema
func (db *Database) initializeSQLiteSchema() error {
	ctx := context.Background()

	// Create tables
	if err := db.sqliteQueries.InitializeSchema(ctx); err != nil {
		return fmt.Errorf("failed to create content table: %w", err)
	}

	if err := db.sqliteQueries.InitializeVersionsTable(ctx); err != nil {
		return fmt.Errorf("failed to create content_versions table: %w", err)
	}

	// Create indexes manually (sqlc doesn't generate CREATE INDEX functions for SQLite)
	indexQueries := []string{
		"CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);",
		"CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);",
		"CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);",
	}

	for _, query := range indexQueries {
		if _, err := db.conn.Exec(query); err != nil {
			return fmt.Errorf("failed to create index: %w", err)
		}
	}

	// Create update trigger manually (sqlc doesn't generate trigger creation functions)
	triggerQuery := `
		CREATE TRIGGER IF NOT EXISTS update_content_updated_at
		AFTER UPDATE ON content
		FOR EACH ROW
		BEGIN
			UPDATE content SET updated_at = strftime('%s', 'now') WHERE id = NEW.id AND site_id = NEW.site_id;
		END;`

	if _, err := db.conn.Exec(triggerQuery); err != nil {
		return fmt.Errorf("failed to create update trigger: %w", err)
	}

	return nil
}

// initializePostgreSQLSchema sets up the PostgreSQL database schema
func (db *Database) initializePostgreSQLSchema() error {
	ctx := context.Background()

	// Create tables using sqlc-generated functions
	if err := db.postgresqlQueries.InitializeSchema(ctx); err != nil {
		return fmt.Errorf("failed to create content table: %w", err)
	}

	if err := db.postgresqlQueries.InitializeVersionsTable(ctx); err != nil {
		return fmt.Errorf("failed to create content_versions table: %w", err)
	}

	// Create indexes using sqlc-generated functions (PostgreSQL supports this)
	if err := db.postgresqlQueries.CreateContentSiteIndex(ctx); err != nil {
		return fmt.Errorf("failed to create content site index: %w", err)
	}

	if err := db.postgresqlQueries.CreateContentUpdatedAtIndex(ctx); err != nil {
		return fmt.Errorf("failed to create content updated_at index: %w", err)
	}

	if err := db.postgresqlQueries.CreateVersionsLookupIndex(ctx); err != nil {
		return fmt.Errorf("failed to create versions lookup index: %w", err)
	}

	// Create update function using sqlc-generated function
	if err := db.postgresqlQueries.CreateUpdateFunction(ctx); err != nil {
		return fmt.Errorf("failed to create update function: %w", err)
	}

	// Create trigger manually (sqlc doesn't generate trigger creation functions)
	triggerQuery := `
		DROP TRIGGER IF EXISTS update_content_updated_at ON content;
		CREATE TRIGGER update_content_updated_at
		BEFORE UPDATE ON content
		FOR EACH ROW
		EXECUTE FUNCTION update_content_timestamp();`

	if _, err := db.conn.Exec(triggerQuery); err != nil {
		return fmt.Errorf("failed to create update trigger: %w", err)
	}

	return nil
}
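A minimal sketch of opening the database layer with either backend; the connection strings below are examples only.

```go
// Sketch only: connection strings are examples.
package main

import (
	"log"

	"github.com/insertr/insertr/internal/db"
)

func main() {
	// A plain path selects SQLite; a postgres:// URL selects PostgreSQL.
	database, err := db.NewDatabase("./dev.db")
	if err != nil {
		log.Fatalf("opening database: %v", err)
	}
	defer database.Close()

	switch database.GetDBType() {
	case "sqlite3":
		_ = database.GetSQLiteQueries() // *sqlite.Queries
	case "postgresql":
		_ = database.GetPostgreSQLQueries() // *postgresql.Queries
	}
}
```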
214
internal/db/postgresql/content.sql.go
Normal file
@@ -0,0 +1,214 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0
// source: content.sql

package postgresql

import (
	"context"
	"strings"
)

const createContent = `-- name: CreateContent :one
INSERT INTO content (id, site_id, value, type, last_edited_by)
VALUES ($1, $2, $3, $4, $5)
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
`

type CreateContentParams struct {
	ID           string `json:"id"`
	SiteID       string `json:"site_id"`
	Value        string `json:"value"`
	Type         string `json:"type"`
	LastEditedBy string `json:"last_edited_by"`
}

func (q *Queries) CreateContent(ctx context.Context, arg CreateContentParams) (Content, error) {
	row := q.db.QueryRowContext(ctx, createContent,
		arg.ID,
		arg.SiteID,
		arg.Value,
		arg.Type,
		arg.LastEditedBy,
	)
	var i Content
	err := row.Scan(
		&i.ID,
		&i.SiteID,
		&i.Value,
		&i.Type,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.LastEditedBy,
	)
	return i, err
}

const deleteContent = `-- name: DeleteContent :exec
DELETE FROM content
WHERE id = $1 AND site_id = $2
`

type DeleteContentParams struct {
	ID     string `json:"id"`
	SiteID string `json:"site_id"`
}

func (q *Queries) DeleteContent(ctx context.Context, arg DeleteContentParams) error {
	_, err := q.db.ExecContext(ctx, deleteContent, arg.ID, arg.SiteID)
	return err
}

const getAllContent = `-- name: GetAllContent :many
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
FROM content
WHERE site_id = $1
ORDER BY updated_at DESC
`

func (q *Queries) GetAllContent(ctx context.Context, siteID string) ([]Content, error) {
	rows, err := q.db.QueryContext(ctx, getAllContent, siteID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Content
	for rows.Next() {
		var i Content
		if err := rows.Scan(
			&i.ID,
			&i.SiteID,
			&i.Value,
			&i.Type,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.LastEditedBy,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}

const getBulkContent = `-- name: GetBulkContent :many
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
FROM content
WHERE site_id = $1 AND id IN ($2)
`

type GetBulkContentParams struct {
	SiteID string   `json:"site_id"`
	Ids    []string `json:"ids"`
}

func (q *Queries) GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error) {
	query := getBulkContent
	var queryParams []interface{}
	queryParams = append(queryParams, arg.SiteID)
	if len(arg.Ids) > 0 {
		for _, v := range arg.Ids {
			queryParams = append(queryParams, v)
		}
		query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(arg.Ids))[1:], 1)
	} else {
		query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1)
	}
	rows, err := q.db.QueryContext(ctx, query, queryParams...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var items []Content
	for rows.Next() {
		var i Content
		if err := rows.Scan(
			&i.ID,
			&i.SiteID,
			&i.Value,
			&i.Type,
			&i.CreatedAt,
			&i.UpdatedAt,
			&i.LastEditedBy,
		); err != nil {
			return nil, err
		}
		items = append(items, i)
	}
	if err := rows.Close(); err != nil {
		return nil, err
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return items, nil
}

const getContent = `-- name: GetContent :one
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
FROM content
WHERE id = $1 AND site_id = $2
`

type GetContentParams struct {
	ID     string `json:"id"`
	SiteID string `json:"site_id"`
}

func (q *Queries) GetContent(ctx context.Context, arg GetContentParams) (Content, error) {
	row := q.db.QueryRowContext(ctx, getContent, arg.ID, arg.SiteID)
	var i Content
	err := row.Scan(
		&i.ID,
		&i.SiteID,
		&i.Value,
		&i.Type,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.LastEditedBy,
	)
	return i, err
}

const updateContent = `-- name: UpdateContent :one
UPDATE content
SET value = $1, type = $2, last_edited_by = $3
WHERE id = $4 AND site_id = $5
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
`

type UpdateContentParams struct {
	Value        string `json:"value"`
	Type         string `json:"type"`
	LastEditedBy string `json:"last_edited_by"`
	ID           string `json:"id"`
	SiteID       string `json:"site_id"`
}

func (q *Queries) UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error) {
	row := q.db.QueryRowContext(ctx, updateContent,
		arg.Value,
		arg.Type,
		arg.LastEditedBy,
		arg.ID,
		arg.SiteID,
	)
	var i Content
	err := row.Scan(
		&i.ID,
		&i.SiteID,
		&i.Value,
		&i.Type,
		&i.CreatedAt,
		&i.UpdatedAt,
		&i.LastEditedBy,
	)
	return i, err
}
31
internal/db/postgresql/db.go
Normal file
31
internal/db/postgresql/db.go
Normal file
@@ -0,0 +1,31 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0

package postgresql

import (
	"context"
	"database/sql"
)

type DBTX interface {
	ExecContext(context.Context, string, ...interface{}) (sql.Result, error)
	PrepareContext(context.Context, string) (*sql.Stmt, error)
	QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
	QueryRowContext(context.Context, string, ...interface{}) *sql.Row
}

func New(db DBTX) *Queries {
	return &Queries{db: db}
}

type Queries struct {
	db DBTX
}

func (q *Queries) WithTx(tx *sql.Tx) *Queries {
	return &Queries{
		db: tx,
	}
}
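For context, this is how the generated `Queries` type is typically wired to a live connection and a transaction. A sketch only, not part of this commit: the `github.com/lib/pq` driver, the `internal/db/postgresql` import path, the DSN, and the content ID/site ID values are assumptions for illustration.

```go
package main

import (
	"context"
	"database/sql"
	"log"

	_ "github.com/lib/pq" // assumed Postgres driver; the project may register a different one

	"github.com/insertr/insertr/internal/db/postgresql" // assumed import path
)

func main() {
	ctx := context.Background()

	// DSN is illustrative only.
	conn, err := sql.Open("postgres", "postgres://localhost/insertr?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Run the write inside a transaction via WithTx so a failure rolls back cleanly.
	tx, err := conn.BeginTx(ctx, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer tx.Rollback()

	qtx := postgresql.New(conn).WithTx(tx)
	updated, err := qtx.UpdateContent(ctx, postgresql.UpdateContentParams{
		Value:        "Welcome to the new homepage",
		Type:         "text",
		LastEditedBy: "editor@example.com",
		ID:           "hero-title-a1b2c3", // illustrative content ID
		SiteID:       "demo-site",
	})
	if err != nil {
		log.Fatal(err)
	}
	if err := tx.Commit(); err != nil {
		log.Fatal(err)
	}
	log.Printf("updated %s at %d", updated.ID, updated.UpdatedAt)
}
```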
25
internal/db/postgresql/models.go
Normal file
@@ -0,0 +1,25 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0

package postgresql

type Content struct {
	ID           string `json:"id"`
	SiteID       string `json:"site_id"`
	Value        string `json:"value"`
	Type         string `json:"type"`
	CreatedAt    int64  `json:"created_at"`
	UpdatedAt    int64  `json:"updated_at"`
	LastEditedBy string `json:"last_edited_by"`
}

type ContentVersion struct {
	VersionID int32  `json:"version_id"`
	ContentID string `json:"content_id"`
	SiteID    string `json:"site_id"`
	Value     string `json:"value"`
	Type      string `json:"type"`
	CreatedAt int64  `json:"created_at"`
	CreatedBy string `json:"created_by"`
}
31
internal/db/postgresql/querier.go
Normal file
@@ -0,0 +1,31 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0

package postgresql

import (
	"context"
)

type Querier interface {
	CreateContent(ctx context.Context, arg CreateContentParams) (Content, error)
	CreateContentSiteIndex(ctx context.Context) error
	CreateContentUpdatedAtIndex(ctx context.Context) error
	CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error
	CreateUpdateFunction(ctx context.Context) error
	CreateVersionsLookupIndex(ctx context.Context) error
	DeleteContent(ctx context.Context, arg DeleteContentParams) error
	DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error
	GetAllContent(ctx context.Context, siteID string) ([]Content, error)
	GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error)
	GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error)
	GetContent(ctx context.Context, arg GetContentParams) (Content, error)
	GetContentVersion(ctx context.Context, versionID int32) (ContentVersion, error)
	GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error)
	InitializeSchema(ctx context.Context) error
	InitializeVersionsTable(ctx context.Context) error
	UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error)
}

var _ Querier = (*Queries)(nil)
87
internal/db/postgresql/setup.sql.go
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
// Code generated by sqlc. DO NOT EDIT.
|
||||||
|
// versions:
|
||||||
|
// sqlc v1.29.0
|
||||||
|
// source: setup.sql
|
||||||
|
|
||||||
|
package postgresql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
)
|
||||||
|
|
||||||
|
const createContentSiteIndex = `-- name: CreateContentSiteIndex :exec
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id)
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) CreateContentSiteIndex(ctx context.Context) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, createContentSiteIndex)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const createContentUpdatedAtIndex = `-- name: CreateContentUpdatedAtIndex :exec
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at)
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) CreateContentUpdatedAtIndex(ctx context.Context) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, createContentUpdatedAtIndex)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const createUpdateFunction = `-- name: CreateUpdateFunction :exec
|
||||||
|
CREATE OR REPLACE FUNCTION update_content_timestamp()
|
||||||
|
RETURNS TRIGGER AS $$
|
||||||
|
BEGIN
|
||||||
|
NEW.updated_at = EXTRACT(EPOCH FROM NOW());
|
||||||
|
RETURN NEW;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) CreateUpdateFunction(ctx context.Context) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, createUpdateFunction)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const createVersionsLookupIndex = `-- name: CreateVersionsLookupIndex :exec
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC)
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) CreateVersionsLookupIndex(ctx context.Context) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, createVersionsLookupIndex)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const initializeSchema = `-- name: InitializeSchema :exec
|
||||||
|
CREATE TABLE IF NOT EXISTS content (
|
||||||
|
id TEXT NOT NULL,
|
||||||
|
site_id TEXT NOT NULL,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
|
||||||
|
created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||||
|
updated_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||||
|
last_edited_by TEXT DEFAULT 'system' NOT NULL,
|
||||||
|
PRIMARY KEY (id, site_id)
|
||||||
|
)
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) InitializeSchema(ctx context.Context) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, initializeSchema)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const initializeVersionsTable = `-- name: InitializeVersionsTable :exec
|
||||||
|
CREATE TABLE IF NOT EXISTS content_versions (
|
||||||
|
version_id SERIAL PRIMARY KEY,
|
||||||
|
content_id TEXT NOT NULL,
|
||||||
|
site_id TEXT NOT NULL,
|
||||||
|
value TEXT NOT NULL,
|
||||||
|
type TEXT NOT NULL,
|
||||||
|
created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||||
|
created_by TEXT DEFAULT 'system' NOT NULL
|
||||||
|
)
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) InitializeVersionsTable(ctx context.Context) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, initializeVersionsTable)
|
||||||
|
return err
|
||||||
|
}
|
||||||
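The setup queries above are independent `:exec` statements, so some caller has to run them in a sensible order. A sketch of one plausible bootstrap sequence, not part of this commit — the `dbsetup` package name and the `internal/db/postgresql` import path are hypothetical, and the actual sequencing used by the server is not shown in this diff:

```go
package dbsetup

import (
	"context"

	"github.com/insertr/insertr/internal/db/postgresql" // assumed import path
)

// bootstrapPostgres runs the generated setup statements in one plausible order:
// tables first, then indexes, then the updated_at trigger function.
func bootstrapPostgres(ctx context.Context, q *postgresql.Queries) error {
	steps := []func(context.Context) error{
		q.InitializeSchema,
		q.InitializeVersionsTable,
		q.CreateContentSiteIndex,
		q.CreateContentUpdatedAtIndex,
		q.CreateVersionsLookupIndex,
		q.CreateUpdateFunction,
	}
	for _, step := range steps {
		if err := step(ctx); err != nil {
			return err
		}
	}
	return nil
}
```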
175
internal/db/postgresql/versions.sql.go
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
// Code generated by sqlc. DO NOT EDIT.
|
||||||
|
// versions:
|
||||||
|
// sqlc v1.29.0
|
||||||
|
// source: versions.sql
|
||||||
|
|
||||||
|
package postgresql
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
const createContentVersion = `-- name: CreateContentVersion :exec
|
||||||
|
INSERT INTO content_versions (content_id, site_id, value, type, created_by)
|
||||||
|
VALUES ($1, $2, $3, $4, $5)
|
||||||
|
`
|
||||||
|
|
||||||
|
type CreateContentVersionParams struct {
|
||||||
|
ContentID string `json:"content_id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
Value string `json:"value"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
CreatedBy string `json:"created_by"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, createContentVersion,
|
||||||
|
arg.ContentID,
|
||||||
|
arg.SiteID,
|
||||||
|
arg.Value,
|
||||||
|
arg.Type,
|
||||||
|
arg.CreatedBy,
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const deleteOldVersions = `-- name: DeleteOldVersions :exec
|
||||||
|
DELETE FROM content_versions
|
||||||
|
WHERE created_at < $1 AND site_id = $2
|
||||||
|
`
|
||||||
|
|
||||||
|
type DeleteOldVersionsParams struct {
|
||||||
|
CreatedBefore int64 `json:"created_before"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, deleteOldVersions, arg.CreatedBefore, arg.SiteID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const getAllVersionsForSite = `-- name: GetAllVersionsForSite :many
|
||||||
|
SELECT
|
||||||
|
cv.version_id, cv.content_id, cv.site_id, cv.value, cv.type, cv.created_at, cv.created_by,
|
||||||
|
c.value as current_value
|
||||||
|
FROM content_versions cv
|
||||||
|
LEFT JOIN content c ON cv.content_id = c.id AND cv.site_id = c.site_id
|
||||||
|
WHERE cv.site_id = $1
|
||||||
|
ORDER BY cv.created_at DESC
|
||||||
|
LIMIT $2
|
||||||
|
`
|
||||||
|
|
||||||
|
type GetAllVersionsForSiteParams struct {
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
LimitCount int32 `json:"limit_count"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GetAllVersionsForSiteRow struct {
|
||||||
|
VersionID int32 `json:"version_id"`
|
||||||
|
ContentID string `json:"content_id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
Value string `json:"value"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
CreatedAt int64 `json:"created_at"`
|
||||||
|
CreatedBy string `json:"created_by"`
|
||||||
|
CurrentValue sql.NullString `json:"current_value"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error) {
|
||||||
|
rows, err := q.db.QueryContext(ctx, getAllVersionsForSite, arg.SiteID, arg.LimitCount)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []GetAllVersionsForSiteRow
|
||||||
|
for rows.Next() {
|
||||||
|
var i GetAllVersionsForSiteRow
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.VersionID,
|
||||||
|
&i.ContentID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.CreatedBy,
|
||||||
|
&i.CurrentValue,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
const getContentVersion = `-- name: GetContentVersion :one
|
||||||
|
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||||
|
FROM content_versions
|
||||||
|
WHERE version_id = $1
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) GetContentVersion(ctx context.Context, versionID int32) (ContentVersion, error) {
|
||||||
|
row := q.db.QueryRowContext(ctx, getContentVersion, versionID)
|
||||||
|
var i ContentVersion
|
||||||
|
err := row.Scan(
|
||||||
|
&i.VersionID,
|
||||||
|
&i.ContentID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.CreatedBy,
|
||||||
|
)
|
||||||
|
return i, err
|
||||||
|
}
|
||||||
|
|
||||||
|
const getContentVersionHistory = `-- name: GetContentVersionHistory :many
|
||||||
|
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||||
|
FROM content_versions
|
||||||
|
WHERE content_id = $1 AND site_id = $2
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT $3
|
||||||
|
`
|
||||||
|
|
||||||
|
type GetContentVersionHistoryParams struct {
|
||||||
|
ContentID string `json:"content_id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
LimitCount int32 `json:"limit_count"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error) {
|
||||||
|
rows, err := q.db.QueryContext(ctx, getContentVersionHistory, arg.ContentID, arg.SiteID, arg.LimitCount)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []ContentVersion
|
||||||
|
for rows.Next() {
|
||||||
|
var i ContentVersion
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.VersionID,
|
||||||
|
&i.ContentID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.CreatedBy,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
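The version queries above are designed to be composed with `UpdateContent`: snapshot the current row into `content_versions`, then write the new value. A sketch of that composition, not part of this commit — the `dbsetup` package name and import path are hypothetical, and the real handler's ordering, authorization, and transaction handling are outside this excerpt:

```go
package dbsetup // hypothetical package, for illustration only

import (
	"context"

	"github.com/insertr/insertr/internal/db/postgresql" // assumed import path
)

// updateWithHistory snapshots the current row into content_versions, then
// writes the new value with UpdateContent.
func updateWithHistory(ctx context.Context, q *postgresql.Queries, arg postgresql.UpdateContentParams) (postgresql.Content, error) {
	current, err := q.GetContent(ctx, postgresql.GetContentParams{ID: arg.ID, SiteID: arg.SiteID})
	if err != nil {
		return postgresql.Content{}, err
	}
	if err := q.CreateContentVersion(ctx, postgresql.CreateContentVersionParams{
		ContentID: current.ID,
		SiteID:    current.SiteID,
		Value:     current.Value,
		Type:      current.Type,
		CreatedBy: arg.LastEditedBy,
	}); err != nil {
		return postgresql.Content{}, err
	}
	return q.UpdateContent(ctx, arg)
}
```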
214
internal/db/sqlite/content.sql.go
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
// Code generated by sqlc. DO NOT EDIT.
|
||||||
|
// versions:
|
||||||
|
// sqlc v1.29.0
|
||||||
|
// source: content.sql
|
||||||
|
|
||||||
|
package sqlite
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
const createContent = `-- name: CreateContent :one
|
||||||
|
INSERT INTO content (id, site_id, value, type, last_edited_by)
|
||||||
|
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||||
|
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||||
|
`
|
||||||
|
|
||||||
|
type CreateContentParams struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
Value string `json:"value"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
LastEditedBy string `json:"last_edited_by"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) CreateContent(ctx context.Context, arg CreateContentParams) (Content, error) {
|
||||||
|
row := q.db.QueryRowContext(ctx, createContent,
|
||||||
|
arg.ID,
|
||||||
|
arg.SiteID,
|
||||||
|
arg.Value,
|
||||||
|
arg.Type,
|
||||||
|
arg.LastEditedBy,
|
||||||
|
)
|
||||||
|
var i Content
|
||||||
|
err := row.Scan(
|
||||||
|
&i.ID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.UpdatedAt,
|
||||||
|
&i.LastEditedBy,
|
||||||
|
)
|
||||||
|
return i, err
|
||||||
|
}
|
||||||
|
|
||||||
|
const deleteContent = `-- name: DeleteContent :exec
|
||||||
|
DELETE FROM content
|
||||||
|
WHERE id = ?1 AND site_id = ?2
|
||||||
|
`
|
||||||
|
|
||||||
|
type DeleteContentParams struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) DeleteContent(ctx context.Context, arg DeleteContentParams) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, deleteContent, arg.ID, arg.SiteID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const getAllContent = `-- name: GetAllContent :many
|
||||||
|
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||||
|
FROM content
|
||||||
|
WHERE site_id = ?1
|
||||||
|
ORDER BY updated_at DESC
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) GetAllContent(ctx context.Context, siteID string) ([]Content, error) {
|
||||||
|
rows, err := q.db.QueryContext(ctx, getAllContent, siteID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []Content
|
||||||
|
for rows.Next() {
|
||||||
|
var i Content
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.ID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.UpdatedAt,
|
||||||
|
&i.LastEditedBy,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
const getBulkContent = `-- name: GetBulkContent :many
|
||||||
|
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||||
|
FROM content
|
||||||
|
WHERE site_id = ?1 AND id IN (/*SLICE:ids*/?)
|
||||||
|
`
|
||||||
|
|
||||||
|
type GetBulkContentParams struct {
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
Ids []string `json:"ids"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error) {
|
||||||
|
query := getBulkContent
|
||||||
|
var queryParams []interface{}
|
||||||
|
queryParams = append(queryParams, arg.SiteID)
|
||||||
|
if len(arg.Ids) > 0 {
|
||||||
|
for _, v := range arg.Ids {
|
||||||
|
queryParams = append(queryParams, v)
|
||||||
|
}
|
||||||
|
query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(arg.Ids))[1:], 1)
|
||||||
|
} else {
|
||||||
|
query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1)
|
||||||
|
}
|
||||||
|
rows, err := q.db.QueryContext(ctx, query, queryParams...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []Content
|
||||||
|
for rows.Next() {
|
||||||
|
var i Content
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.ID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.UpdatedAt,
|
||||||
|
&i.LastEditedBy,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
const getContent = `-- name: GetContent :one
|
||||||
|
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||||
|
FROM content
|
||||||
|
WHERE id = ?1 AND site_id = ?2
|
||||||
|
`
|
||||||
|
|
||||||
|
type GetContentParams struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) GetContent(ctx context.Context, arg GetContentParams) (Content, error) {
|
||||||
|
row := q.db.QueryRowContext(ctx, getContent, arg.ID, arg.SiteID)
|
||||||
|
var i Content
|
||||||
|
err := row.Scan(
|
||||||
|
&i.ID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.UpdatedAt,
|
||||||
|
&i.LastEditedBy,
|
||||||
|
)
|
||||||
|
return i, err
|
||||||
|
}
|
||||||
|
|
||||||
|
const updateContent = `-- name: UpdateContent :one
|
||||||
|
UPDATE content
|
||||||
|
SET value = ?1, type = ?2, last_edited_by = ?3
|
||||||
|
WHERE id = ?4 AND site_id = ?5
|
||||||
|
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||||
|
`
|
||||||
|
|
||||||
|
type UpdateContentParams struct {
|
||||||
|
Value string `json:"value"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
LastEditedBy string `json:"last_edited_by"`
|
||||||
|
ID string `json:"id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error) {
|
||||||
|
row := q.db.QueryRowContext(ctx, updateContent,
|
||||||
|
arg.Value,
|
||||||
|
arg.Type,
|
||||||
|
arg.LastEditedBy,
|
||||||
|
arg.ID,
|
||||||
|
arg.SiteID,
|
||||||
|
)
|
||||||
|
var i Content
|
||||||
|
err := row.Scan(
|
||||||
|
&i.ID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.UpdatedAt,
|
||||||
|
&i.LastEditedBy,
|
||||||
|
)
|
||||||
|
return i, err
|
||||||
|
}
|
||||||
31
internal/db/sqlite/db.go
Normal file
@@ -0,0 +1,31 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0

package sqlite

import (
	"context"
	"database/sql"
)

type DBTX interface {
	ExecContext(context.Context, string, ...interface{}) (sql.Result, error)
	PrepareContext(context.Context, string) (*sql.Stmt, error)
	QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
	QueryRowContext(context.Context, string, ...interface{}) *sql.Row
}

func New(db DBTX) *Queries {
	return &Queries{db: db}
}

type Queries struct {
	db DBTX
}

func (q *Queries) WithTx(tx *sql.Tx) *Queries {
	return &Queries{
		db: tx,
	}
}
25
internal/db/sqlite/models.go
Normal file
@@ -0,0 +1,25 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0

package sqlite

type Content struct {
	ID           string `json:"id"`
	SiteID       string `json:"site_id"`
	Value        string `json:"value"`
	Type         string `json:"type"`
	CreatedAt    int64  `json:"created_at"`
	UpdatedAt    int64  `json:"updated_at"`
	LastEditedBy string `json:"last_edited_by"`
}

type ContentVersion struct {
	VersionID int64  `json:"version_id"`
	ContentID string `json:"content_id"`
	SiteID    string `json:"site_id"`
	Value     string `json:"value"`
	Type      string `json:"type"`
	CreatedAt int64  `json:"created_at"`
	CreatedBy string `json:"created_by"`
}
27
internal/db/sqlite/querier.go
Normal file
@@ -0,0 +1,27 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0

package sqlite

import (
	"context"
)

type Querier interface {
	CreateContent(ctx context.Context, arg CreateContentParams) (Content, error)
	CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error
	DeleteContent(ctx context.Context, arg DeleteContentParams) error
	DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error
	GetAllContent(ctx context.Context, siteID string) ([]Content, error)
	GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error)
	GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error)
	GetContent(ctx context.Context, arg GetContentParams) (Content, error)
	GetContentVersion(ctx context.Context, versionID int64) (ContentVersion, error)
	GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error)
	InitializeSchema(ctx context.Context) error
	InitializeVersionsTable(ctx context.Context) error
	UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error)
}

var _ Querier = (*Queries)(nil)
45
internal/db/sqlite/setup.sql.go
Normal file
@@ -0,0 +1,45 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
//   sqlc v1.29.0
// source: setup.sql

package sqlite

import (
	"context"
)

const initializeSchema = `-- name: InitializeSchema :exec
CREATE TABLE IF NOT EXISTS content (
    id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
    created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    updated_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    last_edited_by TEXT DEFAULT 'system' NOT NULL,
    PRIMARY KEY (id, site_id)
)
`

func (q *Queries) InitializeSchema(ctx context.Context) error {
	_, err := q.db.ExecContext(ctx, initializeSchema)
	return err
}

const initializeVersionsTable = `-- name: InitializeVersionsTable :exec
CREATE TABLE IF NOT EXISTS content_versions (
    version_id INTEGER PRIMARY KEY AUTOINCREMENT,
    content_id TEXT NOT NULL,
    site_id TEXT NOT NULL,
    value TEXT NOT NULL,
    type TEXT NOT NULL,
    created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
    created_by TEXT DEFAULT 'system' NOT NULL
)
`

func (q *Queries) InitializeVersionsTable(ctx context.Context) error {
	_, err := q.db.ExecContext(ctx, initializeVersionsTable)
	return err
}
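This is the schema behind the `--db ./dev.db` flag used by the Air config and the justfile recipes. A sketch of opening that file and initializing both tables, not part of this commit — the `github.com/mattn/go-sqlite3` driver and the `internal/db/sqlite` import path are assumptions for illustration:

```go
package main

import (
	"context"
	"database/sql"
	"log"

	_ "github.com/mattn/go-sqlite3" // assumed SQLite driver; the project may use a pure-Go one

	"github.com/insertr/insertr/internal/db/sqlite" // assumed import path
)

func main() {
	ctx := context.Background()

	// Matches the dev workflow's --db ./dev.db; the file is created on first use.
	conn, err := sql.Open("sqlite3", "./dev.db")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	q := sqlite.New(conn)
	if err := q.InitializeSchema(ctx); err != nil {
		log.Fatal(err)
	}
	if err := q.InitializeVersionsTable(ctx); err != nil {
		log.Fatal(err)
	}
	log.Println("dev.db ready")
}
```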
175
internal/db/sqlite/versions.sql.go
Normal file
@@ -0,0 +1,175 @@
|
|||||||
|
// Code generated by sqlc. DO NOT EDIT.
|
||||||
|
// versions:
|
||||||
|
// sqlc v1.29.0
|
||||||
|
// source: versions.sql
|
||||||
|
|
||||||
|
package sqlite
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
const createContentVersion = `-- name: CreateContentVersion :exec
|
||||||
|
INSERT INTO content_versions (content_id, site_id, value, type, created_by)
|
||||||
|
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||||
|
`
|
||||||
|
|
||||||
|
type CreateContentVersionParams struct {
|
||||||
|
ContentID string `json:"content_id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
Value string `json:"value"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
CreatedBy string `json:"created_by"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, createContentVersion,
|
||||||
|
arg.ContentID,
|
||||||
|
arg.SiteID,
|
||||||
|
arg.Value,
|
||||||
|
arg.Type,
|
||||||
|
arg.CreatedBy,
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const deleteOldVersions = `-- name: DeleteOldVersions :exec
|
||||||
|
DELETE FROM content_versions
|
||||||
|
WHERE created_at < ?1 AND site_id = ?2
|
||||||
|
`
|
||||||
|
|
||||||
|
type DeleteOldVersionsParams struct {
|
||||||
|
CreatedBefore int64 `json:"created_before"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error {
|
||||||
|
_, err := q.db.ExecContext(ctx, deleteOldVersions, arg.CreatedBefore, arg.SiteID)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const getAllVersionsForSite = `-- name: GetAllVersionsForSite :many
|
||||||
|
SELECT
|
||||||
|
cv.version_id, cv.content_id, cv.site_id, cv.value, cv.type, cv.created_at, cv.created_by,
|
||||||
|
c.value as current_value
|
||||||
|
FROM content_versions cv
|
||||||
|
LEFT JOIN content c ON cv.content_id = c.id AND cv.site_id = c.site_id
|
||||||
|
WHERE cv.site_id = ?1
|
||||||
|
ORDER BY cv.created_at DESC
|
||||||
|
LIMIT ?2
|
||||||
|
`
|
||||||
|
|
||||||
|
type GetAllVersionsForSiteParams struct {
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
LimitCount int64 `json:"limit_count"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GetAllVersionsForSiteRow struct {
|
||||||
|
VersionID int64 `json:"version_id"`
|
||||||
|
ContentID string `json:"content_id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
Value string `json:"value"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
CreatedAt int64 `json:"created_at"`
|
||||||
|
CreatedBy string `json:"created_by"`
|
||||||
|
CurrentValue sql.NullString `json:"current_value"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error) {
|
||||||
|
rows, err := q.db.QueryContext(ctx, getAllVersionsForSite, arg.SiteID, arg.LimitCount)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []GetAllVersionsForSiteRow
|
||||||
|
for rows.Next() {
|
||||||
|
var i GetAllVersionsForSiteRow
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.VersionID,
|
||||||
|
&i.ContentID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.CreatedBy,
|
||||||
|
&i.CurrentValue,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
const getContentVersion = `-- name: GetContentVersion :one
|
||||||
|
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||||
|
FROM content_versions
|
||||||
|
WHERE version_id = ?1
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) GetContentVersion(ctx context.Context, versionID int64) (ContentVersion, error) {
|
||||||
|
row := q.db.QueryRowContext(ctx, getContentVersion, versionID)
|
||||||
|
var i ContentVersion
|
||||||
|
err := row.Scan(
|
||||||
|
&i.VersionID,
|
||||||
|
&i.ContentID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.CreatedBy,
|
||||||
|
)
|
||||||
|
return i, err
|
||||||
|
}
|
||||||
|
|
||||||
|
const getContentVersionHistory = `-- name: GetContentVersionHistory :many
|
||||||
|
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||||
|
FROM content_versions
|
||||||
|
WHERE content_id = ?1 AND site_id = ?2
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT ?3
|
||||||
|
`
|
||||||
|
|
||||||
|
type GetContentVersionHistoryParams struct {
|
||||||
|
ContentID string `json:"content_id"`
|
||||||
|
SiteID string `json:"site_id"`
|
||||||
|
LimitCount int64 `json:"limit_count"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error) {
|
||||||
|
rows, err := q.db.QueryContext(ctx, getContentVersionHistory, arg.ContentID, arg.SiteID, arg.LimitCount)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []ContentVersion
|
||||||
|
for rows.Next() {
|
||||||
|
var i ContentVersion
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.VersionID,
|
||||||
|
&i.ContentID,
|
||||||
|
&i.SiteID,
|
||||||
|
&i.Value,
|
||||||
|
&i.Type,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.CreatedBy,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
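These queries are the SQLite half of the rollback feature mentioned in the commit message. A sketch of the rollback flow, not part of this commit — load a stored snapshot by `version_id` and write it back as the current value; the `dbsetup` package name and import path are hypothetical, and the HTTP handler that exposes this is outside the excerpt:

```go
package dbsetup // hypothetical package, for illustration only

import (
	"context"

	"github.com/insertr/insertr/internal/db/sqlite" // assumed import path
)

// rollbackToVersion restores a previous snapshot as the current content value.
func rollbackToVersion(ctx context.Context, q *sqlite.Queries, versionID int64, editedBy string) (sqlite.Content, error) {
	v, err := q.GetContentVersion(ctx, versionID)
	if err != nil {
		return sqlite.Content{}, err
	}
	return q.UpdateContent(ctx, sqlite.UpdateContentParams{
		Value:        v.Value,
		Type:         v.Type,
		LastEditedBy: editedBy,
		ID:           v.ContentID,
		SiteID:       v.SiteID,
	})
}
```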
167
internal/parser/id_generator.go
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
package parser
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/sha1"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
// IDGenerator generates unique content IDs for elements
|
||||||
|
type IDGenerator struct {
|
||||||
|
usedIDs map[string]bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewIDGenerator creates a new ID generator
|
||||||
|
func NewIDGenerator() *IDGenerator {
|
||||||
|
return &IDGenerator{
|
||||||
|
usedIDs: make(map[string]bool),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate creates a content ID for an HTML element
|
||||||
|
func (g *IDGenerator) Generate(node *html.Node) string {
|
||||||
|
context := g.getSemanticContext(node)
|
||||||
|
purpose := g.getPurpose(node)
|
||||||
|
contentHash := g.getContentHash(node)
|
||||||
|
|
||||||
|
baseID := g.createBaseID(context, purpose, contentHash)
|
||||||
|
return g.ensureUnique(baseID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSemanticContext determines the semantic context from parent elements
|
||||||
|
func (g *IDGenerator) getSemanticContext(node *html.Node) string {
|
||||||
|
// Walk up the tree to find semantic containers
|
||||||
|
parent := node.Parent
|
||||||
|
for parent != nil && parent.Type == html.ElementNode {
|
||||||
|
classes := getClasses(parent)
|
||||||
|
|
||||||
|
// Check for common semantic section classes
|
||||||
|
for _, class := range []string{"hero", "services", "nav", "navbar", "footer", "about", "contact", "testimonial"} {
|
||||||
|
if containsClass(classes, class) {
|
||||||
|
return class
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for semantic HTML elements
|
||||||
|
switch parent.Data {
|
||||||
|
case "nav":
|
||||||
|
return "nav"
|
||||||
|
case "header":
|
||||||
|
return "header"
|
||||||
|
case "footer":
|
||||||
|
return "footer"
|
||||||
|
case "main":
|
||||||
|
return "main"
|
||||||
|
case "aside":
|
||||||
|
return "aside"
|
||||||
|
}
|
||||||
|
|
||||||
|
parent = parent.Parent
|
||||||
|
}
|
||||||
|
|
||||||
|
return "content"
|
||||||
|
}
|
||||||
|
|
||||||
|
// getPurpose determines the purpose/role of the element
|
||||||
|
func (g *IDGenerator) getPurpose(node *html.Node) string {
|
||||||
|
tag := strings.ToLower(node.Data)
|
||||||
|
classes := getClasses(node)
|
||||||
|
|
||||||
|
// Check for specific CSS classes that indicate purpose
|
||||||
|
for _, class := range classes {
|
||||||
|
switch {
|
||||||
|
case strings.Contains(class, "title"):
|
||||||
|
return "title"
|
||||||
|
case strings.Contains(class, "headline"):
|
||||||
|
return "headline"
|
||||||
|
case strings.Contains(class, "description"):
|
||||||
|
return "description"
|
||||||
|
case strings.Contains(class, "subtitle"):
|
||||||
|
return "subtitle"
|
||||||
|
case strings.Contains(class, "cta"):
|
||||||
|
return "cta"
|
||||||
|
case strings.Contains(class, "button"):
|
||||||
|
return "button"
|
||||||
|
case strings.Contains(class, "logo"):
|
||||||
|
return "logo"
|
||||||
|
case strings.Contains(class, "lead"):
|
||||||
|
return "lead"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Infer purpose from HTML tag
|
||||||
|
switch tag {
|
||||||
|
case "h1":
|
||||||
|
return "title"
|
||||||
|
case "h2":
|
||||||
|
return "subtitle"
|
||||||
|
case "h3", "h4", "h5", "h6":
|
||||||
|
return "heading"
|
||||||
|
case "p":
|
||||||
|
return "text"
|
||||||
|
case "a":
|
||||||
|
return "link"
|
||||||
|
case "button":
|
||||||
|
return "button"
|
||||||
|
default:
|
||||||
|
return "content"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getContentHash creates a short hash of the content for ID generation
|
||||||
|
func (g *IDGenerator) getContentHash(node *html.Node) string {
|
||||||
|
text := extractTextContent(node)
|
||||||
|
|
||||||
|
// Create hash of the text content
|
||||||
|
hash := fmt.Sprintf("%x", sha1.Sum([]byte(text)))
|
||||||
|
|
||||||
|
// Return first 6 characters for brevity
|
||||||
|
return hash[:6]
|
||||||
|
}
|
||||||
|
|
||||||
|
// createBaseID creates the base ID from components
|
||||||
|
func (g *IDGenerator) createBaseID(context, purpose, contentHash string) string {
|
||||||
|
parts := []string{}
|
||||||
|
|
||||||
|
// Add context if meaningful
|
||||||
|
if context != "content" {
|
||||||
|
parts = append(parts, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add purpose
|
||||||
|
parts = append(parts, purpose)
|
||||||
|
|
||||||
|
// Always add content hash for uniqueness
|
||||||
|
parts = append(parts, contentHash)
|
||||||
|
|
||||||
|
baseID := strings.Join(parts, "-")
|
||||||
|
|
||||||
|
// Clean up the ID
|
||||||
|
baseID = regexp.MustCompile(`-+`).ReplaceAllString(baseID, "-")
|
||||||
|
baseID = strings.Trim(baseID, "-")
|
||||||
|
|
||||||
|
// Ensure it's not empty
|
||||||
|
if baseID == "" {
|
||||||
|
baseID = fmt.Sprintf("content-%s", contentHash)
|
||||||
|
}
|
||||||
|
|
||||||
|
return baseID
|
||||||
|
}
|
||||||
|
|
||||||
|
// ensureUnique makes sure the ID is unique by adding a suffix if needed
|
||||||
|
func (g *IDGenerator) ensureUnique(baseID string) string {
|
||||||
|
if !g.usedIDs[baseID] {
|
||||||
|
g.usedIDs[baseID] = true
|
||||||
|
return baseID
|
||||||
|
}
|
||||||
|
|
||||||
|
// If base ID is taken, add a hash suffix
|
||||||
|
hash := fmt.Sprintf("%x", sha1.Sum([]byte(baseID)))[:6]
|
||||||
|
uniqueID := fmt.Sprintf("%s-%s", baseID, hash)
|
||||||
|
|
||||||
|
g.usedIDs[uniqueID] = true
|
||||||
|
return uniqueID
|
||||||
|
}
|
||||||
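The generator combines three parts: a semantic context from the nearest recognizable ancestor, a purpose inferred from classes or the tag, and a short SHA-1 hash of the text. A sketch of what that produces for a simple fragment, not part of this commit — the `internal/parser` import path and the `firstElement` helper are assumptions, and the exact hash suffix depends on the element's text:

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"golang.org/x/net/html"

	"github.com/insertr/insertr/internal/parser" // assumed import path
)

// firstElement walks the parsed tree and returns the first element with the given tag.
func firstElement(n *html.Node, tag string) *html.Node {
	if n.Type == html.ElementNode && n.Data == tag {
		return n
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		if found := firstElement(c, tag); found != nil {
			return found
		}
	}
	return nil
}

func main() {
	const page = `<section class="hero"><h1 class="insertr hero-title">Welcome</h1></section>`
	doc, err := html.Parse(strings.NewReader(page))
	if err != nil {
		log.Fatal(err)
	}

	gen := parser.NewIDGenerator()
	h1 := firstElement(doc, "h1")
	// Expect something like "hero-title-<6 hex chars>": "hero" from the parent
	// section's class, "title" from the element's class, plus a content hash.
	fmt.Println(gen.Generate(h1))
}
```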
229
internal/parser/parser.go
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
package parser
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io/fs"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Parser handles HTML parsing and element detection
|
||||||
|
type Parser struct {
|
||||||
|
idGenerator *IDGenerator
|
||||||
|
}
|
||||||
|
|
||||||
|
// New creates a new Parser instance
|
||||||
|
func New() *Parser {
|
||||||
|
return &Parser{
|
||||||
|
idGenerator: NewIDGenerator(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseDirectory parses all HTML files in the given directory
|
||||||
|
func (p *Parser) ParseDirectory(dir string) (*ParseResult, error) {
|
||||||
|
result := &ParseResult{
|
||||||
|
Elements: []Element{},
|
||||||
|
Warnings: []string{},
|
||||||
|
Stats: ParseStats{
|
||||||
|
TypeBreakdown: make(map[ContentType]int),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only process HTML files
|
||||||
|
if d.IsDir() || !strings.HasSuffix(strings.ToLower(path), ".html") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
elements, warnings, err := p.parseFile(path)
|
||||||
|
if err != nil {
|
||||||
|
result.Warnings = append(result.Warnings,
|
||||||
|
fmt.Sprintf("Error parsing %s: %v", path, err))
|
||||||
|
return nil // Continue processing other files
|
||||||
|
}
|
||||||
|
|
||||||
|
result.Elements = append(result.Elements, elements...)
|
||||||
|
result.Warnings = append(result.Warnings, warnings...)
|
||||||
|
result.Stats.FilesProcessed++
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error walking directory: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate statistics
|
||||||
|
p.calculateStats(result)
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseFile parses a single HTML file
|
||||||
|
func (p *Parser) parseFile(filePath string) ([]Element, []string, error) {
|
||||||
|
file, err := os.Open(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("error opening file: %w", err)
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
doc, err := html.Parse(file)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, fmt.Errorf("error parsing HTML: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var elements []Element
|
||||||
|
var warnings []string
|
||||||
|
|
||||||
|
p.findInsertrElements(doc, filePath, &elements, &warnings)
|
||||||
|
|
||||||
|
return elements, warnings, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// findInsertrElements recursively finds all elements with "insertr" class
|
||||||
|
func (p *Parser) findInsertrElements(node *html.Node, filePath string, elements *[]Element, warnings *[]string) {
|
||||||
|
if node.Type == html.ElementNode {
|
||||||
|
classes := getClasses(node)
|
||||||
|
|
||||||
|
// Check if element has "insertr" class
|
||||||
|
if containsClass(classes, "insertr") {
|
||||||
|
if isContainer(node) {
|
||||||
|
// Container element - expand to viable children
|
||||||
|
viableChildren := findViableChildren(node)
|
||||||
|
for _, child := range viableChildren {
|
||||||
|
childClasses := getClasses(child)
|
||||||
|
element, warning := p.createElement(child, filePath, childClasses)
|
||||||
|
*elements = append(*elements, element)
|
||||||
|
if warning != "" {
|
||||||
|
*warnings = append(*warnings, warning)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Don't process children recursively since we've handled the container's children
|
||||||
|
return
|
||||||
|
} else {
|
||||||
|
// Regular element - process as before
|
||||||
|
element, warning := p.createElement(node, filePath, classes)
|
||||||
|
*elements = append(*elements, element)
|
||||||
|
if warning != "" {
|
||||||
|
*warnings = append(*warnings, warning)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recursively check children
|
||||||
|
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||||
|
p.findInsertrElements(child, filePath, elements, warnings)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// createElement creates an Element from an HTML node
|
||||||
|
func (p *Parser) createElement(node *html.Node, filePath string, classes []string) (Element, string) {
|
||||||
|
var warning string
|
||||||
|
|
||||||
|
// Resolve content ID (existing or generated)
|
||||||
|
contentID, hasExistingID := p.resolveContentID(node)
|
||||||
|
if !hasExistingID {
|
||||||
|
contentID = p.idGenerator.Generate(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect content type
|
||||||
|
contentType := p.detectContentType(node, classes)
|
||||||
|
|
||||||
|
// Extract text content
|
||||||
|
content := extractTextContent(node)
|
||||||
|
|
||||||
|
element := Element{
|
||||||
|
FilePath: filePath,
|
||||||
|
Node: node,
|
||||||
|
ContentID: contentID,
|
||||||
|
Type: contentType,
|
||||||
|
Tag: strings.ToLower(node.Data),
|
||||||
|
Classes: classes,
|
||||||
|
Content: content,
|
||||||
|
HasID: hasExistingID,
|
||||||
|
Generated: !hasExistingID,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate warnings for edge cases
|
||||||
|
if content == "" {
|
||||||
|
warning = fmt.Sprintf("Element <%s> with id '%s' has no text content",
|
||||||
|
element.Tag, element.ContentID)
|
||||||
|
}
|
||||||
|
|
||||||
|
return element, warning
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveContentID gets the content ID from existing attributes
|
||||||
|
func (p *Parser) resolveContentID(node *html.Node) (string, bool) {
|
||||||
|
// 1. Check for existing HTML id attribute
|
||||||
|
if id := getAttribute(node, "id"); id != "" {
|
||||||
|
return id, true
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Check for data-content-id attribute
|
||||||
|
if contentID := getAttribute(node, "data-content-id"); contentID != "" {
|
||||||
|
return contentID, true
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. No existing ID found
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
|
||||||
|
// detectContentType determines the content type based on element and classes
|
||||||
|
func (p *Parser) detectContentType(node *html.Node, classes []string) ContentType {
|
||||||
|
// Check for explicit type classes first
|
||||||
|
if containsClass(classes, "insertr-markdown") {
|
||||||
|
return ContentMarkdown
|
||||||
|
}
|
||||||
|
if containsClass(classes, "insertr-link") {
|
||||||
|
return ContentLink
|
||||||
|
}
|
||||||
|
if containsClass(classes, "insertr-text") {
|
||||||
|
return ContentText
|
||||||
|
}
|
||||||
|
|
||||||
|
// Infer from HTML tag and context
|
||||||
|
tag := strings.ToLower(node.Data)
|
||||||
|
switch tag {
|
||||||
|
case "h1", "h2", "h3", "h4", "h5", "h6":
|
||||||
|
return ContentText
|
||||||
|
case "p":
|
||||||
|
// Paragraphs default to markdown for rich content
|
||||||
|
return ContentMarkdown
|
||||||
|
case "a", "button":
|
||||||
|
return ContentLink
|
||||||
|
case "div", "section":
|
||||||
|
// Default divs/sections to markdown for rich content
|
||||||
|
return ContentMarkdown
|
||||||
|
case "span":
|
||||||
|
return ContentText
|
||||||
|
default:
|
||||||
|
return ContentText
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// calculateStats computes statistics for the parse result
|
||||||
|
func (p *Parser) calculateStats(result *ParseResult) {
|
||||||
|
result.Stats.TotalElements = len(result.Elements)
|
||||||
|
|
||||||
|
for _, element := range result.Elements {
|
||||||
|
// Count existing vs generated IDs
|
||||||
|
if element.HasID {
|
||||||
|
result.Stats.ExistingIDs++
|
||||||
|
} else {
|
||||||
|
result.Stats.GeneratedIDs++
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count content types
|
||||||
|
result.Stats.TypeBreakdown[element.Type]++
|
||||||
|
}
|
||||||
|
}
|
||||||
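The parser is the entry point for the `enhance` flow: it walks a directory, collects every element carrying the `insertr` class, and reports per-type counts plus how many IDs had to be generated. A usage sketch, not part of this commit — the `internal/parser` import path is an assumption, and `demo-site` is just the directory used elsewhere in the repo:

```go
package main

import (
	"fmt"
	"log"

	"github.com/insertr/insertr/internal/parser" // assumed import path
)

func main() {
	p := parser.New()

	result, err := p.ParseDirectory("demo-site")
	if err != nil {
		log.Fatal(err)
	}

	// One line per detected element, then the aggregate stats and any warnings.
	for _, el := range result.Elements {
		fmt.Printf("%-30s %-8s %s\n", el.ContentID, el.Type, el.FilePath)
	}
	fmt.Printf("files=%d elements=%d existing-ids=%d generated-ids=%d\n",
		result.Stats.FilesProcessed, result.Stats.TotalElements,
		result.Stats.ExistingIDs, result.Stats.GeneratedIDs)
	for _, w := range result.Warnings {
		fmt.Println("warning:", w)
	}
}
```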
41
internal/parser/types.go
Normal file
@@ -0,0 +1,41 @@
package parser

import "golang.org/x/net/html"

// ContentType represents the type of editable content
type ContentType string

const (
	ContentText     ContentType = "text"
	ContentMarkdown ContentType = "markdown"
	ContentLink     ContentType = "link"
)

// Element represents a parsed editable element
type Element struct {
	FilePath  string      `json:"file_path"`
	Node      *html.Node  `json:"-"` // Don't serialize HTML node
	ContentID string      `json:"content_id"`
	Type      ContentType `json:"type"`
	Tag       string      `json:"tag"`
	Classes   []string    `json:"classes"`
	Content   string      `json:"content"`
	HasID     bool        `json:"has_id"`    // Whether element had existing ID
	Generated bool        `json:"generated"` // Whether ID was generated
}

// ParseResult contains the results of parsing HTML files
type ParseResult struct {
	Elements []Element  `json:"elements"`
	Warnings []string   `json:"warnings"`
	Stats    ParseStats `json:"stats"`
}

// ParseStats provides statistics about the parsing operation
type ParseStats struct {
	FilesProcessed int                 `json:"files_processed"`
	TotalElements  int                 `json:"total_elements"`
	ExistingIDs    int                 `json:"existing_ids"`
	GeneratedIDs   int                 `json:"generated_ids"`
	TypeBreakdown  map[ContentType]int `json:"type_breakdown"`
}
159
internal/parser/utils.go
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
package parser
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/net/html"
|
||||||
|
)
|
||||||
|
|
||||||
|
// getClasses extracts CSS classes from an HTML node
|
||||||
|
func getClasses(node *html.Node) []string {
|
||||||
|
classAttr := getAttribute(node, "class")
|
||||||
|
if classAttr == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
classes := strings.Fields(classAttr)
|
||||||
|
return classes
|
||||||
|
}
|
||||||
|
|
||||||
|
// containsClass checks if a class list contains a specific class
|
||||||
|
func containsClass(classes []string, target string) bool {
|
||||||
|
for _, class := range classes {
|
||||||
|
if class == target {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// getAttribute gets an attribute value from an HTML node
|
||||||
|
func getAttribute(node *html.Node, key string) string {
|
||||||
|
for _, attr := range node.Attr {
|
||||||
|
if attr.Key == key {
|
||||||
|
return attr.Val
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractTextContent gets the text content from an HTML node
|
||||||
|
func extractTextContent(node *html.Node) string {
|
||||||
|
var text strings.Builder
|
||||||
|
extractTextRecursive(node, &text)
|
||||||
|
return strings.TrimSpace(text.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractTextRecursive recursively extracts text from node and children
|
||||||
|
func extractTextRecursive(node *html.Node, text *strings.Builder) {
|
||||||
|
if node.Type == html.TextNode {
|
||||||
|
text.WriteString(node.Data)
|
||||||
|
}
|
||||||
|
|
||||||
|
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||||
|
// Skip script and style elements
|
||||||
|
if child.Type == html.ElementNode &&
|
||||||
|
(child.Data == "script" || child.Data == "style") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
extractTextRecursive(child, text)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasOnlyTextContent checks if a node contains only text content (no nested HTML elements)
|
||||||
|
func hasOnlyTextContent(node *html.Node) bool {
|
||||||
|
if node.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||||
|
switch child.Type {
|
||||||
|
case html.ElementNode:
|
||||||
|
// Found a nested HTML element - not text-only
|
||||||
|
return false
|
||||||
|
case html.TextNode:
|
||||||
|
// Text nodes are fine, continue checking
|
||||||
|
continue
|
||||||
|
default:
|
||||||
|
// Comments, etc. - continue checking
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// isContainer checks if a tag is typically used as a container element
|
||||||
|
func isContainer(node *html.Node) bool {
|
||||||
|
if node.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
containerTags := map[string]bool{
|
||||||
|
"div": true,
|
||||||
|
"section": true,
|
||||||
|
"article": true,
|
||||||
|
"header": true,
|
||||||
|
"footer": true,
|
||||||
|
"main": true,
|
||||||
|
"aside": true,
|
||||||
|
"nav": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
return containerTags[node.Data]
|
||||||
|
}
|
||||||
|
|
||||||
|
// findViableChildren finds all child elements that are viable for editing
|
||||||
|
func findViableChildren(node *html.Node) []*html.Node {
|
||||||
|
var viable []*html.Node
|
||||||
|
|
||||||
|
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||||
|
// Skip whitespace-only text nodes
|
||||||
|
if child.Type == html.TextNode {
|
||||||
|
if strings.TrimSpace(child.Data) == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only consider element nodes
|
||||||
|
if child.Type != html.ElementNode {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip self-closing elements for now
|
||||||
|
if isSelfClosing(child) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if element has only text content
|
||||||
|
if hasOnlyTextContent(child) {
|
||||||
|
viable = append(viable, child)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return viable
|
||||||
|
}
|
||||||
|
|
||||||
|
// isSelfClosing checks if an element is typically self-closing
|
||||||
|
func isSelfClosing(node *html.Node) bool {
|
||||||
|
if node.Type != html.ElementNode {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
selfClosingTags := map[string]bool{
|
||||||
|
"img": true,
|
||||||
|
"input": true,
|
||||||
|
"br": true,
|
||||||
|
"hr": true,
|
||||||
|
"meta": true,
|
||||||
|
"link": true,
|
||||||
|
"area": true,
|
||||||
|
"base": true,
|
||||||
|
"col": true,
|
||||||
|
"embed": true,
|
||||||
|
"source": true,
|
||||||
|
"track": true,
|
||||||
|
"wbr": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
return selfClosingTags[node.Data]
|
||||||
|
}
|
||||||
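Container expansion is the subtlest part of these helpers: a container marked `insertr` is replaced by its text-only children, while nested markup and self-closing elements are skipped. A hypothetical in-package test sketch illustrating that behavior, not part of this commit — the test name and HTML fragment are invented for illustration:

```go
package parser

import (
	"strings"
	"testing"

	"golang.org/x/net/html"
)

// TestFindViableChildren checks that a container tagged "insertr" expands to its
// text-only children and skips nested markup and self-closing elements.
func TestFindViableChildren(t *testing.T) {
	const fragment = `<div class="insertr">
		<h2>Our Services</h2>
		<p>Fast <strong>reliable</strong> delivery</p>
		<img src="team.jpg">
		<p>Friendly support</p>
	</div>`

	doc, err := html.Parse(strings.NewReader(fragment))
	if err != nil {
		t.Fatal(err)
	}

	var container *html.Node
	var find func(*html.Node)
	find = func(n *html.Node) {
		if n.Type == html.ElementNode && containsClass(getClasses(n), "insertr") {
			container = n
			return
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			find(c)
		}
	}
	find(doc)
	if container == nil {
		t.Fatal("container not found")
	}

	children := findViableChildren(container)
	// <h2> and the second <p> qualify; the first <p> has nested <strong> markup
	// and <img> is self-closing, so both are skipped.
	if len(children) != 2 {
		t.Fatalf("expected 2 viable children, got %d", len(children))
	}
}
```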
92
justfile
@@ -11,7 +11,7 @@ install:
 	cd lib && npm install
 
 # Start full-stack development (primary workflow)
-dev: build-lib server-build
+dev: build-lib build
 	#!/usr/bin/env bash
 	echo "🚀 Starting Full-Stack Insertr Development..."
 	echo "================================================"
@@ -34,9 +34,8 @@ dev: build-lib server-build
 
 	# Start API server with prefixed output
 	echo "🔌 Starting API server (localhost:8080)..."
-	cd insertr-server && ./insertr-server --port 8080 2>&1 | sed 's/^/🔌 [SERVER] /' &
+	./insertr serve --dev-mode --db ./dev.db 2>&1 | sed 's/^/🔌 [SERVER] /' &
 	SERVER_PID=$!
-	cd ..
 
 	# Wait for server startup
 	echo "⏳ Waiting for API server startup..."
@@ -68,10 +67,10 @@ demo-only:
 	npx --prefer-offline live-server demo-site --port=3000 --host=localhost --open=/index.html
 
 # Start development server for about page
-dev-about: build-lib server-build
+dev-about: build-lib build
 	#!/usr/bin/env bash
 	echo "🚀 Starting full-stack development (about page)..."
-	cd insertr-server && ./insertr-server --port 8080 &
+	./insertr serve --dev-mode --db ./dev.db &
 	SERVER_PID=$!
 	sleep 3
 	npx --prefer-offline live-server demo-site --port=3000 --host=localhost --open=/about.html
@@ -79,13 +78,13 @@ dev-about: build-lib build
 
 # Check project status and validate setup
 check:
-	npm run dev:check
+	npm run check
 
 # Show demo instructions
 demo:
-	npm run dev:demo
+	npm run demo
 
-# Build the entire project (library + CLI)
+# Build the entire project (library + unified binary)
 build:
 	npm run build
 
@@ -97,61 +96,55 @@ build-lib:
 watch:
 	cd lib && npm run dev
 
-# Start Air hot-reload for Go CLI development
+# Start Air hot-reload for unified binary development
 air:
-	cd insertr-cli && air
+	air
 
-# Build Go CLI only
-build-cli:
-	cd insertr-cli && go build -o insertr
+# Build unified binary only
+build-insertr:
+	go build -o insertr .
 
-# Run CLI help
-cli-help:
-	cd insertr-cli && go run main.go --help
+# Run insertr help
+help:
+	./insertr --help
 
 # Parse demo site with CLI
 parse:
-	cd insertr-cli && go run main.go parse ../demo-site/
+	./insertr enhance demo-site/ --output ./dist --mock
 
-# Start CLI development server
-servedev:
-	cd insertr-cli && go run main.go servedev -i ../demo-site -p 3000
+# Enhance demo site (build-time content injection)
+enhance input="demo-site" output="dist":
+	./insertr enhance {{input}} --output {{output}} --mock
 
 # === Content API Server Commands ===
 
-# Generate Go code from SQL (using sqlc)
-server-generate:
-	cd insertr-server && sqlc generate
-
-# Build the content API server binary
-server-build:
-	cd insertr-server && go build -o insertr-server ./cmd/server
-
 # Start content API server (default port 8080)
-server port="8080":
-	cd insertr-server && ./insertr-server --port {{port}}
+serve port="8080":
+	./insertr serve --port {{port}} --dev-mode --db ./dev.db
 
+# Start API server in production mode
+serve-prod port="8080" db="./insertr.db":
+	./insertr serve --port {{port}} --db {{db}}
+
 # Start API server with auto-restart on Go file changes
-server-dev port="8080":
-	cd insertr-server && find . -name "*.go" | entr -r go run ./cmd/server --port {{port}}
+serve-dev port="8080":
+	find . -name "*.go" | entr -r ./insertr serve --port {{port}} --dev-mode --db ./dev.db
 
 # Check API server health
-server-health port="8080":
+health port="8080":
 	@echo "🔍 Checking API server health..."
 	@curl -s http://localhost:{{port}}/health | jq . || echo "❌ Server not responding at localhost:{{port}}"
 
-# Clean database (development only - removes all content!)
-server-clean-db:
-	@echo "🗑️ Removing development database..."
-	rm -f insertr-server/insertr.db
-	@echo "✅ Database cleaned (will be recreated on next server start)"
-
 # Clean all build artifacts
 clean:
 	rm -rf lib/dist
-	rm -rf insertr-cli/insertr
+	rm -rf insertr
+	rm -rf tmp
+	rm -rf dist
 	rm -rf node_modules
 	rm -rf lib/node_modules
+	rm -f dev.db
+	rm -f insertr.db
 
 # Lint code (placeholder for now)
 lint:
@@ -167,26 +160,27 @@ dev-setup: install build-lib dev
 # Production workflow: install deps, build everything
 prod-build: install build
 
 
 
 # Show project status
 status:
 	@echo "🏗️ Insertr Project Status"
 	@echo "========================="
 	@echo "📁 Root files:"
-	@ls -la package.json justfile 2>/dev/null || echo " Missing files"
+	@ls -la package.json justfile go.mod insertr.yaml 2>/dev/null || echo " Missing files"
 	@echo "\n📚 Library files:"
 	@ls -la lib/package.json lib/src lib/dist 2>/dev/null || echo " Missing library components"
-	@echo "\n🔧 CLI files:"
+	@echo "\n🔧 Unified binary:"
-	@ls -la insertr-cli/main.go insertr-cli/insertr 2>/dev/null || echo " Missing CLI components"
+	@ls -la insertr main.go cmd/ internal/ 2>/dev/null || echo " Missing unified binary components"
-	@echo "\n🔌 Server files:"
-	@ls -la insertr-server/cmd insertr-server/insertr-server 2>/dev/null || echo " Missing server components"
|
|
||||||
@echo "\n🌐 Demo site:"
|
@echo "\n🌐 Demo site:"
|
||||||
@ls -la demo-site/index.html demo-site/about.html 2>/dev/null || echo " Missing demo files"
|
@ls -la demo-site/index.html demo-site/about.html 2>/dev/null || echo " Missing demo files"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "🚀 Development Commands:"
|
@echo "🚀 Development Commands:"
|
||||||
@echo " just dev - Full-stack development (recommended)"
|
@echo " just dev - Full-stack development (recommended)"
|
||||||
@echo " just demo-only - Demo site only (no persistence)"
|
@echo " just demo-only - Demo site only (no persistence)"
|
||||||
@echo " just server - API server only (localhost:8080)"
|
@echo " just serve - API server only (localhost:8080)"
|
||||||
|
@echo " just enhance - Build-time content injection"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "🔍 Check server: just server-health"
|
@echo "🔍 Check server: just health"
|
||||||
|
|
||||||
|
# Generate sqlc code (for database schema changes)
|
||||||
|
sqlc:
|
||||||
|
sqlc generate
|
||||||
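For orientation, the `health` recipe above assumes the `serve` subcommand exposes a JSON `/health` endpoint on the configured port (it pipes `curl` into `jq`). The server code is not part of this excerpt, so the following is only a minimal, hypothetical sketch of a compatible handler; the response fields are assumptions, not the project's actual payload.

```go
// Hypothetical sketch of a /health handler compatible with `just health`
// (curl + jq expect a JSON body). Not the project's actual server code.
package main

import (
	"encoding/json"
	"log"
	"net/http"
)

func healthHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	// "status" and "version" are illustrative fields; only the /health
	// route and the JSON expectation come from the justfile recipe.
	_ = json.NewEncoder(w).Encode(map[string]string{
		"status":  "ok",
		"version": "dev",
	})
}

func main() {
	http.HandleFunc("/health", healthHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```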
9
main.go
Normal file
@@ -0,0 +1,9 @@
+package main
+
+import (
+	"github.com/insertr/insertr/cmd"
+)
+
+func main() {
+	cmd.Execute()
+}
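The new main.go is intentionally thin: everything is delegated to the cmd package. The actual cmd/root.go is not included in this excerpt, but given the Cobra/Viper imports in cmd/enhance.go and the `insertr --config insertr.yaml` usage from the commit message, a plausible shape for what sits behind cmd.Execute() is sketched below. Identifiers such as cfgFile and the Short text are assumptions.

```go
// Sketch only: a Cobra root command with Viper config loading, where CLI
// flags take precedence over insertr.yaml. Not the committed cmd/root.go.
package cmd

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

var cfgFile string // hypothetical variable name

var rootCmd = &cobra.Command{
	Use:   "insertr",
	Short: "Unified binary: build-time enhancement and content API server",
}

// Execute runs the root command; main.go calls this.
func Execute() {
	if err := rootCmd.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

func init() {
	cobra.OnInitialize(initConfig)
	// Mirrors the `insertr --config insertr.yaml` usage from the commit message.
	rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default ./insertr.yaml)")
}

func initConfig() {
	if cfgFile != "" {
		viper.SetConfigFile(cfgFile)
	} else {
		viper.AddConfigPath(".")
		viper.SetConfigName("insertr")
		viper.SetConfigType("yaml")
	}
	// A missing config file is not fatal; flags take precedence anyway.
	_ = viper.ReadInConfig()
}
```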
@@ -1,15 +1,15 @@
 #!/usr/bin/env node
 
 /**
- * Build script for Insertr library and CLI integration
- * This ensures the CLI always has the latest library version embedded
+ * Build script for Insertr unified binary
+ * This ensures the unified binary always has the latest library version embedded
  */
 
 import { execSync } from 'child_process';
 import fs from 'fs';
 import path from 'path';
 
-console.log('🔨 Building Insertr library and CLI...\n');
+console.log('🔨 Building Insertr unified binary...\n');
 
 // 1. Build the library
 console.log('📦 Building JavaScript library...');
@@ -21,10 +21,10 @@ try {
   process.exit(1);
 }
 
-// 2. Copy built library to CLI assets
-console.log('📁 Copying library to CLI assets...');
+// 2. Copy built library to unified binary assets
+console.log('📁 Copying library to unified binary assets...');
 const srcDir = './lib/dist';
-const destDir = './insertr-cli/pkg/content/assets';
+const destDir = './internal/content/assets';
 
 // Ensure destination directory exists
 fs.mkdirSync(destDir, { recursive: true });
@@ -40,32 +40,21 @@ files.forEach(file => {
 
 console.log('📁 Assets copied successfully\n');
 
-// 3. Build the CLI
-console.log('🔧 Building Go CLI...');
+// 3. Build the unified binary
+console.log('🔧 Building unified Insertr binary...');
 try {
-  execSync('go build -o insertr', { cwd: './insertr-cli', stdio: 'inherit' });
-  console.log('✅ CLI built successfully\n');
+  execSync('go build -o insertr .', { stdio: 'inherit' });
+  console.log('✅ Unified binary built successfully\n');
 } catch (error) {
-  console.error('❌ CLI build failed:', error.message);
-  process.exit(1);
-}
-
-// 4. Build the API Server
-console.log('🔌 Building API Server...');
-try {
-  execSync('go build -o insertr-server ./cmd/server', { cwd: './insertr-server', stdio: 'inherit' });
-  console.log('✅ API Server built successfully\n');
-} catch (error) {
-  console.error('❌ API Server build failed:', error.message);
+  console.error('❌ Unified binary build failed:', error.message);
   process.exit(1);
 }
 
 console.log('🎉 Build complete!\n');
 console.log('📋 What was built:');
 console.log('  • JavaScript library (lib/dist/)');
-console.log('  • Go CLI with embedded library (insertr-cli/insertr)');
-console.log('  • Content API server (insertr-server/insertr-server)');
+console.log('  • Unified Insertr binary with embedded library (./insertr)');
 console.log('\n🚀 Ready to use:');
 console.log('  just dev     # Full-stack development');
-console.log('  just server  # API server only');
-console.log('  cd insertr-cli && ./insertr --help  # CLI tools');
+console.log('  just serve   # API server only');
+console.log('  ./insertr --help  # See all commands');
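The copy-then-build ordering in this script matters because go:embed resolves paths at compile time: lib/dist must land in internal/content/assets before `go build -o insertr .` runs. The project's real internal/content source is not part of this diff, so the sketch below of how those assets might be embedded is illustrative only; file and identifier names are assumptions.

```go
// Hypothetical sketch of embedding the copied library assets with go:embed.
package content

import (
	"embed"
	"io/fs"
)

// The assets/ directory is populated by the Node build script before the
// Go build, which is why the copy step has to run first.
//go:embed assets/*
var embeddedAssets embed.FS

// LibraryFS exposes the embedded lib/dist files to the rest of the binary.
func LibraryFS() (fs.FS, error) {
	return fs.Sub(embeddedAssets, "assets")
}
```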
31
sqlc.yaml
Normal file
@@ -0,0 +1,31 @@
+version: "2"
+sql:
+  # SQLite configuration for development
+  - name: "sqlite"
+    engine: "sqlite"
+    queries: ["db/queries/", "db/sqlite/setup.sql"]
+    schema: "db/sqlite/schema.sql"
+    gen:
+      go:
+        package: "sqlite"
+        out: "internal/db/sqlite"
+        emit_json_tags: true
+        emit_prepared_queries: false
+        emit_interface: true
+        emit_exact_table_names: false
+        emit_pointers_for_null_types: false # All fields are NOT NULL now
+
+  # PostgreSQL configuration for production
+  - name: "postgresql"
+    engine: "postgresql"
+    queries: ["db/queries/", "db/postgresql/setup.sql"]
+    schema: "db/postgresql/schema.sql"
+    gen:
+      go:
+        package: "postgresql"
+        out: "internal/db/postgresql"
+        emit_json_tags: true
+        emit_prepared_queries: false
+        emit_interface: true
+        emit_exact_table_names: false
+        emit_pointers_for_null_types: false # All fields are NOT NULL now
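Because both targets share db/queries/ and set emit_interface: true, the generated sqlite and postgresql packages expose matching Querier interfaces, which is what lets one binary run against a local SQLite file in development and PostgreSQL in production. A minimal, hypothetical wiring sketch follows; the driver choice and the unused-store placeholder are assumptions, and only the generated package path comes from the config above.

```go
// Hypothetical wiring sketch for the SQLite development path.
package main

import (
	"database/sql"
	"log"

	_ "modernc.org/sqlite" // driver choice is an assumption, not from the diff

	"github.com/insertr/insertr/internal/db/sqlite"
)

func main() {
	conn, err := sql.Open("sqlite", "./dev.db")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// sqlc generates New(DBTX) *Queries in internal/db/sqlite per the config above.
	queries := sqlite.New(conn)
	_ = queries // actual query methods depend on db/queries/, which is not shown here
}
```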