cleanup: remove legacy insertr-cli and insertr-server directories
🧹 **Legacy Code Cleanup** **Removed Directories:** - `insertr-cli/` - 23 files removed (replaced by unified binary) - `insertr-server/` - 24 files removed (replaced by unified binary) **Updated References:** - `TODO.md` - Updated file paths to reflect unified structure - `scripts/dev.js` - Updated required/optional file checks **Benefits:** ✅ **Single Source of Truth** - No more duplicate code or confusion ✅ **Smaller Repository** - Removed ~47 legacy files and binaries ✅ **Cleaner Development** - Only unified binary approach remains ✅ **Better Maintenance** - No legacy code paths to maintain **All Functionality Preserved:** - Database layer: `internal/db/` - Content logic: `internal/content/` - API handlers: `internal/api/` - Commands: `cmd/enhance.go`, `cmd/serve.go` The unified `insertr` binary now provides all functionality with enhanced architecture and no backwards compatibility baggage.
This commit is contained in:
27
TODO.md
27
TODO.md
@@ -7,7 +7,7 @@
|
||||
## ✅ What's Already Built & Working
|
||||
|
||||
### **Complete Foundation**
|
||||
- ✅ **Go CLI Client** - Full REST API client with all CRUD operations (`insertr-cli/pkg/content/client.go`)
|
||||
- ✅ **Go Content Client** - Full REST API client with all CRUD operations (`internal/content/client.go`)
|
||||
- ✅ **JavaScript API Client** - Browser client with same API endpoints (`lib/src/core/api-client.js`)
|
||||
- ✅ **Content Types** - Well-defined data structures (`ContentItem`, `ContentResponse`)
|
||||
- ✅ **Mock Backend** - Working development server with realistic test data
|
||||
@@ -111,23 +111,16 @@ Static Site Build ← CLI Enhancement ← Database Content
|
||||
|
||||
## 🗂️ **Next Steps: Server Implementation**
|
||||
|
||||
### **Files to Create**
|
||||
### **✅ Implemented - Unified Binary Architecture**
|
||||
```
|
||||
insertr-server/ # New HTTP server application
|
||||
├── cmd/
|
||||
│ └── server/
|
||||
│ └── main.go # Server entry point
|
||||
├── internal/
|
||||
│ ├── api/
|
||||
│ │ ├── handlers.go # HTTP handlers for content endpoints
|
||||
│ │ └── middleware.go # Auth, CORS, logging middleware
|
||||
│ ├── db/
|
||||
│ │ ├── sqlite.go # SQLite implementation
|
||||
│ │ └── migrations/ # Database schema versions
|
||||
│ └── models/
|
||||
│ └── content.go # Content model (matches existing ContentItem)
|
||||
├── go.mod
|
||||
└── go.sum
|
||||
✅ COMPLETED: All server functionality integrated into unified binary
|
||||
cmd/
|
||||
├── serve.go # Runtime API server command
|
||||
└── enhance.go # Build-time enhancement command
|
||||
internal/
|
||||
├── api/ # HTTP handlers and middleware
|
||||
├── db/ # Multi-database layer with sqlc
|
||||
└── content/ # Content management logic
|
||||
```
|
||||
|
||||
### **Files to Modify**
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
root = "."
|
||||
testdata_dir = "testdata"
|
||||
tmp_dir = "tmp"
|
||||
|
||||
[build]
|
||||
args_bin = []
|
||||
bin = "./tmp/insertr"
|
||||
cmd = "go build -o ./tmp/insertr ."
|
||||
delay = 1000
|
||||
exclude_dir = ["tmp", "vendor", "testdata", "node_modules", "dist"]
|
||||
exclude_file = []
|
||||
exclude_regex = ["_test.go"]
|
||||
exclude_unchanged = false
|
||||
follow_symlink = false
|
||||
full_bin = "./tmp/insertr servedev -i ../demo-site -p 3000"
|
||||
include_dir = ["../lib/src"]
|
||||
include_ext = ["go", "tpl", "tmpl", "html", "js"]
|
||||
include_file = []
|
||||
kill_delay = "0s"
|
||||
log = "build-errors.log"
|
||||
poll = false
|
||||
poll_interval = 0
|
||||
post_cmd = []
|
||||
pre_cmd = ["./scripts/rebuild-library.sh"]
|
||||
rerun = false
|
||||
rerun_delay = 500
|
||||
send_interrupt = false
|
||||
stop_on_root = false
|
||||
|
||||
[color]
|
||||
app = ""
|
||||
build = "yellow"
|
||||
main = "magenta"
|
||||
runner = "green"
|
||||
watcher = "cyan"
|
||||
|
||||
[log]
|
||||
main_only = false
|
||||
time = false
|
||||
|
||||
[misc]
|
||||
clean_on_exit = true
|
||||
|
||||
[screen]
|
||||
clear_on_rebuild = false
|
||||
keep_scroll = true
|
||||
@@ -1,76 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/insertr/cli/pkg/content"
|
||||
)
|
||||
|
||||
var enhanceCmd = &cobra.Command{
|
||||
Use: "enhance [input-dir]",
|
||||
Short: "Enhance HTML files by injecting content from database",
|
||||
Long: `Enhance processes HTML files and injects latest content from the database
|
||||
while adding editing capabilities. This is the core build-time enhancement
|
||||
process that transforms static HTML into an editable CMS.`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: runEnhance,
|
||||
}
|
||||
|
||||
var (
|
||||
outputDir string
|
||||
apiURL string
|
||||
apiKey string
|
||||
siteID string
|
||||
mockContent bool
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(enhanceCmd)
|
||||
|
||||
enhanceCmd.Flags().StringVarP(&outputDir, "output", "o", "./dist", "Output directory for enhanced files")
|
||||
enhanceCmd.Flags().StringVar(&apiURL, "api-url", "", "Content API URL")
|
||||
enhanceCmd.Flags().StringVar(&apiKey, "api-key", "", "API key for authentication")
|
||||
enhanceCmd.Flags().StringVarP(&siteID, "site-id", "s", "demo", "Site ID for content lookup")
|
||||
enhanceCmd.Flags().BoolVar(&mockContent, "mock", true, "Use mock content for development")
|
||||
}
|
||||
|
||||
func runEnhance(cmd *cobra.Command, args []string) {
|
||||
inputDir := args[0]
|
||||
|
||||
// Validate input directory
|
||||
if _, err := os.Stat(inputDir); os.IsNotExist(err) {
|
||||
log.Fatalf("Input directory does not exist: %s", inputDir)
|
||||
}
|
||||
|
||||
// Create content client
|
||||
var client content.ContentClient
|
||||
if mockContent {
|
||||
fmt.Printf("🧪 Using mock content for development\n")
|
||||
client = content.NewMockClient()
|
||||
} else {
|
||||
if apiURL == "" {
|
||||
log.Fatal("API URL required when not using mock content (use --api-url)")
|
||||
}
|
||||
fmt.Printf("🌐 Using content API: %s\n", apiURL)
|
||||
client = content.NewHTTPClient(apiURL, apiKey)
|
||||
}
|
||||
|
||||
// Create enhancer
|
||||
enhancer := content.NewEnhancer(client, siteID)
|
||||
|
||||
fmt.Printf("🚀 Starting enhancement process...\n")
|
||||
fmt.Printf("📁 Input: %s\n", inputDir)
|
||||
fmt.Printf("📁 Output: %s\n", outputDir)
|
||||
fmt.Printf("🏷️ Site ID: %s\n\n", siteID)
|
||||
|
||||
// Enhance directory
|
||||
if err := enhancer.EnhanceDirectory(inputDir, outputDir); err != nil {
|
||||
log.Fatalf("Enhancement failed: %v", err)
|
||||
}
|
||||
|
||||
fmt.Printf("\n✅ Enhancement complete! Enhanced files available in: %s\n", outputDir)
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/insertr/cli/pkg/parser"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var parseCmd = &cobra.Command{
|
||||
Use: "parse [input-dir]",
|
||||
Short: "Parse HTML files and detect editable elements",
|
||||
Long: `Parse HTML files in the specified directory and detect elements
|
||||
with the 'insertr' class. This command analyzes the HTML structure
|
||||
and reports what editable elements would be enhanced.`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
inputDir := args[0]
|
||||
|
||||
if _, err := os.Stat(inputDir); os.IsNotExist(err) {
|
||||
fmt.Fprintf(os.Stderr, "Error: Directory %s does not exist\n", inputDir)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
fmt.Printf("🔍 Parsing HTML files in: %s\n\n", inputDir)
|
||||
|
||||
p := parser.New()
|
||||
result, err := p.ParseDirectory(inputDir)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error parsing directory: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
printParseResults(result)
|
||||
},
|
||||
}
|
||||
|
||||
func printParseResults(result *parser.ParseResult) {
|
||||
fmt.Printf("📊 Parse Results:\n")
|
||||
fmt.Printf(" Files processed: %d\n", result.Stats.FilesProcessed)
|
||||
fmt.Printf(" Elements found: %d\n", result.Stats.TotalElements)
|
||||
fmt.Printf(" Existing IDs: %d\n", result.Stats.ExistingIDs)
|
||||
fmt.Printf(" Generated IDs: %d\n", result.Stats.GeneratedIDs)
|
||||
|
||||
if len(result.Stats.TypeBreakdown) > 0 {
|
||||
fmt.Printf("\n📝 Content Types:\n")
|
||||
for contentType, count := range result.Stats.TypeBreakdown {
|
||||
fmt.Printf(" %s: %d\n", contentType, count)
|
||||
}
|
||||
}
|
||||
|
||||
if len(result.Elements) > 0 {
|
||||
fmt.Printf("\n🎯 Found Elements:\n")
|
||||
for _, element := range result.Elements {
|
||||
fmt.Printf(" %s <%s> id=%s type=%s\n",
|
||||
filepath.Base(element.FilePath),
|
||||
element.Tag,
|
||||
element.ContentID,
|
||||
element.Type)
|
||||
}
|
||||
}
|
||||
|
||||
if len(result.Warnings) > 0 {
|
||||
fmt.Printf("\n⚠️ Warnings:\n")
|
||||
for _, warning := range result.Warnings {
|
||||
fmt.Printf(" %s\n", warning)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
Use: "insertr",
|
||||
Short: "Insertr CLI - HTML enhancement for static sites",
|
||||
Long: `Insertr CLI adds editing capabilities to static HTML sites by detecting
|
||||
editable elements and injecting content management functionality.
|
||||
|
||||
The tool parses HTML files, finds elements with the 'insertr' class,
|
||||
and enhances them with editing capabilities while preserving
|
||||
static site performance.`,
|
||||
Version: "0.0.1",
|
||||
}
|
||||
|
||||
func Execute() {
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(parseCmd)
|
||||
}
|
||||
@@ -1,189 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/insertr/cli/pkg/content"
|
||||
)
|
||||
|
||||
var servedevCmd = &cobra.Command{
|
||||
Use: "servedev",
|
||||
Short: "Development server that parses and serves enhanced HTML files",
|
||||
Long: `Servedev starts a development HTTP server that automatically parses HTML files
|
||||
for insertr elements and serves the enhanced content. Perfect for development workflow
|
||||
with live rebuilds via Air.`,
|
||||
Run: runServedev,
|
||||
}
|
||||
|
||||
var (
|
||||
inputDir string
|
||||
port int
|
||||
useMockContent bool
|
||||
devSiteID string
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(servedevCmd)
|
||||
|
||||
servedevCmd.Flags().StringVarP(&inputDir, "input", "i", ".", "Input directory to serve")
|
||||
servedevCmd.Flags().IntVarP(&port, "port", "p", 3000, "Port to serve on")
|
||||
servedevCmd.Flags().BoolVar(&useMockContent, "mock", true, "Use mock content for development")
|
||||
servedevCmd.Flags().StringVarP(&devSiteID, "site-id", "s", "demo", "Site ID for content lookup")
|
||||
}
|
||||
|
||||
func runServedev(cmd *cobra.Command, args []string) {
|
||||
// Resolve absolute path for input directory
|
||||
absInputDir, err := filepath.Abs(inputDir)
|
||||
if err != nil {
|
||||
log.Fatalf("Error resolving input directory: %v", err)
|
||||
}
|
||||
|
||||
// Check if input directory exists
|
||||
if _, err := os.Stat(absInputDir); os.IsNotExist(err) {
|
||||
log.Fatalf("Input directory does not exist: %s", absInputDir)
|
||||
}
|
||||
|
||||
// Create content client
|
||||
var client content.ContentClient
|
||||
if useMockContent {
|
||||
fmt.Printf("🧪 Using mock content for development\n")
|
||||
client = content.NewMockClient()
|
||||
} else {
|
||||
// For now, default to mock if no API URL provided
|
||||
fmt.Printf("🧪 Using mock content for development (no API configured)\n")
|
||||
client = content.NewMockClient()
|
||||
}
|
||||
|
||||
fmt.Printf("🚀 Starting development server with content enhancement...\n")
|
||||
fmt.Printf("📁 Serving directory: %s\n", absInputDir)
|
||||
fmt.Printf("🌐 Server running at: http://localhost:%d\n", port)
|
||||
fmt.Printf("🏷️ Site ID: %s\n", devSiteID)
|
||||
fmt.Printf("🔄 Manually refresh browser to see changes\n\n")
|
||||
|
||||
// Create enhanced file server
|
||||
fileServer := http.FileServer(&enhancedFileSystem{
|
||||
fs: http.Dir(absInputDir),
|
||||
dir: absInputDir,
|
||||
enhancer: content.NewEnhancer(client, devSiteID),
|
||||
})
|
||||
|
||||
// Handle editor assets
|
||||
http.HandleFunc("/_insertr/", func(w http.ResponseWriter, r *http.Request) {
|
||||
assetPath := strings.TrimPrefix(r.URL.Path, "/_insertr/")
|
||||
serveEditorAsset(w, r, assetPath)
|
||||
})
|
||||
|
||||
// Handle insertr library files
|
||||
http.HandleFunc("/insertr/", func(w http.ResponseWriter, r *http.Request) {
|
||||
assetPath := strings.TrimPrefix(r.URL.Path, "/insertr/")
|
||||
serveLibraryAsset(w, r, assetPath)
|
||||
})
|
||||
|
||||
// Handle all other requests with our enhanced file server
|
||||
http.Handle("/", fileServer)
|
||||
|
||||
// Start server
|
||||
addr := fmt.Sprintf(":%d", port)
|
||||
log.Fatal(http.ListenAndServe(addr, nil))
|
||||
}
|
||||
|
||||
// serveEditorAsset serves editor JavaScript and CSS files
|
||||
func serveEditorAsset(w http.ResponseWriter, r *http.Request, assetPath string) {
|
||||
// Get the path to the CLI binary directory
|
||||
execPath, err := os.Executable()
|
||||
if err != nil {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
// Look for assets relative to the CLI binary (for built version)
|
||||
assetsDir := filepath.Join(filepath.Dir(execPath), "assets", "editor")
|
||||
assetFile := filepath.Join(assetsDir, assetPath)
|
||||
|
||||
// If not found, look for assets relative to source (for development)
|
||||
if _, err := os.Stat(assetFile); os.IsNotExist(err) {
|
||||
// Assume we're running from source
|
||||
cwd, _ := os.Getwd()
|
||||
assetsDir = filepath.Join(cwd, "assets", "editor")
|
||||
assetFile = filepath.Join(assetsDir, assetPath)
|
||||
}
|
||||
|
||||
// Set appropriate content type
|
||||
if strings.HasSuffix(assetPath, ".js") {
|
||||
w.Header().Set("Content-Type", "application/javascript")
|
||||
} else if strings.HasSuffix(assetPath, ".css") {
|
||||
w.Header().Set("Content-Type", "text/css")
|
||||
}
|
||||
|
||||
// Serve the file
|
||||
http.ServeFile(w, r, assetFile)
|
||||
}
|
||||
|
||||
// serveLibraryAsset serves the insertr library files from embedded assets
|
||||
func serveLibraryAsset(w http.ResponseWriter, r *http.Request, assetPath string) {
|
||||
w.Header().Set("Content-Type", "application/javascript")
|
||||
|
||||
var script string
|
||||
switch assetPath {
|
||||
case "insertr.js":
|
||||
script = content.GetLibraryScript(false)
|
||||
case "insertr.min.js":
|
||||
script = content.GetLibraryScript(true)
|
||||
default:
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
w.Write([]byte(script))
|
||||
}
|
||||
|
||||
// enhancedFileSystem wraps http.FileSystem to provide enhanced HTML serving
|
||||
type enhancedFileSystem struct {
|
||||
fs http.FileSystem
|
||||
dir string
|
||||
enhancer *content.Enhancer
|
||||
}
|
||||
|
||||
func (efs *enhancedFileSystem) Open(name string) (http.File, error) {
|
||||
// For HTML files, enhance them on-the-fly
|
||||
if strings.HasSuffix(name, ".html") {
|
||||
fmt.Printf("📄 Enhancing HTML: %s\n", name)
|
||||
return efs.serveEnhancedHTML(name)
|
||||
}
|
||||
|
||||
// For non-HTML files, serve as-is
|
||||
return efs.fs.Open(name)
|
||||
}
|
||||
|
||||
// serveEnhancedHTML enhances an HTML file and returns it as an http.File
|
||||
func (efs *enhancedFileSystem) serveEnhancedHTML(name string) (http.File, error) {
|
||||
// Get the full file path
|
||||
inputPath := filepath.Join(efs.dir, name)
|
||||
|
||||
// Create a temporary output path (in-memory would be better, but this is simpler for now)
|
||||
tempDir := filepath.Join(os.TempDir(), "insertr-dev")
|
||||
outputPath := filepath.Join(tempDir, name)
|
||||
|
||||
// Ensure temp directory exists
|
||||
if err := os.MkdirAll(filepath.Dir(outputPath), 0755); err != nil {
|
||||
fmt.Printf("⚠️ Failed to create temp directory: %v\n", err)
|
||||
return efs.fs.Open(name) // Fallback to original file
|
||||
}
|
||||
|
||||
// Enhance the file
|
||||
if err := efs.enhancer.EnhanceFile(inputPath, outputPath); err != nil {
|
||||
fmt.Printf("⚠️ Enhancement failed for %s: %v\n", name, err)
|
||||
return efs.fs.Open(name) // Fallback to original file
|
||||
}
|
||||
|
||||
// Serve the enhanced file
|
||||
tempFS := http.Dir(tempDir)
|
||||
return tempFS.Open(name)
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
module github.com/insertr/cli
|
||||
|
||||
go 1.23.0
|
||||
|
||||
toolchain go1.24.6
|
||||
|
||||
require (
|
||||
github.com/spf13/cobra v1.8.0
|
||||
golang.org/x/net v0.43.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
)
|
||||
@@ -1,12 +0,0 @@
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
|
||||
github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
|
||||
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
@@ -1,9 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/insertr/cli/cmd"
|
||||
)
|
||||
|
||||
func main() {
|
||||
cmd.Execute()
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,164 +0,0 @@
|
||||
package content
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HTTPClient implements ContentClient for HTTP API access
|
||||
type HTTPClient struct {
|
||||
BaseURL string
|
||||
APIKey string
|
||||
HTTPClient *http.Client
|
||||
}
|
||||
|
||||
// NewHTTPClient creates a new HTTP content client
|
||||
func NewHTTPClient(baseURL, apiKey string) *HTTPClient {
|
||||
return &HTTPClient{
|
||||
BaseURL: strings.TrimSuffix(baseURL, "/"),
|
||||
APIKey: apiKey,
|
||||
HTTPClient: &http.Client{
|
||||
Timeout: 30 * time.Second,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// GetContent fetches a single content item by ID
|
||||
func (c *HTTPClient) GetContent(siteID, contentID string) (*ContentItem, error) {
|
||||
url := fmt.Sprintf("%s/api/content/%s?site_id=%s", c.BaseURL, contentID, siteID)
|
||||
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating request: %w", err)
|
||||
}
|
||||
|
||||
if c.APIKey != "" {
|
||||
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||
}
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("making request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == 404 {
|
||||
return nil, nil // Content not found, return nil without error
|
||||
}
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("API error: %s", resp.Status)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("reading response: %w", err)
|
||||
}
|
||||
|
||||
var item ContentItem
|
||||
if err := json.Unmarshal(body, &item); err != nil {
|
||||
return nil, fmt.Errorf("parsing response: %w", err)
|
||||
}
|
||||
|
||||
return &item, nil
|
||||
}
|
||||
|
||||
// GetBulkContent fetches multiple content items by IDs
|
||||
func (c *HTTPClient) GetBulkContent(siteID string, contentIDs []string) (map[string]ContentItem, error) {
|
||||
if len(contentIDs) == 0 {
|
||||
return make(map[string]ContentItem), nil
|
||||
}
|
||||
|
||||
// Build query parameters
|
||||
params := url.Values{}
|
||||
params.Set("site_id", siteID)
|
||||
for _, id := range contentIDs {
|
||||
params.Add("ids", id)
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/api/content/bulk?%s", c.BaseURL, params.Encode())
|
||||
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating request: %w", err)
|
||||
}
|
||||
|
||||
if c.APIKey != "" {
|
||||
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||
}
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("making request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("API error: %s", resp.Status)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("reading response: %w", err)
|
||||
}
|
||||
|
||||
var response ContentResponse
|
||||
if err := json.Unmarshal(body, &response); err != nil {
|
||||
return nil, fmt.Errorf("parsing response: %w", err)
|
||||
}
|
||||
|
||||
// Convert slice to map for easy lookup
|
||||
result := make(map[string]ContentItem)
|
||||
for _, item := range response.Content {
|
||||
result[item.ID] = item
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// GetAllContent fetches all content for a site
|
||||
func (c *HTTPClient) GetAllContent(siteID string) (map[string]ContentItem, error) {
|
||||
url := fmt.Sprintf("%s/api/content?site_id=%s", c.BaseURL, siteID)
|
||||
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("creating request: %w", err)
|
||||
}
|
||||
|
||||
if c.APIKey != "" {
|
||||
req.Header.Set("Authorization", "Bearer "+c.APIKey)
|
||||
}
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("making request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != 200 {
|
||||
return nil, fmt.Errorf("API error: %s", resp.Status)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("reading response: %w", err)
|
||||
}
|
||||
|
||||
var response ContentResponse
|
||||
if err := json.Unmarshal(body, &response); err != nil {
|
||||
return nil, fmt.Errorf("parsing response: %w", err)
|
||||
}
|
||||
|
||||
// Convert slice to map for easy lookup
|
||||
result := make(map[string]ContentItem)
|
||||
for _, item := range response.Content {
|
||||
result[item.ID] = item
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
@@ -1,216 +0,0 @@
|
||||
package content
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
|
||||
"github.com/insertr/cli/pkg/parser"
|
||||
)
|
||||
|
||||
// Enhancer combines parsing and content injection
|
||||
type Enhancer struct {
|
||||
parser *parser.Parser
|
||||
injector *Injector
|
||||
}
|
||||
|
||||
// NewEnhancer creates a new HTML enhancer
|
||||
func NewEnhancer(client ContentClient, siteID string) *Enhancer {
|
||||
return &Enhancer{
|
||||
parser: parser.New(),
|
||||
injector: NewInjector(client, siteID),
|
||||
}
|
||||
}
|
||||
|
||||
// EnhanceFile processes an HTML file and injects content
|
||||
func (e *Enhancer) EnhanceFile(inputPath, outputPath string) error {
|
||||
// Use parser to get elements from file
|
||||
result, err := e.parser.ParseDirectory(filepath.Dir(inputPath))
|
||||
if err != nil {
|
||||
return fmt.Errorf("parsing file: %w", err)
|
||||
}
|
||||
|
||||
// Filter elements for this specific file
|
||||
var fileElements []parser.Element
|
||||
inputBaseName := filepath.Base(inputPath)
|
||||
for _, elem := range result.Elements {
|
||||
elemBaseName := filepath.Base(elem.FilePath)
|
||||
if elemBaseName == inputBaseName {
|
||||
fileElements = append(fileElements, elem)
|
||||
}
|
||||
}
|
||||
|
||||
if len(fileElements) == 0 {
|
||||
// No insertr elements found, copy file as-is
|
||||
return e.copyFile(inputPath, outputPath)
|
||||
}
|
||||
|
||||
// Read and parse HTML for modification
|
||||
htmlContent, err := os.ReadFile(inputPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("reading file %s: %w", inputPath, err)
|
||||
}
|
||||
|
||||
doc, err := html.Parse(strings.NewReader(string(htmlContent)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("parsing HTML: %w", err)
|
||||
}
|
||||
|
||||
// Find and inject content for each element
|
||||
for _, elem := range fileElements {
|
||||
// Find the node in the parsed document
|
||||
// Note: This is a simplified approach - in production we'd need more robust node matching
|
||||
if err := e.injectElementContent(doc, elem); err != nil {
|
||||
fmt.Printf("⚠️ Warning: failed to inject content for %s: %v\n", elem.ContentID, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Inject editor assets for development
|
||||
libraryScript := GetLibraryScript(false) // Use non-minified for development debugging
|
||||
e.injector.InjectEditorAssets(doc, true, libraryScript)
|
||||
|
||||
// Write enhanced HTML
|
||||
if err := e.writeHTML(doc, outputPath); err != nil {
|
||||
return fmt.Errorf("writing enhanced HTML: %w", err)
|
||||
}
|
||||
|
||||
fmt.Printf("✅ Enhanced: %s → %s (%d elements)\n",
|
||||
filepath.Base(inputPath),
|
||||
filepath.Base(outputPath),
|
||||
len(fileElements))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// injectElementContent finds and injects content for a specific element
|
||||
func (e *Enhancer) injectElementContent(doc *html.Node, elem parser.Element) error {
|
||||
// Fetch content from database
|
||||
contentItem, err := e.injector.client.GetContent(e.injector.siteID, elem.ContentID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("fetching content: %w", err)
|
||||
}
|
||||
|
||||
// Find nodes with insertr class and inject content
|
||||
e.findAndInjectNodes(doc, elem, contentItem)
|
||||
return nil
|
||||
}
|
||||
|
||||
// findAndInjectNodes recursively finds nodes and injects content
|
||||
func (e *Enhancer) findAndInjectNodes(node *html.Node, elem parser.Element, contentItem *ContentItem) {
|
||||
if node.Type == html.ElementNode {
|
||||
// Check if this node matches our element criteria
|
||||
classes := getClasses(node)
|
||||
if containsClass(classes, "insertr") && node.Data == elem.Tag {
|
||||
// This might be our target node - inject content
|
||||
e.injector.addContentAttributes(node, elem.ContentID, string(elem.Type))
|
||||
|
||||
if contentItem != nil {
|
||||
switch elem.Type {
|
||||
case parser.ContentText:
|
||||
e.injector.injectTextContent(node, contentItem.Value)
|
||||
case parser.ContentMarkdown:
|
||||
e.injector.injectMarkdownContent(node, contentItem.Value)
|
||||
case parser.ContentLink:
|
||||
e.injector.injectLinkContent(node, contentItem.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Recursively process children
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
e.findAndInjectNodes(child, elem, contentItem)
|
||||
}
|
||||
}
|
||||
|
||||
// Helper functions from parser package
|
||||
func getClasses(node *html.Node) []string {
|
||||
for _, attr := range node.Attr {
|
||||
if attr.Key == "class" {
|
||||
return strings.Fields(attr.Val)
|
||||
}
|
||||
}
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func containsClass(classes []string, target string) bool {
|
||||
for _, class := range classes {
|
||||
if class == target {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// EnhanceDirectory processes all HTML files in a directory
|
||||
func (e *Enhancer) EnhanceDirectory(inputDir, outputDir string) error {
|
||||
// Create output directory
|
||||
if err := os.MkdirAll(outputDir, 0755); err != nil {
|
||||
return fmt.Errorf("creating output directory: %w", err)
|
||||
}
|
||||
|
||||
// Walk input directory
|
||||
return filepath.Walk(inputDir, func(path string, info os.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Calculate relative path and output path
|
||||
relPath, err := filepath.Rel(inputDir, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
outputPath := filepath.Join(outputDir, relPath)
|
||||
|
||||
// Handle directories
|
||||
if info.IsDir() {
|
||||
return os.MkdirAll(outputPath, info.Mode())
|
||||
}
|
||||
|
||||
// Handle HTML files
|
||||
if strings.HasSuffix(strings.ToLower(path), ".html") {
|
||||
return e.EnhanceFile(path, outputPath)
|
||||
}
|
||||
|
||||
// Copy other files as-is
|
||||
return e.copyFile(path, outputPath)
|
||||
})
|
||||
}
|
||||
|
||||
// copyFile copies a file from src to dst
|
||||
func (e *Enhancer) copyFile(src, dst string) error {
|
||||
// Create directory for destination
|
||||
if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Read source
|
||||
data, err := os.ReadFile(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Write destination
|
||||
return os.WriteFile(dst, data, 0644)
|
||||
}
|
||||
|
||||
// writeHTML writes an HTML document to a file
|
||||
func (e *Enhancer) writeHTML(doc *html.Node, outputPath string) error {
|
||||
// Create directory for output
|
||||
if err := os.MkdirAll(filepath.Dir(outputPath), 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Create output file
|
||||
file, err := os.Create(outputPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
// Write HTML
|
||||
return html.Render(file, doc)
|
||||
}
|
||||
@@ -1,236 +0,0 @@
|
||||
package content
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// Injector handles content injection into HTML elements
|
||||
type Injector struct {
|
||||
client ContentClient
|
||||
siteID string
|
||||
}
|
||||
|
||||
// NewInjector creates a new content injector
|
||||
func NewInjector(client ContentClient, siteID string) *Injector {
|
||||
return &Injector{
|
||||
client: client,
|
||||
siteID: siteID,
|
||||
}
|
||||
}
|
||||
|
||||
// InjectContent replaces element content with database values and adds content IDs
|
||||
func (i *Injector) InjectContent(element *Element, contentID string) error {
|
||||
// Fetch content from database/API
|
||||
contentItem, err := i.client.GetContent(i.siteID, contentID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("fetching content for %s: %w", contentID, err)
|
||||
}
|
||||
|
||||
// If no content found, keep original content but add data attributes
|
||||
if contentItem == nil {
|
||||
i.addContentAttributes(element.Node, contentID, element.Type)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Replace element content based on type
|
||||
switch element.Type {
|
||||
case "text":
|
||||
i.injectTextContent(element.Node, contentItem.Value)
|
||||
case "markdown":
|
||||
i.injectMarkdownContent(element.Node, contentItem.Value)
|
||||
case "link":
|
||||
i.injectLinkContent(element.Node, contentItem.Value)
|
||||
default:
|
||||
i.injectTextContent(element.Node, contentItem.Value)
|
||||
}
|
||||
|
||||
// Add data attributes for editor functionality
|
||||
i.addContentAttributes(element.Node, contentID, element.Type)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// InjectBulkContent efficiently injects multiple content items
|
||||
func (i *Injector) InjectBulkContent(elements []ElementWithID) error {
|
||||
// Extract content IDs for bulk fetch
|
||||
contentIDs := make([]string, len(elements))
|
||||
for idx, elem := range elements {
|
||||
contentIDs[idx] = elem.ContentID
|
||||
}
|
||||
|
||||
// Bulk fetch content
|
||||
contentMap, err := i.client.GetBulkContent(i.siteID, contentIDs)
|
||||
if err != nil {
|
||||
return fmt.Errorf("bulk fetching content: %w", err)
|
||||
}
|
||||
|
||||
// Inject each element
|
||||
for _, elem := range elements {
|
||||
contentItem, exists := contentMap[elem.ContentID]
|
||||
|
||||
// Add content attributes regardless
|
||||
i.addContentAttributes(elem.Element.Node, elem.ContentID, elem.Element.Type)
|
||||
|
||||
if !exists {
|
||||
// Keep original content if not found in database
|
||||
continue
|
||||
}
|
||||
|
||||
// Replace content based on type
|
||||
switch elem.Element.Type {
|
||||
case "text":
|
||||
i.injectTextContent(elem.Element.Node, contentItem.Value)
|
||||
case "markdown":
|
||||
i.injectMarkdownContent(elem.Element.Node, contentItem.Value)
|
||||
case "link":
|
||||
i.injectLinkContent(elem.Element.Node, contentItem.Value)
|
||||
default:
|
||||
i.injectTextContent(elem.Element.Node, contentItem.Value)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// injectTextContent replaces text content in an element
|
||||
func (i *Injector) injectTextContent(node *html.Node, content string) {
|
||||
// Remove all child nodes
|
||||
for child := node.FirstChild; child != nil; {
|
||||
next := child.NextSibling
|
||||
node.RemoveChild(child)
|
||||
child = next
|
||||
}
|
||||
|
||||
// Add new text content
|
||||
textNode := &html.Node{
|
||||
Type: html.TextNode,
|
||||
Data: content,
|
||||
}
|
||||
node.AppendChild(textNode)
|
||||
}
|
||||
|
||||
// injectMarkdownContent handles markdown content (for now, just as text)
|
||||
func (i *Injector) injectMarkdownContent(node *html.Node, content string) {
|
||||
// For now, treat markdown as text content
|
||||
// TODO: Implement markdown to HTML conversion
|
||||
i.injectTextContent(node, content)
|
||||
}
|
||||
|
||||
// injectLinkContent handles link/button content with URL extraction
|
||||
func (i *Injector) injectLinkContent(node *html.Node, content string) {
|
||||
// For now, just inject the text content
|
||||
// TODO: Parse content for URL and text components
|
||||
i.injectTextContent(node, content)
|
||||
}
|
||||
|
||||
// addContentAttributes adds necessary data attributes and insertr class for editor functionality
|
||||
func (i *Injector) addContentAttributes(node *html.Node, contentID string, contentType string) {
|
||||
i.setAttribute(node, "data-content-id", contentID)
|
||||
i.setAttribute(node, "data-content-type", contentType)
|
||||
i.addClass(node, "insertr")
|
||||
}
|
||||
|
||||
// InjectEditorAssets adds editor JavaScript to HTML document
|
||||
func (i *Injector) InjectEditorAssets(doc *html.Node, isDevelopment bool, libraryScript string) {
|
||||
// TODO: Implement script injection strategy when we have CDN hosting
|
||||
// For now, script injection is disabled since HTML files should include their own script tags
|
||||
// Future options:
|
||||
// 1. Inject CDN script tag: <script src="https://cdn.jsdelivr.net/npm/@insertr/lib@1.0.0/dist/insertr.js"></script>
|
||||
// 2. Inject local script tag for development: <script src="/insertr/insertr.js"></script>
|
||||
// 3. Continue with inline injection for certain use cases
|
||||
|
||||
// Currently disabled to avoid duplicate scripts
|
||||
return
|
||||
}
|
||||
|
||||
// findHeadElement finds the <head> element in the document
|
||||
func (i *Injector) findHeadElement(node *html.Node) *html.Node {
|
||||
if node.Type == html.ElementNode && node.Data == "head" {
|
||||
return node
|
||||
}
|
||||
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
if result := i.findHeadElement(child); result != nil {
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// setAttribute safely sets an attribute on an HTML node
|
||||
func (i *Injector) setAttribute(node *html.Node, key, value string) {
|
||||
// Remove existing attribute if present
|
||||
for idx, attr := range node.Attr {
|
||||
if attr.Key == key {
|
||||
node.Attr = append(node.Attr[:idx], node.Attr[idx+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Add new attribute
|
||||
node.Attr = append(node.Attr, html.Attribute{
|
||||
Key: key,
|
||||
Val: value,
|
||||
})
|
||||
}
|
||||
|
||||
// addClass safely adds a class to an HTML node
|
||||
func (i *Injector) addClass(node *html.Node, className string) {
|
||||
var classAttr *html.Attribute
|
||||
var classIndex int = -1
|
||||
|
||||
// Find existing class attribute
|
||||
for idx, attr := range node.Attr {
|
||||
if attr.Key == "class" {
|
||||
classAttr = &attr
|
||||
classIndex = idx
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var classes []string
|
||||
if classAttr != nil {
|
||||
classes = strings.Fields(classAttr.Val)
|
||||
}
|
||||
|
||||
// Check if class already exists
|
||||
for _, class := range classes {
|
||||
if class == className {
|
||||
return // Class already exists
|
||||
}
|
||||
}
|
||||
|
||||
// Add new class
|
||||
classes = append(classes, className)
|
||||
newClassValue := strings.Join(classes, " ")
|
||||
|
||||
if classIndex >= 0 {
|
||||
// Update existing class attribute
|
||||
node.Attr[classIndex].Val = newClassValue
|
||||
} else {
|
||||
// Add new class attribute
|
||||
node.Attr = append(node.Attr, html.Attribute{
|
||||
Key: "class",
|
||||
Val: newClassValue,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Element represents a parsed HTML element with metadata
|
||||
type Element struct {
|
||||
Node *html.Node
|
||||
Type string
|
||||
Tag string
|
||||
Classes []string
|
||||
Content string
|
||||
}
|
||||
|
||||
// ElementWithID combines an element with its generated content ID
|
||||
type ElementWithID struct {
|
||||
Element *Element
|
||||
ContentID string
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
package content
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Embedded library assets
|
||||
//
|
||||
//go:embed assets/insertr.min.js
|
||||
var libraryMinJS string
|
||||
|
||||
//go:embed assets/insertr.js
|
||||
var libraryJS string
|
||||
|
||||
// GetLibraryScript returns the appropriate library version
|
||||
func GetLibraryScript(minified bool) string {
|
||||
if minified {
|
||||
return libraryMinJS
|
||||
}
|
||||
return libraryJS
|
||||
}
|
||||
|
||||
// GetLibraryVersion returns the current embedded library version
|
||||
func GetLibraryVersion() string {
|
||||
return "1.0.0"
|
||||
}
|
||||
|
||||
// GetLibraryURL returns the appropriate library URL for script injection
|
||||
func GetLibraryURL(minified bool, isDevelopment bool) string {
|
||||
if isDevelopment {
|
||||
// Local development URLs - relative to served content
|
||||
if minified {
|
||||
return "/insertr/insertr.min.js"
|
||||
}
|
||||
return "/insertr/insertr.js"
|
||||
}
|
||||
|
||||
// Production URLs - use CDN
|
||||
return GetLibraryCDNURL(minified)
|
||||
}
|
||||
|
||||
// GetLibraryCDNURL returns the CDN URL for production use
|
||||
func GetLibraryCDNURL(minified bool) string {
|
||||
version := GetLibraryVersion()
|
||||
if minified {
|
||||
return fmt.Sprintf("https://cdn.jsdelivr.net/npm/@insertr/lib@%s/dist/insertr.min.js", version)
|
||||
}
|
||||
return fmt.Sprintf("https://cdn.jsdelivr.net/npm/@insertr/lib@%s/dist/insertr.js", version)
|
||||
}
|
||||
@@ -1,138 +0,0 @@
|
||||
package content
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// MockClient implements ContentClient with mock data for development
|
||||
type MockClient struct {
|
||||
data map[string]ContentItem
|
||||
}
|
||||
|
||||
// NewMockClient creates a new mock content client with sample data
|
||||
func NewMockClient() *MockClient {
|
||||
// Generate realistic mock content based on actual generated IDs
|
||||
data := map[string]ContentItem{
|
||||
// Navigation (index.html has collision suffix)
|
||||
"navbar-logo-2b10ad": {
|
||||
ID: "navbar-logo-2b10ad",
|
||||
SiteID: "demo",
|
||||
Value: "Acme Consulting Solutions",
|
||||
Type: "text",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
"navbar-logo-2b10ad-a44bad": {
|
||||
ID: "navbar-logo-2b10ad-a44bad",
|
||||
SiteID: "demo",
|
||||
Value: "Acme Business Advisors",
|
||||
Type: "text",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
|
||||
// Hero Section - index.html (updated with actual IDs)
|
||||
"hero-title-7cfeea": {
|
||||
ID: "hero-title-7cfeea",
|
||||
SiteID: "demo",
|
||||
Value: "Transform Your Business with Strategic Expertise",
|
||||
Type: "text",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
"hero-lead-e47475": {
|
||||
ID: "hero-lead-e47475",
|
||||
SiteID: "demo",
|
||||
Value: "We help **ambitious businesses** grow through strategic planning, process optimization, and digital transformation. Our team brings 20+ years of experience to accelerate your success.",
|
||||
Type: "markdown",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
"hero-link-76c620": {
|
||||
ID: "hero-link-76c620",
|
||||
SiteID: "demo",
|
||||
Value: "Schedule Free Consultation",
|
||||
Type: "link",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
|
||||
// Hero Section - about.html
|
||||
"hero-title-c70343": {
|
||||
ID: "hero-title-c70343",
|
||||
SiteID: "demo",
|
||||
Value: "About Our Consulting Expertise",
|
||||
Type: "text",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
"hero-lead-673026": {
|
||||
ID: "hero-lead-673026",
|
||||
SiteID: "demo",
|
||||
Value: "We're a team of **experienced consultants** dedicated to helping small businesses thrive in today's competitive marketplace through proven strategies.",
|
||||
Type: "markdown",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
|
||||
// Services Section
|
||||
"services-subtitle-c8927c": {
|
||||
ID: "services-subtitle-c8927c",
|
||||
SiteID: "demo",
|
||||
Value: "Our Story",
|
||||
Type: "text",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
"services-text-0d96da": {
|
||||
ID: "services-text-0d96da",
|
||||
SiteID: "demo",
|
||||
Value: "**Founded in 2020**, Acme Consulting emerged from a simple observation: small businesses needed access to the same high-quality strategic advice that large corporations receive, but in a format that was accessible, affordable, and actionable.",
|
||||
Type: "markdown",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
|
||||
// Default fallback for any missing content
|
||||
"default": {
|
||||
ID: "default",
|
||||
SiteID: "demo",
|
||||
Value: "[Enhanced Content]",
|
||||
Type: "text",
|
||||
UpdatedAt: time.Now().Format(time.RFC3339),
|
||||
},
|
||||
}
|
||||
|
||||
return &MockClient{data: data}
|
||||
}
|
||||
|
||||
// GetContent fetches a single content item by ID
|
||||
func (m *MockClient) GetContent(siteID, contentID string) (*ContentItem, error) {
|
||||
if item, exists := m.data[contentID]; exists && item.SiteID == siteID {
|
||||
return &item, nil
|
||||
}
|
||||
|
||||
// Return nil for missing content - this will preserve original HTML content
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// GetBulkContent fetches multiple content items by IDs
|
||||
func (m *MockClient) GetBulkContent(siteID string, contentIDs []string) (map[string]ContentItem, error) {
|
||||
result := make(map[string]ContentItem)
|
||||
|
||||
for _, id := range contentIDs {
|
||||
item, err := m.GetContent(siteID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if item != nil {
|
||||
result[id] = *item
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// GetAllContent fetches all content for a site
|
||||
func (m *MockClient) GetAllContent(siteID string) (map[string]ContentItem, error) {
|
||||
result := make(map[string]ContentItem)
|
||||
|
||||
for _, item := range m.data {
|
||||
if item.SiteID == siteID {
|
||||
result[item.ID] = item
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
@@ -1,28 +0,0 @@
|
||||
package content
|
||||
|
||||
// ContentItem represents a piece of content from the database
|
||||
type ContentItem struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
UpdatedAt string `json:"updated_at"`
|
||||
}
|
||||
|
||||
// ContentResponse represents the API response structure
|
||||
type ContentResponse struct {
|
||||
Content []ContentItem `json:"content"`
|
||||
Error string `json:"error,omitempty"`
|
||||
}
|
||||
|
||||
// ContentClient interface for content retrieval
|
||||
type ContentClient interface {
|
||||
// GetContent fetches content by ID
|
||||
GetContent(siteID, contentID string) (*ContentItem, error)
|
||||
|
||||
// GetBulkContent fetches multiple content items by IDs
|
||||
GetBulkContent(siteID string, contentIDs []string) (map[string]ContentItem, error)
|
||||
|
||||
// GetAllContent fetches all content for a site
|
||||
GetAllContent(siteID string) (map[string]ContentItem, error)
|
||||
}
|
||||
@@ -1,167 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// IDGenerator generates unique content IDs for elements
|
||||
type IDGenerator struct {
|
||||
usedIDs map[string]bool
|
||||
}
|
||||
|
||||
// NewIDGenerator creates a new ID generator
|
||||
func NewIDGenerator() *IDGenerator {
|
||||
return &IDGenerator{
|
||||
usedIDs: make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
// Generate creates a content ID for an HTML element
|
||||
func (g *IDGenerator) Generate(node *html.Node) string {
|
||||
context := g.getSemanticContext(node)
|
||||
purpose := g.getPurpose(node)
|
||||
contentHash := g.getContentHash(node)
|
||||
|
||||
baseID := g.createBaseID(context, purpose, contentHash)
|
||||
return g.ensureUnique(baseID)
|
||||
}
|
||||
|
||||
// getSemanticContext determines the semantic context from parent elements
|
||||
func (g *IDGenerator) getSemanticContext(node *html.Node) string {
|
||||
// Walk up the tree to find semantic containers
|
||||
parent := node.Parent
|
||||
for parent != nil && parent.Type == html.ElementNode {
|
||||
classes := getClasses(parent)
|
||||
|
||||
// Check for common semantic section classes
|
||||
for _, class := range []string{"hero", "services", "nav", "navbar", "footer", "about", "contact", "testimonial"} {
|
||||
if containsClass(classes, class) {
|
||||
return class
|
||||
}
|
||||
}
|
||||
|
||||
// Check for semantic HTML elements
|
||||
switch parent.Data {
|
||||
case "nav":
|
||||
return "nav"
|
||||
case "header":
|
||||
return "header"
|
||||
case "footer":
|
||||
return "footer"
|
||||
case "main":
|
||||
return "main"
|
||||
case "aside":
|
||||
return "aside"
|
||||
}
|
||||
|
||||
parent = parent.Parent
|
||||
}
|
||||
|
||||
return "content"
|
||||
}
|
||||
|
||||
// getPurpose determines the purpose/role of the element
|
||||
func (g *IDGenerator) getPurpose(node *html.Node) string {
|
||||
tag := strings.ToLower(node.Data)
|
||||
classes := getClasses(node)
|
||||
|
||||
// Check for specific CSS classes that indicate purpose
|
||||
for _, class := range classes {
|
||||
switch {
|
||||
case strings.Contains(class, "title"):
|
||||
return "title"
|
||||
case strings.Contains(class, "headline"):
|
||||
return "headline"
|
||||
case strings.Contains(class, "description"):
|
||||
return "description"
|
||||
case strings.Contains(class, "subtitle"):
|
||||
return "subtitle"
|
||||
case strings.Contains(class, "cta"):
|
||||
return "cta"
|
||||
case strings.Contains(class, "button"):
|
||||
return "button"
|
||||
case strings.Contains(class, "logo"):
|
||||
return "logo"
|
||||
case strings.Contains(class, "lead"):
|
||||
return "lead"
|
||||
}
|
||||
}
|
||||
|
||||
// Infer purpose from HTML tag
|
||||
switch tag {
|
||||
case "h1":
|
||||
return "title"
|
||||
case "h2":
|
||||
return "subtitle"
|
||||
case "h3", "h4", "h5", "h6":
|
||||
return "heading"
|
||||
case "p":
|
||||
return "text"
|
||||
case "a":
|
||||
return "link"
|
||||
case "button":
|
||||
return "button"
|
||||
default:
|
||||
return "content"
|
||||
}
|
||||
}
|
||||
|
||||
// getContentHash creates a short hash of the content for ID generation
|
||||
func (g *IDGenerator) getContentHash(node *html.Node) string {
|
||||
text := extractTextContent(node)
|
||||
|
||||
// Create hash of the text content
|
||||
hash := fmt.Sprintf("%x", sha1.Sum([]byte(text)))
|
||||
|
||||
// Return first 6 characters for brevity
|
||||
return hash[:6]
|
||||
}
|
||||
|
||||
// createBaseID creates the base ID from components
|
||||
func (g *IDGenerator) createBaseID(context, purpose, contentHash string) string {
|
||||
parts := []string{}
|
||||
|
||||
// Add context if meaningful
|
||||
if context != "content" {
|
||||
parts = append(parts, context)
|
||||
}
|
||||
|
||||
// Add purpose
|
||||
parts = append(parts, purpose)
|
||||
|
||||
// Always add content hash for uniqueness
|
||||
parts = append(parts, contentHash)
|
||||
|
||||
baseID := strings.Join(parts, "-")
|
||||
|
||||
// Clean up the ID
|
||||
baseID = regexp.MustCompile(`-+`).ReplaceAllString(baseID, "-")
|
||||
baseID = strings.Trim(baseID, "-")
|
||||
|
||||
// Ensure it's not empty
|
||||
if baseID == "" {
|
||||
baseID = fmt.Sprintf("content-%s", contentHash)
|
||||
}
|
||||
|
||||
return baseID
|
||||
}
|
||||
|
||||
// ensureUnique makes sure the ID is unique by adding a suffix if needed
|
||||
func (g *IDGenerator) ensureUnique(baseID string) string {
|
||||
if !g.usedIDs[baseID] {
|
||||
g.usedIDs[baseID] = true
|
||||
return baseID
|
||||
}
|
||||
|
||||
// If base ID is taken, add a hash suffix
|
||||
hash := fmt.Sprintf("%x", sha1.Sum([]byte(baseID)))[:6]
|
||||
uniqueID := fmt.Sprintf("%s-%s", baseID, hash)
|
||||
|
||||
g.usedIDs[uniqueID] = true
|
||||
return uniqueID
|
||||
}
|
||||
@@ -1,229 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// Parser handles HTML parsing and element detection
|
||||
type Parser struct {
|
||||
idGenerator *IDGenerator
|
||||
}
|
||||
|
||||
// New creates a new Parser instance
|
||||
func New() *Parser {
|
||||
return &Parser{
|
||||
idGenerator: NewIDGenerator(),
|
||||
}
|
||||
}
|
||||
|
||||
// ParseDirectory parses all HTML files in the given directory
|
||||
func (p *Parser) ParseDirectory(dir string) (*ParseResult, error) {
|
||||
result := &ParseResult{
|
||||
Elements: []Element{},
|
||||
Warnings: []string{},
|
||||
Stats: ParseStats{
|
||||
TypeBreakdown: make(map[ContentType]int),
|
||||
},
|
||||
}
|
||||
|
||||
err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Only process HTML files
|
||||
if d.IsDir() || !strings.HasSuffix(strings.ToLower(path), ".html") {
|
||||
return nil
|
||||
}
|
||||
|
||||
elements, warnings, err := p.parseFile(path)
|
||||
if err != nil {
|
||||
result.Warnings = append(result.Warnings,
|
||||
fmt.Sprintf("Error parsing %s: %v", path, err))
|
||||
return nil // Continue processing other files
|
||||
}
|
||||
|
||||
result.Elements = append(result.Elements, elements...)
|
||||
result.Warnings = append(result.Warnings, warnings...)
|
||||
result.Stats.FilesProcessed++
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error walking directory: %w", err)
|
||||
}
|
||||
|
||||
// Calculate statistics
|
||||
p.calculateStats(result)
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseFile parses a single HTML file
|
||||
func (p *Parser) parseFile(filePath string) ([]Element, []string, error) {
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("error opening file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
doc, err := html.Parse(file)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("error parsing HTML: %w", err)
|
||||
}
|
||||
|
||||
var elements []Element
|
||||
var warnings []string
|
||||
|
||||
p.findInsertrElements(doc, filePath, &elements, &warnings)
|
||||
|
||||
return elements, warnings, nil
|
||||
}
|
||||
|
||||
// findInsertrElements recursively finds all elements with "insertr" class
|
||||
func (p *Parser) findInsertrElements(node *html.Node, filePath string, elements *[]Element, warnings *[]string) {
|
||||
if node.Type == html.ElementNode {
|
||||
classes := getClasses(node)
|
||||
|
||||
// Check if element has "insertr" class
|
||||
if containsClass(classes, "insertr") {
|
||||
if isContainer(node) {
|
||||
// Container element - expand to viable children
|
||||
viableChildren := findViableChildren(node)
|
||||
for _, child := range viableChildren {
|
||||
childClasses := getClasses(child)
|
||||
element, warning := p.createElement(child, filePath, childClasses)
|
||||
*elements = append(*elements, element)
|
||||
if warning != "" {
|
||||
*warnings = append(*warnings, warning)
|
||||
}
|
||||
}
|
||||
|
||||
// Don't process children recursively since we've handled the container's children
|
||||
return
|
||||
} else {
|
||||
// Regular element - process as before
|
||||
element, warning := p.createElement(node, filePath, classes)
|
||||
*elements = append(*elements, element)
|
||||
if warning != "" {
|
||||
*warnings = append(*warnings, warning)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Recursively check children
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
p.findInsertrElements(child, filePath, elements, warnings)
|
||||
}
|
||||
}
|
||||
|
||||
// createElement creates an Element from an HTML node
|
||||
func (p *Parser) createElement(node *html.Node, filePath string, classes []string) (Element, string) {
|
||||
var warning string
|
||||
|
||||
// Resolve content ID (existing or generated)
|
||||
contentID, hasExistingID := p.resolveContentID(node)
|
||||
if !hasExistingID {
|
||||
contentID = p.idGenerator.Generate(node)
|
||||
}
|
||||
|
||||
// Detect content type
|
||||
contentType := p.detectContentType(node, classes)
|
||||
|
||||
// Extract text content
|
||||
content := extractTextContent(node)
|
||||
|
||||
element := Element{
|
||||
FilePath: filePath,
|
||||
Node: node,
|
||||
ContentID: contentID,
|
||||
Type: contentType,
|
||||
Tag: strings.ToLower(node.Data),
|
||||
Classes: classes,
|
||||
Content: content,
|
||||
HasID: hasExistingID,
|
||||
Generated: !hasExistingID,
|
||||
}
|
||||
|
||||
// Generate warnings for edge cases
|
||||
if content == "" {
|
||||
warning = fmt.Sprintf("Element <%s> with id '%s' has no text content",
|
||||
element.Tag, element.ContentID)
|
||||
}
|
||||
|
||||
return element, warning
|
||||
}
|
||||
|
||||
// resolveContentID gets the content ID from existing attributes
|
||||
func (p *Parser) resolveContentID(node *html.Node) (string, bool) {
|
||||
// 1. Check for existing HTML id attribute
|
||||
if id := getAttribute(node, "id"); id != "" {
|
||||
return id, true
|
||||
}
|
||||
|
||||
// 2. Check for data-content-id attribute
|
||||
if contentID := getAttribute(node, "data-content-id"); contentID != "" {
|
||||
return contentID, true
|
||||
}
|
||||
|
||||
// 3. No existing ID found
|
||||
return "", false
|
||||
}
|
||||
|
||||
// detectContentType determines the content type based on element and classes
|
||||
func (p *Parser) detectContentType(node *html.Node, classes []string) ContentType {
|
||||
// Check for explicit type classes first
|
||||
if containsClass(classes, "insertr-markdown") {
|
||||
return ContentMarkdown
|
||||
}
|
||||
if containsClass(classes, "insertr-link") {
|
||||
return ContentLink
|
||||
}
|
||||
if containsClass(classes, "insertr-text") {
|
||||
return ContentText
|
||||
}
|
||||
|
||||
// Infer from HTML tag and context
|
||||
tag := strings.ToLower(node.Data)
|
||||
switch tag {
|
||||
case "h1", "h2", "h3", "h4", "h5", "h6":
|
||||
return ContentText
|
||||
case "p":
|
||||
// Paragraphs default to markdown for rich content
|
||||
return ContentMarkdown
|
||||
case "a", "button":
|
||||
return ContentLink
|
||||
case "div", "section":
|
||||
// Default divs/sections to markdown for rich content
|
||||
return ContentMarkdown
|
||||
case "span":
|
||||
return ContentText
|
||||
default:
|
||||
return ContentText
|
||||
}
|
||||
}
|
||||
|
||||
// calculateStats computes statistics for the parse result
|
||||
func (p *Parser) calculateStats(result *ParseResult) {
|
||||
result.Stats.TotalElements = len(result.Elements)
|
||||
|
||||
for _, element := range result.Elements {
|
||||
// Count existing vs generated IDs
|
||||
if element.HasID {
|
||||
result.Stats.ExistingIDs++
|
||||
} else {
|
||||
result.Stats.GeneratedIDs++
|
||||
}
|
||||
|
||||
// Count content types
|
||||
result.Stats.TypeBreakdown[element.Type]++
|
||||
}
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
package parser
|
||||
|
||||
import "golang.org/x/net/html"
|
||||
|
||||
// ContentType represents the type of editable content
|
||||
type ContentType string
|
||||
|
||||
const (
|
||||
ContentText ContentType = "text"
|
||||
ContentMarkdown ContentType = "markdown"
|
||||
ContentLink ContentType = "link"
|
||||
)
|
||||
|
||||
// Element represents a parsed editable element
|
||||
type Element struct {
|
||||
FilePath string `json:"file_path"`
|
||||
Node *html.Node `json:"-"` // Don't serialize HTML node
|
||||
ContentID string `json:"content_id"`
|
||||
Type ContentType `json:"type"`
|
||||
Tag string `json:"tag"`
|
||||
Classes []string `json:"classes"`
|
||||
Content string `json:"content"`
|
||||
HasID bool `json:"has_id"` // Whether element had existing ID
|
||||
Generated bool `json:"generated"` // Whether ID was generated
|
||||
}
|
||||
|
||||
// ParseResult contains the results of parsing HTML files
|
||||
type ParseResult struct {
|
||||
Elements []Element `json:"elements"`
|
||||
Warnings []string `json:"warnings"`
|
||||
Stats ParseStats `json:"stats"`
|
||||
}
|
||||
|
||||
// ParseStats provides statistics about the parsing operation
|
||||
type ParseStats struct {
|
||||
FilesProcessed int `json:"files_processed"`
|
||||
TotalElements int `json:"total_elements"`
|
||||
ExistingIDs int `json:"existing_ids"`
|
||||
GeneratedIDs int `json:"generated_ids"`
|
||||
TypeBreakdown map[ContentType]int `json:"type_breakdown"`
|
||||
}
|
||||
@@ -1,159 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// getClasses extracts CSS classes from an HTML node
|
||||
func getClasses(node *html.Node) []string {
|
||||
classAttr := getAttribute(node, "class")
|
||||
if classAttr == "" {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
classes := strings.Fields(classAttr)
|
||||
return classes
|
||||
}
|
||||
|
||||
// containsClass checks if a class list contains a specific class
|
||||
func containsClass(classes []string, target string) bool {
|
||||
for _, class := range classes {
|
||||
if class == target {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// getAttribute gets an attribute value from an HTML node
|
||||
func getAttribute(node *html.Node, key string) string {
|
||||
for _, attr := range node.Attr {
|
||||
if attr.Key == key {
|
||||
return attr.Val
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// extractTextContent gets the text content from an HTML node
|
||||
func extractTextContent(node *html.Node) string {
|
||||
var text strings.Builder
|
||||
extractTextRecursive(node, &text)
|
||||
return strings.TrimSpace(text.String())
|
||||
}
|
||||
|
||||
// extractTextRecursive recursively extracts text from node and children
|
||||
func extractTextRecursive(node *html.Node, text *strings.Builder) {
|
||||
if node.Type == html.TextNode {
|
||||
text.WriteString(node.Data)
|
||||
}
|
||||
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
// Skip script and style elements
|
||||
if child.Type == html.ElementNode &&
|
||||
(child.Data == "script" || child.Data == "style") {
|
||||
continue
|
||||
}
|
||||
extractTextRecursive(child, text)
|
||||
}
|
||||
}
|
||||
|
||||
// hasOnlyTextContent checks if a node contains only text content (no nested HTML elements)
|
||||
func hasOnlyTextContent(node *html.Node) bool {
|
||||
if node.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
switch child.Type {
|
||||
case html.ElementNode:
|
||||
// Found a nested HTML element - not text-only
|
||||
return false
|
||||
case html.TextNode:
|
||||
// Text nodes are fine, continue checking
|
||||
continue
|
||||
default:
|
||||
// Comments, etc. - continue checking
|
||||
continue
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// isContainer reports whether the node is an element whose tag is typically used as a container
|
||||
func isContainer(node *html.Node) bool {
|
||||
if node.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
containerTags := map[string]bool{
|
||||
"div": true,
|
||||
"section": true,
|
||||
"article": true,
|
||||
"header": true,
|
||||
"footer": true,
|
||||
"main": true,
|
||||
"aside": true,
|
||||
"nav": true,
|
||||
}
|
||||
|
||||
return containerTags[node.Data]
|
||||
}
|
||||
|
||||
// findViableChildren finds all child elements that are viable for editing
|
||||
func findViableChildren(node *html.Node) []*html.Node {
|
||||
var viable []*html.Node
|
||||
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
// Skip whitespace-only text nodes
|
||||
if child.Type == html.TextNode {
|
||||
if strings.TrimSpace(child.Data) == "" {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Only consider element nodes
|
||||
if child.Type != html.ElementNode {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip self-closing elements for now
|
||||
if isSelfClosing(child) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Check if element has only text content
|
||||
if hasOnlyTextContent(child) {
|
||||
viable = append(viable, child)
|
||||
}
|
||||
}
|
||||
|
||||
return viable
|
||||
}
|
||||
|
||||
// isSelfClosing checks if an element is typically self-closing
|
||||
func isSelfClosing(node *html.Node) bool {
|
||||
if node.Type != html.ElementNode {
|
||||
return false
|
||||
}
|
||||
|
||||
selfClosingTags := map[string]bool{
|
||||
"img": true,
|
||||
"input": true,
|
||||
"br": true,
|
||||
"hr": true,
|
||||
"meta": true,
|
||||
"link": true,
|
||||
"area": true,
|
||||
"base": true,
|
||||
"col": true,
|
||||
"embed": true,
|
||||
"source": true,
|
||||
"track": true,
|
||||
"wbr": true,
|
||||
}
|
||||
|
||||
return selfClosingTags[node.Data]
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Rebuild library and copy to CLI assets
|
||||
# Used by Air for hot reloading when the library changes
|
||||
|
||||
set -e
|
||||
|
||||
echo "🔄 Library changed, rebuilding..."
|
||||
|
||||
# Build the library (this will also copy to demo-site)
|
||||
cd ../lib
|
||||
npm run build --silent
|
||||
|
||||
# Copy to CLI assets
|
||||
echo "📁 Copying updated library to CLI assets..."
|
||||
cp dist/* ../insertr-cli/pkg/content/assets/
|
||||
|
||||
# Get library version for confirmation
|
||||
VERSION=$(node -e "console.log(require('./package.json').version)")
|
||||
echo "✅ Library v$VERSION ready for CLI embedding"
|
||||
@@ -1,144 +0,0 @@
|
||||
# ⚠️ Deprecated: Insertr Content Server
|
||||
|
||||
> **This standalone server has been replaced by the unified `insertr` binary.**
|
||||
> **Please see the [main README](../README.md) for updated instructions.**
|
||||
|
||||
## 🔄 Migration to Unified Binary
|
||||
|
||||
The Insertr Content Server functionality has been integrated into the unified `insertr` binary. Instead of running a separate server binary, use:
|
||||
|
||||
```bash
|
||||
# Old approach (deprecated)
|
||||
./insertr-server --port 8080
|
||||
|
||||
# New unified approach
|
||||
./insertr serve --port 8080 --dev-mode
|
||||
```
|
||||
|
||||
## ✅ All Features Preserved
|
||||
|
||||
The unified binary includes all server functionality:
|
||||
|
||||
- **Content Management**: Full CRUD operations for content items
|
||||
- **Version Control**: Complete edit history with rollback functionality
|
||||
- **User Attribution**: Track who made each change
|
||||
- **Type-Safe Database**: Uses sqlc to generate Go code from SQL
|
||||
- **SQLite & PostgreSQL**: Database flexibility from development to production
|
||||
|
||||
## 🚀 Updated API Endpoints
|
||||
|
||||
### Content Operations (Unchanged)
|
||||
- `GET /api/content?site_id={site}` - Get all content for a site
|
||||
- `GET /api/content/{id}?site_id={site}` - Get single content item
|
||||
- `GET /api/content/bulk?site_id={site}&ids[]={id1}&ids[]={id2}` - Get multiple content items
|
||||
- `POST /api/content` - Create new content
|
||||
- `PUT /api/content/{id}?site_id={site}` - Update existing content
|
||||
- `DELETE /api/content/{id}?site_id={site}` - Delete content
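
For example, reading content back is a plain GET; the `demo` site id and `hero-title` content id below are just the sample values used elsewhere in this README:

```bash
# All content for a site
curl "http://localhost:8080/api/content?site_id=demo"

# A single content item
curl "http://localhost:8080/api/content/hero-title?site_id=demo"
```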
|
||||
|
||||
### Version Control (Unchanged)
|
||||
- `GET /api/content/{id}/versions?site_id={site}` - Get version history
|
||||
- `POST /api/content/{id}/rollback?site_id={site}` - Rollback to specific version
|
||||
|
||||
### Health & Status (Unchanged)
|
||||
- `GET /health` - Server health check
|
||||
|
||||
## 🚀 New Quick Start (Unified Binary)
|
||||
|
||||
```bash
|
||||
# Build unified binary (from project root)
|
||||
go build -o insertr .
|
||||
|
||||
# Start server with development mode
|
||||
./insertr serve --dev-mode --port 8080
|
||||
|
||||
# Start production server
|
||||
./insertr serve --port 8080 --db "postgresql://user:pass@host/db"
|
||||
|
||||
# Check health (same endpoint)
|
||||
curl http://localhost:8080/health
|
||||
```
|
||||
|
||||
## User Attribution (Unchanged)
|
||||
|
||||
All content operations still support user attribution via the `X-User-ID` header:
|
||||
|
||||
```bash
|
||||
curl -X PUT "http://localhost:8080/api/content/hero-title?site_id=demo" \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-User-ID: john@example.com" \
|
||||
-d '{"value": "Updated content"}'
|
||||
```
|
||||
|
||||
## 🛠️ Development (Updated for Unified Binary)
|
||||
|
||||
### Using sqlc (From Project Root)
|
||||
|
||||
```bash
|
||||
# Install sqlc
|
||||
go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
|
||||
|
||||
# Generate Go code from SQL (from project root)
|
||||
sqlc generate
|
||||
|
||||
# Build unified binary
|
||||
go build -o insertr .
|
||||
|
||||
# Development with hot reload
|
||||
just dev # Full-stack development
|
||||
air # Hot reload unified binary only
|
||||
```
|
||||
|
||||
### Database Schema (Location Updated)
|
||||
|
||||
See `db/sqlite/schema.sql` and `db/postgresql/schema.sql` for database-specific schemas. Key tables:
|
||||
|
||||
- `content` - Current content versions
|
||||
- `content_versions` - Complete version history
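
To eyeball these tables locally, the standard `sqlite3` shell works against the server's database file (shown here with the default `./insertr.db` path; adjust for your configuration):

```bash
# Inspect the current-content and version-history tables
sqlite3 insertr.db ".schema content"
sqlite3 insertr.db ".schema content_versions"
```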
|
||||
|
||||
### Example Version Control Workflow
|
||||
|
||||
```bash
|
||||
# Create content
|
||||
curl -X POST "http://localhost:8080/api/content" \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-User-ID: alice@example.com" \
|
||||
-d '{
|
||||
"id": "hero-title",
|
||||
"site_id": "demo",
|
||||
"value": "Original Title",
|
||||
"type": "text"
|
||||
}'
|
||||
|
||||
# Update content (creates version)
|
||||
curl -X PUT "http://localhost:8080/api/content/hero-title?site_id=demo" \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-User-ID: bob@example.com" \
|
||||
-d '{"value": "Updated Title"}'
|
||||
|
||||
# View version history
|
||||
curl "http://localhost:8080/api/content/hero-title/versions?site_id=demo"
|
||||
|
||||
# Rollback to version 1
|
||||
curl -X POST "http://localhost:8080/api/content/hero-title/rollback?site_id=demo" \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-User-ID: admin@example.com" \
|
||||
-d '{"version_id": 1}'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📖 For Complete Documentation
|
||||
|
||||
**➡️ See the [main README](../README.md) for:**
|
||||
- Unified binary installation and usage
|
||||
- Complete configuration options (YAML, environment variables, CLI flags)
|
||||
- Development workflow with `just dev`
|
||||
- Production deployment guidance
|
||||
- Full architecture documentation
|
||||
|
||||
**➡️ See [INTEGRATION-SUMMARY.md](../INTEGRATION-SUMMARY.md) for:**
|
||||
- Technical architecture details
|
||||
- Database schema information
|
||||
- API integration examples
|
||||
|
||||
The unified `insertr` binary provides all server functionality with improved developer experience and simplified deployment.
|
||||
@@ -1,107 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/insertr/server/internal/api"
|
||||
"github.com/insertr/server/internal/db"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Command line flags
|
||||
var (
|
||||
port = flag.Int("port", 8080, "Server port")
|
||||
dbPath = flag.String("db", "./insertr.db", "SQLite database path")
|
||||
)
|
||||
flag.Parse()
|
||||
|
||||
// Initialize database
|
||||
database, err := db.NewDatabase(*dbPath)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to initialize database: %v", err)
|
||||
}
|
||||
defer database.Close()
|
||||
|
||||
// Initialize handlers
|
||||
contentHandler := api.NewContentHandler(database)
|
||||
|
||||
// Setup router
|
||||
router := mux.NewRouter()
|
||||
|
||||
// Add middleware
|
||||
router.Use(api.CORSMiddleware)
|
||||
router.Use(api.LoggingMiddleware)
|
||||
router.Use(api.ContentTypeMiddleware)
|
||||
|
||||
// Health check endpoint
|
||||
router.HandleFunc("/health", api.HealthMiddleware())
|
||||
|
||||
// API routes
|
||||
apiRouter := router.PathPrefix("/api/content").Subrouter()
|
||||
|
||||
// Content endpoints matching the expected API contract
|
||||
apiRouter.HandleFunc("/bulk", contentHandler.GetBulkContent).Methods("GET")
|
||||
apiRouter.HandleFunc("/{id}/versions", contentHandler.GetContentVersions).Methods("GET")
|
||||
apiRouter.HandleFunc("/{id}/rollback", contentHandler.RollbackContent).Methods("POST")
|
||||
apiRouter.HandleFunc("/{id}", contentHandler.GetContent).Methods("GET")
|
||||
apiRouter.HandleFunc("/{id}", contentHandler.UpdateContent).Methods("PUT")
|
||||
apiRouter.HandleFunc("/{id}", contentHandler.DeleteContent).Methods("DELETE")
|
||||
apiRouter.HandleFunc("", contentHandler.GetAllContent).Methods("GET")
|
||||
apiRouter.HandleFunc("", contentHandler.CreateContent).Methods("POST")
|
||||
|
||||
// Handle CORS preflight requests explicitly
|
||||
apiRouter.HandleFunc("/{id}/versions", api.CORSPreflightHandler).Methods("OPTIONS")
|
||||
apiRouter.HandleFunc("/{id}/rollback", api.CORSPreflightHandler).Methods("OPTIONS")
|
||||
apiRouter.HandleFunc("/{id}", api.CORSPreflightHandler).Methods("OPTIONS")
|
||||
apiRouter.HandleFunc("", api.CORSPreflightHandler).Methods("OPTIONS")
|
||||
apiRouter.HandleFunc("/bulk", api.CORSPreflightHandler).Methods("OPTIONS")
|
||||
|
||||
// Start server
|
||||
addr := fmt.Sprintf(":%d", *port)
|
||||
fmt.Printf("🚀 Insertr Content Server starting...\n")
|
||||
fmt.Printf("📁 Database: %s\n", *dbPath)
|
||||
fmt.Printf("🌐 Server running at: http://localhost%s\n", addr)
|
||||
fmt.Printf("💚 Health check: http://localhost%s/health\n", addr)
|
||||
fmt.Printf("📊 API endpoints:\n")
|
||||
fmt.Printf(" GET /api/content?site_id={site}\n")
|
||||
fmt.Printf(" GET /api/content/{id}?site_id={site}\n")
|
||||
fmt.Printf(" GET /api/content/bulk?site_id={site}&ids[]={id1}&ids[]={id2}\n")
|
||||
fmt.Printf(" GET /api/content/{id}/versions?site_id={site}\n")
|
||||
fmt.Printf(" POST /api/content\n")
|
||||
fmt.Printf(" PUT /api/content/{id}\n")
|
||||
fmt.Printf(" POST /api/content/{id}/rollback\n")
|
||||
fmt.Printf(" DELETE /api/content/{id}?site_id={site}\n")
|
||||
fmt.Printf("\n🔄 Press Ctrl+C to shutdown gracefully\n\n")
|
||||
|
||||
// Setup graceful shutdown
|
||||
server := &http.Server{
|
||||
Addr: addr,
|
||||
Handler: router,
|
||||
}
|
||||
|
||||
// Start server in a goroutine
|
||||
go func() {
|
||||
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||
log.Fatalf("Server failed to start: %v", err)
|
||||
}
|
||||
}()
|
||||
|
||||
// Wait for interrupt signal
|
||||
quit := make(chan os.Signal, 1)
|
||||
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-quit
|
||||
|
||||
fmt.Println("\n🛑 Shutting down server...")
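// Note: server.Close() stops immediately without draining in-flight requests;
// http.Server.Shutdown(ctx) would be the graceful variant.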
|
||||
if err := server.Close(); err != nil {
|
||||
log.Fatalf("Server forced to shutdown: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("✅ Server shutdown complete")
|
||||
}
|
||||
@@ -1,42 +0,0 @@
|
||||
-- PostgreSQL-specific schema with BIGINT UNIX timestamps
|
||||
-- Main content table (current versions only)
|
||||
CREATE TABLE content (
|
||||
id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
|
||||
created_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()) NOT NULL,
|
||||
updated_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()) NOT NULL,
|
||||
last_edited_by TEXT DEFAULT 'system' NOT NULL,
|
||||
PRIMARY KEY (id, site_id)
|
||||
);
|
||||
|
||||
-- Version history table for rollback functionality
|
||||
CREATE TABLE content_versions (
|
||||
version_id SERIAL PRIMARY KEY,
|
||||
content_id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
created_at BIGINT DEFAULT EXTRACT(EPOCH FROM NOW()) NOT NULL,
|
||||
created_by TEXT DEFAULT 'system' NOT NULL
|
||||
);
|
||||
|
||||
-- Indexes for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);
|
||||
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);
|
||||
|
||||
-- Function and trigger to automatically update updated_at timestamp
|
||||
CREATE OR REPLACE FUNCTION update_updated_at_column()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = EXTRACT(EPOCH FROM NOW());
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ language 'plpgsql';
|
||||
|
||||
CREATE TRIGGER update_content_updated_at
|
||||
BEFORE UPDATE ON content
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION update_updated_at_column();
|
||||
@@ -1,47 +0,0 @@
|
||||
-- name: InitializeSchema :exec
|
||||
CREATE TABLE IF NOT EXISTS content (
|
||||
id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
|
||||
created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||
updated_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||
last_edited_by TEXT DEFAULT 'system' NOT NULL,
|
||||
PRIMARY KEY (id, site_id)
|
||||
);
|
||||
|
||||
-- name: InitializeVersionsTable :exec
|
||||
CREATE TABLE IF NOT EXISTS content_versions (
|
||||
version_id SERIAL PRIMARY KEY,
|
||||
content_id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||
created_by TEXT DEFAULT 'system' NOT NULL
|
||||
);
|
||||
|
||||
-- name: CreateContentSiteIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);
|
||||
|
||||
-- name: CreateContentUpdatedAtIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);
|
||||
|
||||
-- name: CreateVersionsLookupIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);
|
||||
|
||||
-- name: CreateUpdateFunction :exec
|
||||
CREATE OR REPLACE FUNCTION update_content_timestamp()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = EXTRACT(EPOCH FROM NOW());
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- name: CreateUpdateTrigger :exec
|
||||
DROP TRIGGER IF EXISTS update_content_updated_at ON content;
|
||||
CREATE TRIGGER update_content_updated_at
|
||||
BEFORE UPDATE ON content
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION update_content_timestamp();
|
||||
@@ -1,30 +0,0 @@
|
||||
-- name: GetContent :one
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE id = sqlc.arg(id) AND site_id = sqlc.arg(site_id);
|
||||
|
||||
-- name: GetAllContent :many
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE site_id = sqlc.arg(site_id)
|
||||
ORDER BY updated_at DESC;
|
||||
|
||||
-- name: GetBulkContent :many
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE site_id = sqlc.arg(site_id) AND id IN (sqlc.slice('ids'));
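-- Note: sqlc.slice('ids') expands into an IN (...) parameter list sized to the ids passed at query time.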
|
||||
|
||||
-- name: CreateContent :one
|
||||
INSERT INTO content (id, site_id, value, type, last_edited_by)
|
||||
VALUES (sqlc.arg(id), sqlc.arg(site_id), sqlc.arg(value), sqlc.arg(type), sqlc.arg(last_edited_by))
|
||||
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by;
|
||||
|
||||
-- name: UpdateContent :one
|
||||
UPDATE content
|
||||
SET value = sqlc.arg(value), type = sqlc.arg(type), last_edited_by = sqlc.arg(last_edited_by)
|
||||
WHERE id = sqlc.arg(id) AND site_id = sqlc.arg(site_id)
|
||||
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by;
|
||||
|
||||
-- name: DeleteContent :exec
|
||||
DELETE FROM content
|
||||
WHERE id = sqlc.arg(id) AND site_id = sqlc.arg(site_id);
|
||||
@@ -1,29 +0,0 @@
|
||||
-- name: CreateContentVersion :exec
|
||||
INSERT INTO content_versions (content_id, site_id, value, type, created_by)
|
||||
VALUES (sqlc.arg(content_id), sqlc.arg(site_id), sqlc.arg(value), sqlc.arg(type), sqlc.arg(created_by));
|
||||
|
||||
-- name: GetContentVersionHistory :many
|
||||
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||
FROM content_versions
|
||||
WHERE content_id = sqlc.arg(content_id) AND site_id = sqlc.arg(site_id)
|
||||
ORDER BY created_at DESC
|
||||
LIMIT sqlc.arg(limit_count);
|
||||
|
||||
-- name: GetContentVersion :one
|
||||
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||
FROM content_versions
|
||||
WHERE version_id = sqlc.arg(version_id);
|
||||
|
||||
-- name: GetAllVersionsForSite :many
|
||||
SELECT
|
||||
cv.version_id, cv.content_id, cv.site_id, cv.value, cv.type, cv.created_at, cv.created_by,
|
||||
c.value as current_value
|
||||
FROM content_versions cv
|
||||
LEFT JOIN content c ON cv.content_id = c.id AND cv.site_id = c.site_id
|
||||
WHERE cv.site_id = sqlc.arg(site_id)
|
||||
ORDER BY cv.created_at DESC
|
||||
LIMIT sqlc.arg(limit_count);
|
||||
|
||||
-- name: DeleteOldVersions :exec
|
||||
DELETE FROM content_versions
|
||||
WHERE created_at < sqlc.arg(created_before) AND site_id = sqlc.arg(site_id);
|
||||
@@ -1,36 +0,0 @@
|
||||
-- SQLite-specific schema with INTEGER timestamps
|
||||
-- Main content table (current versions only)
|
||||
CREATE TABLE content (
|
||||
id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
|
||||
created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
updated_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
last_edited_by TEXT DEFAULT 'system' NOT NULL,
|
||||
PRIMARY KEY (id, site_id)
|
||||
);
|
||||
|
||||
-- Version history table for rollback functionality
|
||||
CREATE TABLE content_versions (
|
||||
version_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
content_id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
created_by TEXT DEFAULT 'system' NOT NULL
|
||||
);
|
||||
|
||||
-- Indexes for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);
|
||||
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);
|
||||
|
||||
-- Trigger to automatically update updated_at timestamp
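-- (The inner UPDATE does not re-fire this trigger: SQLite keeps recursive_triggers off by default.)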
|
||||
CREATE TRIGGER IF NOT EXISTS update_content_updated_at
|
||||
AFTER UPDATE ON content
|
||||
FOR EACH ROW
|
||||
BEGIN
|
||||
UPDATE content SET updated_at = strftime('%s', 'now') WHERE id = NEW.id AND site_id = NEW.site_id;
|
||||
END;
|
||||
@@ -1,39 +0,0 @@
|
||||
-- name: InitializeSchema :exec
|
||||
CREATE TABLE IF NOT EXISTS content (
|
||||
id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
|
||||
created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
updated_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
last_edited_by TEXT DEFAULT 'system' NOT NULL,
|
||||
PRIMARY KEY (id, site_id)
|
||||
);
|
||||
|
||||
-- name: InitializeVersionsTable :exec
|
||||
CREATE TABLE IF NOT EXISTS content_versions (
|
||||
version_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
content_id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
created_by TEXT DEFAULT 'system' NOT NULL
|
||||
);
|
||||
|
||||
-- name: CreateContentSiteIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);
|
||||
|
||||
-- name: CreateContentUpdatedAtIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);
|
||||
|
||||
-- name: CreateVersionsLookupIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);
|
||||
|
||||
-- name: CreateUpdateTrigger :exec
|
||||
CREATE TRIGGER IF NOT EXISTS update_content_updated_at
|
||||
AFTER UPDATE ON content
|
||||
FOR EACH ROW
|
||||
BEGIN
|
||||
UPDATE content SET updated_at = strftime('%s', 'now') WHERE id = NEW.id AND site_id = NEW.site_id;
|
||||
END;
|
||||
@@ -1,10 +0,0 @@
|
||||
module github.com/insertr/server
|
||||
|
||||
go 1.24.6
|
||||
|
||||
require (
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/mattn/go-sqlite3 v1.14.32
|
||||
)
|
||||
|
||||
require github.com/lib/pq v1.10.9 // indirect
|
||||
@@ -1,6 +0,0 @@
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
|
||||
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
Binary file not shown.
@@ -1,668 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/insertr/server/internal/db"
|
||||
"github.com/insertr/server/internal/db/postgresql"
|
||||
"github.com/insertr/server/internal/db/sqlite"
|
||||
)
|
||||
|
||||
// ContentHandler handles all content-related HTTP requests
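// Because sqlc generates a separate query package per engine (sqlite, postgresql), each
// handler switches on database.GetDBType(), calls the engine-specific queries, and maps
// the rows into the shared API models via the convertToAPI* helpers below.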
|
||||
type ContentHandler struct {
|
||||
database *db.Database
|
||||
}
|
||||
|
||||
// NewContentHandler creates a new content handler
|
||||
func NewContentHandler(database *db.Database) *ContentHandler {
|
||||
return &ContentHandler{
|
||||
database: database,
|
||||
}
|
||||
}
|
||||
|
||||
// GetContent handles GET /api/content/{id}
|
||||
func (h *ContentHandler) GetContent(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
contentID := vars["id"]
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
|
||||
if siteID == "" {
|
||||
http.Error(w, "site_id parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var content interface{}
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
content, err = h.database.GetSQLiteQueries().GetContent(context.Background(), sqlite.GetContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
case "postgresql":
|
||||
content, err = h.database.GetPostgreSQLQueries().GetContent(context.Background(), postgresql.GetContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
http.Error(w, "Content not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
item := h.convertToAPIContent(content)
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(item)
|
||||
}
|
||||
|
||||
// GetAllContent handles GET /api/content
|
||||
func (h *ContentHandler) GetAllContent(w http.ResponseWriter, r *http.Request) {
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
if siteID == "" {
|
||||
http.Error(w, "site_id parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var dbContent interface{}
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
dbContent, err = h.database.GetSQLiteQueries().GetAllContent(context.Background(), siteID)
|
||||
case "postgresql":
|
||||
dbContent, err = h.database.GetPostgreSQLQueries().GetAllContent(context.Background(), siteID)
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
items := h.convertToAPIContentList(dbContent)
|
||||
response := ContentResponse{Content: items}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(response)
|
||||
}
|
||||
|
||||
// GetBulkContent handles GET /api/content/bulk
|
||||
func (h *ContentHandler) GetBulkContent(w http.ResponseWriter, r *http.Request) {
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
if siteID == "" {
|
||||
http.Error(w, "site_id parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse ids parameter
|
||||
idsParam := r.URL.Query()["ids[]"]
|
||||
if len(idsParam) == 0 {
|
||||
// Try single ids parameter
|
||||
idsStr := r.URL.Query().Get("ids")
|
||||
if idsStr == "" {
|
||||
http.Error(w, "ids parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
idsParam = strings.Split(idsStr, ",")
|
||||
}
|
||||
|
||||
var dbContent interface{}
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
dbContent, err = h.database.GetSQLiteQueries().GetBulkContent(context.Background(), sqlite.GetBulkContentParams{
|
||||
SiteID: siteID,
|
||||
Ids: idsParam,
|
||||
})
|
||||
case "postgresql":
|
||||
dbContent, err = h.database.GetPostgreSQLQueries().GetBulkContent(context.Background(), postgresql.GetBulkContentParams{
|
||||
SiteID: siteID,
|
||||
Ids: idsParam,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
items := h.convertToAPIContentList(dbContent)
|
||||
response := ContentResponse{Content: items}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(response)
|
||||
}
|
||||
|
||||
// CreateContent handles POST /api/content
|
||||
func (h *ContentHandler) CreateContent(w http.ResponseWriter, r *http.Request) {
|
||||
var req CreateContentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
http.Error(w, "Invalid JSON", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
if siteID == "" {
|
||||
siteID = req.SiteID // fallback to request body
|
||||
}
|
||||
if siteID == "" {
|
||||
siteID = "default" // final fallback
|
||||
}
|
||||
|
||||
// Extract user from request (for now, use X-User-ID header or fallback)
|
||||
userID := r.Header.Get("X-User-ID")
|
||||
if userID == "" && req.CreatedBy != "" {
|
||||
userID = req.CreatedBy
|
||||
}
|
||||
if userID == "" {
|
||||
userID = "anonymous"
|
||||
}
|
||||
|
||||
var content interface{}
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
content, err = h.database.GetSQLiteQueries().CreateContent(context.Background(), sqlite.CreateContentParams{
|
||||
ID: req.ID,
|
||||
SiteID: siteID,
|
||||
Value: req.Value,
|
||||
Type: req.Type,
|
||||
LastEditedBy: userID,
|
||||
})
|
||||
case "postgresql":
|
||||
content, err = h.database.GetPostgreSQLQueries().CreateContent(context.Background(), postgresql.CreateContentParams{
|
||||
ID: req.ID,
|
||||
SiteID: siteID,
|
||||
Value: req.Value,
|
||||
Type: req.Type,
|
||||
LastEditedBy: userID,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Failed to create content: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
item := h.convertToAPIContent(content)
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
json.NewEncoder(w).Encode(item)
|
||||
}
|
||||
|
||||
// UpdateContent handles PUT /api/content/{id}
|
||||
func (h *ContentHandler) UpdateContent(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
contentID := vars["id"]
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
|
||||
if siteID == "" {
|
||||
http.Error(w, "site_id parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var req UpdateContentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
http.Error(w, "Invalid JSON", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Extract user from request
|
||||
userID := r.Header.Get("X-User-ID")
|
||||
if userID == "" && req.UpdatedBy != "" {
|
||||
userID = req.UpdatedBy
|
||||
}
|
||||
if userID == "" {
|
||||
userID = "anonymous"
|
||||
}
|
||||
|
||||
// Get current content for version history and type preservation
|
||||
var currentContent interface{}
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
currentContent, err = h.database.GetSQLiteQueries().GetContent(context.Background(), sqlite.GetContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
case "postgresql":
|
||||
currentContent, err = h.database.GetPostgreSQLQueries().GetContent(context.Background(), postgresql.GetContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
http.Error(w, "Content not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Archive current version before updating
|
||||
err = h.createContentVersion(currentContent)
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Failed to create version: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Determine content type
|
||||
contentType := req.Type
|
||||
if contentType == "" {
|
||||
contentType = h.getContentType(currentContent) // preserve existing type if not specified
|
||||
}
|
||||
|
||||
// Update the content
|
||||
var updatedContent interface{}
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
updatedContent, err = h.database.GetSQLiteQueries().UpdateContent(context.Background(), sqlite.UpdateContentParams{
|
||||
Value: req.Value,
|
||||
Type: contentType,
|
||||
LastEditedBy: userID,
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
case "postgresql":
|
||||
updatedContent, err = h.database.GetPostgreSQLQueries().UpdateContent(context.Background(), postgresql.UpdateContentParams{
|
||||
Value: req.Value,
|
||||
Type: contentType,
|
||||
LastEditedBy: userID,
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Failed to update content: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
item := h.convertToAPIContent(updatedContent)
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(item)
|
||||
}
|
||||
|
||||
// DeleteContent handles DELETE /api/content/{id}
|
||||
func (h *ContentHandler) DeleteContent(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
contentID := vars["id"]
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
|
||||
if siteID == "" {
|
||||
http.Error(w, "site_id parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
err = h.database.GetSQLiteQueries().DeleteContent(context.Background(), sqlite.DeleteContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
case "postgresql":
|
||||
err = h.database.GetPostgreSQLQueries().DeleteContent(context.Background(), postgresql.DeleteContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Failed to delete content: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// GetContentVersions handles GET /api/content/{id}/versions
|
||||
func (h *ContentHandler) GetContentVersions(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
contentID := vars["id"]
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
|
||||
if siteID == "" {
|
||||
http.Error(w, "site_id parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse limit parameter (default to 10)
|
||||
limit := int64(10)
|
||||
if limitStr := r.URL.Query().Get("limit"); limitStr != "" {
|
||||
if parsedLimit, err := strconv.ParseInt(limitStr, 10, 64); err == nil {
|
||||
limit = parsedLimit
|
||||
}
|
||||
}
|
||||
|
||||
var dbVersions interface{}
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
dbVersions, err = h.database.GetSQLiteQueries().GetContentVersionHistory(context.Background(), sqlite.GetContentVersionHistoryParams{
|
||||
ContentID: contentID,
|
||||
SiteID: siteID,
|
||||
LimitCount: limit,
|
||||
})
|
||||
case "postgresql":
|
||||
// Note: PostgreSQL's generated params take an int32 limit where SQLite's take int64
|
||||
dbVersions, err = h.database.GetPostgreSQLQueries().GetContentVersionHistory(context.Background(), postgresql.GetContentVersionHistoryParams{
|
||||
ContentID: contentID,
|
||||
SiteID: siteID,
|
||||
LimitCount: int32(limit),
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
versions := h.convertToAPIVersionList(dbVersions)
|
||||
response := ContentVersionsResponse{Versions: versions}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(response)
|
||||
}
|
||||
|
||||
// RollbackContent handles POST /api/content/{id}/rollback
|
||||
func (h *ContentHandler) RollbackContent(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
contentID := vars["id"]
|
||||
siteID := r.URL.Query().Get("site_id")
|
||||
|
||||
if siteID == "" {
|
||||
http.Error(w, "site_id parameter is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var req RollbackContentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
http.Error(w, "Invalid JSON", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Get the target version
|
||||
var targetVersion interface{}
|
||||
var err error
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
targetVersion, err = h.database.GetSQLiteQueries().GetContentVersion(context.Background(), req.VersionID)
|
||||
case "postgresql":
|
||||
targetVersion, err = h.database.GetPostgreSQLQueries().GetContentVersion(context.Background(), int32(req.VersionID))
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
http.Error(w, "Version not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
http.Error(w, fmt.Sprintf("Database error: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify the version belongs to the correct content
|
||||
if !h.versionMatches(targetVersion, contentID, siteID) {
|
||||
http.Error(w, "Version does not match content", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Extract user from request
|
||||
userID := r.Header.Get("X-User-ID")
|
||||
if userID == "" && req.RolledBackBy != "" {
|
||||
userID = req.RolledBackBy
|
||||
}
|
||||
if userID == "" {
|
||||
userID = "anonymous"
|
||||
}
|
||||
|
||||
// Archive current version before rollback
|
||||
var currentContent interface{}
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
currentContent, err = h.database.GetSQLiteQueries().GetContent(context.Background(), sqlite.GetContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
case "postgresql":
|
||||
currentContent, err = h.database.GetPostgreSQLQueries().GetContent(context.Background(), postgresql.GetContentParams{
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Failed to get current content: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
err = h.createContentVersion(currentContent)
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Failed to create version: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Rollback to target version
|
||||
var updatedContent interface{}
|
||||
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
sqliteVersion := targetVersion.(sqlite.ContentVersion)
|
||||
updatedContent, err = h.database.GetSQLiteQueries().UpdateContent(context.Background(), sqlite.UpdateContentParams{
|
||||
Value: sqliteVersion.Value,
|
||||
Type: sqliteVersion.Type,
|
||||
LastEditedBy: userID,
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
case "postgresql":
|
||||
pgVersion := targetVersion.(postgresql.ContentVersion)
|
||||
updatedContent, err = h.database.GetPostgreSQLQueries().UpdateContent(context.Background(), postgresql.UpdateContentParams{
|
||||
Value: pgVersion.Value,
|
||||
Type: pgVersion.Type,
|
||||
LastEditedBy: userID,
|
||||
ID: contentID,
|
||||
SiteID: siteID,
|
||||
})
|
||||
default:
|
||||
http.Error(w, "Unsupported database type", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("Failed to rollback content: %v", err), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
item := h.convertToAPIContent(updatedContent)
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(item)
|
||||
}
|
||||
|
||||
// Helper functions for type conversion
|
||||
func (h *ContentHandler) convertToAPIContent(content interface{}) ContentItem {
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
c := content.(sqlite.Content)
|
||||
return ContentItem{
|
||||
ID: c.ID,
|
||||
SiteID: c.SiteID,
|
||||
Value: c.Value,
|
||||
Type: c.Type,
|
||||
CreatedAt: time.Unix(c.CreatedAt, 0),
|
||||
UpdatedAt: time.Unix(c.UpdatedAt, 0),
|
||||
LastEditedBy: c.LastEditedBy,
|
||||
}
|
||||
case "postgresql":
|
||||
c := content.(postgresql.Content)
|
||||
return ContentItem{
|
||||
ID: c.ID,
|
||||
SiteID: c.SiteID,
|
||||
Value: c.Value,
|
||||
Type: c.Type,
|
||||
CreatedAt: time.Unix(c.CreatedAt, 0),
|
||||
UpdatedAt: time.Unix(c.UpdatedAt, 0),
|
||||
LastEditedBy: c.LastEditedBy,
|
||||
}
|
||||
}
|
||||
return ContentItem{} // Should never happen
|
||||
}
|
||||
|
||||
func (h *ContentHandler) convertToAPIContentList(contentList interface{}) []ContentItem {
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
list := contentList.([]sqlite.Content)
|
||||
items := make([]ContentItem, len(list))
|
||||
for i, content := range list {
|
||||
items[i] = h.convertToAPIContent(content)
|
||||
}
|
||||
return items
|
||||
case "postgresql":
|
||||
list := contentList.([]postgresql.Content)
|
||||
items := make([]ContentItem, len(list))
|
||||
for i, content := range list {
|
||||
items[i] = h.convertToAPIContent(content)
|
||||
}
|
||||
return items
|
||||
}
|
||||
return []ContentItem{} // Should never happen
|
||||
}
|
||||
|
||||
func (h *ContentHandler) convertToAPIVersionList(versionList interface{}) []ContentVersion {
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
list := versionList.([]sqlite.ContentVersion)
|
||||
versions := make([]ContentVersion, len(list))
|
||||
for i, version := range list {
|
||||
versions[i] = ContentVersion{
|
||||
VersionID: version.VersionID,
|
||||
ContentID: version.ContentID,
|
||||
SiteID: version.SiteID,
|
||||
Value: version.Value,
|
||||
Type: version.Type,
|
||||
CreatedAt: time.Unix(version.CreatedAt, 0),
|
||||
CreatedBy: version.CreatedBy,
|
||||
}
|
||||
}
|
||||
return versions
|
||||
case "postgresql":
|
||||
list := versionList.([]postgresql.ContentVersion)
|
||||
versions := make([]ContentVersion, len(list))
|
||||
for i, version := range list {
|
||||
versions[i] = ContentVersion{
|
||||
VersionID: int64(version.VersionID),
|
||||
ContentID: version.ContentID,
|
||||
SiteID: version.SiteID,
|
||||
Value: version.Value,
|
||||
Type: version.Type,
|
||||
CreatedAt: time.Unix(version.CreatedAt, 0),
|
||||
CreatedBy: version.CreatedBy,
|
||||
}
|
||||
}
|
||||
return versions
|
||||
}
|
||||
return []ContentVersion{} // Should never happen
|
||||
}
|
||||
|
||||
func (h *ContentHandler) createContentVersion(content interface{}) error {
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
c := content.(sqlite.Content)
|
||||
return h.database.GetSQLiteQueries().CreateContentVersion(context.Background(), sqlite.CreateContentVersionParams{
|
||||
ContentID: c.ID,
|
||||
SiteID: c.SiteID,
|
||||
Value: c.Value,
|
||||
Type: c.Type,
|
||||
CreatedBy: c.LastEditedBy,
|
||||
})
|
||||
case "postgresql":
|
||||
c := content.(postgresql.Content)
|
||||
return h.database.GetPostgreSQLQueries().CreateContentVersion(context.Background(), postgresql.CreateContentVersionParams{
|
||||
ContentID: c.ID,
|
||||
SiteID: c.SiteID,
|
||||
Value: c.Value,
|
||||
Type: c.Type,
|
||||
CreatedBy: c.LastEditedBy,
|
||||
})
|
||||
}
|
||||
return fmt.Errorf("unsupported database type")
|
||||
}
|
||||
|
||||
func (h *ContentHandler) getContentType(content interface{}) string {
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
return content.(sqlite.Content).Type
|
||||
case "postgresql":
|
||||
return content.(postgresql.Content).Type
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (h *ContentHandler) versionMatches(version interface{}, contentID, siteID string) bool {
|
||||
switch h.database.GetDBType() {
|
||||
case "sqlite3":
|
||||
v := version.(sqlite.ContentVersion)
|
||||
return v.ContentID == contentID && v.SiteID == siteID
|
||||
case "postgresql":
|
||||
v := version.(postgresql.ContentVersion)
|
||||
return v.ContentID == contentID && v.SiteID == siteID
|
||||
}
|
||||
return false
|
||||
}
|
||||
@@ -1,127 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"log"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// CORSMiddleware adds CORS headers to enable browser requests
|
||||
func CORSMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
origin := r.Header.Get("Origin")
|
||||
|
||||
// Allow localhost and 127.0.0.1 on common development ports
|
||||
allowedOrigins := []string{
|
||||
"http://localhost:3000",
|
||||
"http://127.0.0.1:3000",
|
||||
"http://localhost:8080",
|
||||
"http://127.0.0.1:8080",
|
||||
}
|
||||
|
||||
// Check if origin is allowed
|
||||
originAllowed := false
|
||||
for _, allowed := range allowedOrigins {
|
||||
if origin == allowed {
|
||||
originAllowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if originAllowed {
|
||||
w.Header().Set("Access-Control-Allow-Origin", origin)
|
||||
} else {
|
||||
// Fallback to wildcard for development (can be restricted in production)
|
||||
w.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
}
|
||||
|
||||
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
|
||||
w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
|
||||
w.Header().Set("Access-Control-Allow-Credentials", "true")
|
||||
|
||||
// Note: Explicit OPTIONS handling is done via routes, not here
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
// LoggingMiddleware logs HTTP requests
|
||||
func LoggingMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
start := time.Now()
|
||||
|
||||
// Create a response writer wrapper to capture status code
|
||||
wrapper := &responseWriter{ResponseWriter: w, statusCode: http.StatusOK}
|
||||
|
||||
next.ServeHTTP(wrapper, r)
|
||||
|
||||
log.Printf("%s %s %d %v", r.Method, r.URL.Path, wrapper.statusCode, time.Since(start))
|
||||
})
|
||||
}
|
||||
|
||||
// responseWriter wraps http.ResponseWriter to capture status code
|
||||
type responseWriter struct {
|
||||
http.ResponseWriter
|
||||
statusCode int
|
||||
}
|
||||
|
||||
func (rw *responseWriter) WriteHeader(code int) {
|
||||
rw.statusCode = code
|
||||
rw.ResponseWriter.WriteHeader(code)
|
||||
}
|
||||
|
||||
// ContentTypeMiddleware ensures JSON responses have proper content type
|
||||
func ContentTypeMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Set default content type for API responses
|
||||
if r.URL.Path != "/" && (r.Method == "GET" || r.Method == "POST" || r.Method == "PUT") {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
}
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
// HealthMiddleware provides a simple health check endpoint
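// (Despite the name it does not wrap another handler; it simply returns the health handler.)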
|
||||
func HealthMiddleware() http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte(`{"status":"healthy","service":"insertr-server"}`))
|
||||
}
|
||||
}
|
||||
|
||||
// CORSPreflightHandler handles CORS preflight requests (OPTIONS)
|
||||
func CORSPreflightHandler(w http.ResponseWriter, r *http.Request) {
|
||||
origin := r.Header.Get("Origin")
|
||||
|
||||
// Allow localhost and 127.0.0.1 on common development ports
|
||||
allowedOrigins := []string{
|
||||
"http://localhost:3000",
|
||||
"http://127.0.0.1:3000",
|
||||
"http://localhost:8080",
|
||||
"http://127.0.0.1:8080",
|
||||
}
|
||||
|
||||
// Check if origin is allowed
|
||||
originAllowed := false
|
||||
for _, allowed := range allowedOrigins {
|
||||
if origin == allowed {
|
||||
originAllowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if originAllowed {
|
||||
w.Header().Set("Access-Control-Allow-Origin", origin)
|
||||
} else {
|
||||
// Fallback to wildcard for development
|
||||
w.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
}
|
||||
|
||||
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
|
||||
w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
|
||||
w.Header().Set("Access-Control-Allow-Credentials", "true")
|
||||
w.Header().Set("Access-Control-Max-Age", "86400") // Cache preflight for 24 hours
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
package api
|
||||
|
||||
import "time"
|
||||
|
||||
// API request/response models
|
||||
type ContentItem struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
LastEditedBy string `json:"last_edited_by"`
|
||||
}
|
||||
|
||||
type ContentVersion struct {
|
||||
VersionID int64 `json:"version_id"`
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
CreatedBy string `json:"created_by"`
|
||||
}
|
||||
|
||||
type ContentResponse struct {
|
||||
Content []ContentItem `json:"content"`
|
||||
}
|
||||
|
||||
type ContentVersionsResponse struct {
|
||||
Versions []ContentVersion `json:"versions"`
|
||||
}
|
||||
|
||||
// Request models
|
||||
type CreateContentRequest struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id,omitempty"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedBy string `json:"created_by,omitempty"`
|
||||
}
|
||||
|
||||
type UpdateContentRequest struct {
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type,omitempty"`
|
||||
UpdatedBy string `json:"updated_by,omitempty"`
|
||||
}
|
||||
|
||||
type RollbackContentRequest struct {
|
||||
VersionID int64 `json:"version_id"`
|
||||
RolledBackBy string `json:"rolled_back_by,omitempty"`
|
||||
}
|
||||
@@ -1,184 +0,0 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
_ "github.com/lib/pq"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
|
||||
"github.com/insertr/server/internal/db/postgresql"
|
||||
"github.com/insertr/server/internal/db/sqlite"
|
||||
)
|
||||
|
||||
// Database wraps the database connection and queries
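// Exactly one of the engine-specific query sets below is initialized, depending on the
// connection string given to NewDatabase; callers pick the right one via GetDBType().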
|
||||
type Database struct {
|
||||
conn *sql.DB
|
||||
dbType string
|
||||
|
||||
// Type-specific query interfaces
|
||||
sqliteQueries *sqlite.Queries
|
||||
postgresqlQueries *postgresql.Queries
|
||||
}
|
||||
|
||||
// NewDatabase creates a new database connection
|
||||
func NewDatabase(dbPath string) (*Database, error) {
|
||||
var conn *sql.DB
|
||||
var dbType string
|
||||
var err error
|
||||
|
||||
// Determine database type from connection string
|
||||
if strings.Contains(dbPath, "postgres://") || strings.Contains(dbPath, "postgresql://") {
|
||||
dbType = "postgresql"
|
||||
conn, err = sql.Open("postgres", dbPath)
|
||||
} else {
|
||||
dbType = "sqlite3"
|
||||
conn, err = sql.Open("sqlite3", dbPath)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open database: %w", err)
|
||||
}
|
||||
|
||||
// Test connection
|
||||
if err := conn.Ping(); err != nil {
|
||||
conn.Close()
|
||||
return nil, fmt.Errorf("failed to ping database: %w", err)
|
||||
}
|
||||
|
||||
// Initialize the appropriate queries
|
||||
db := &Database{
|
||||
conn: conn,
|
||||
dbType: dbType,
|
||||
}
|
||||
|
||||
switch dbType {
|
||||
case "sqlite3":
|
||||
// Initialize SQLite schema using generated functions
|
||||
db.sqliteQueries = sqlite.New(conn)
|
||||
if err := db.initializeSQLiteSchema(); err != nil {
|
||||
conn.Close()
|
||||
return nil, fmt.Errorf("failed to initialize SQLite schema: %w", err)
|
||||
}
|
||||
case "postgresql":
|
||||
// Initialize PostgreSQL schema using generated functions
|
||||
db.postgresqlQueries = postgresql.New(conn)
|
||||
if err := db.initializePostgreSQLSchema(); err != nil {
|
||||
conn.Close()
|
||||
return nil, fmt.Errorf("failed to initialize PostgreSQL schema: %w", err)
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported database type: %s", dbType)
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
// Close closes the database connection
|
||||
func (db *Database) Close() error {
|
||||
return db.conn.Close()
|
||||
}
|
||||
|
||||
// GetQueries returns the appropriate query interface
|
||||
func (db *Database) GetSQLiteQueries() *sqlite.Queries {
|
||||
return db.sqliteQueries
|
||||
}
|
||||
|
||||
func (db *Database) GetPostgreSQLQueries() *postgresql.Queries {
|
||||
return db.postgresqlQueries
|
||||
}
|
||||
|
||||
// GetDBType returns the database type
|
||||
func (db *Database) GetDBType() string {
|
||||
return db.dbType
|
||||
}
|
||||
|
||||
// initializeSQLiteSchema sets up the SQLite database schema
|
||||
func (db *Database) initializeSQLiteSchema() error {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create tables
|
||||
if err := db.sqliteQueries.InitializeSchema(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create content table: %w", err)
|
||||
}
|
||||
|
||||
if err := db.sqliteQueries.InitializeVersionsTable(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create content_versions table: %w", err)
|
||||
}
|
||||
|
||||
// Create indexes manually (sqlc doesn't generate CREATE INDEX functions for SQLite)
|
||||
indexQueries := []string{
|
||||
"CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at);",
|
||||
"CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC);",
|
||||
}
|
||||
|
||||
for _, query := range indexQueries {
|
||||
if _, err := db.conn.Exec(query); err != nil {
|
||||
return fmt.Errorf("failed to create index: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Create update trigger manually (sqlc doesn't generate trigger creation functions)
|
||||
triggerQuery := `
|
||||
CREATE TRIGGER IF NOT EXISTS update_content_updated_at
|
||||
AFTER UPDATE ON content
|
||||
FOR EACH ROW
|
||||
BEGIN
|
||||
UPDATE content SET updated_at = strftime('%s', 'now') WHERE id = NEW.id AND site_id = NEW.site_id;
|
||||
END;`
|
||||
|
||||
if _, err := db.conn.Exec(triggerQuery); err != nil {
|
||||
return fmt.Errorf("failed to create update trigger: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// initializePostgreSQLSchema sets up the PostgreSQL database schema
|
||||
func (db *Database) initializePostgreSQLSchema() error {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create tables using sqlc-generated functions
|
||||
if err := db.postgresqlQueries.InitializeSchema(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create content table: %w", err)
|
||||
}
|
||||
|
||||
if err := db.postgresqlQueries.InitializeVersionsTable(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create content_versions table: %w", err)
|
||||
}
|
||||
|
||||
// Create indexes using sqlc-generated functions (PostgreSQL supports this)
|
||||
if err := db.postgresqlQueries.CreateContentSiteIndex(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create content site index: %w", err)
|
||||
}
|
||||
|
||||
if err := db.postgresqlQueries.CreateContentUpdatedAtIndex(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create content updated_at index: %w", err)
|
||||
}
|
||||
|
||||
if err := db.postgresqlQueries.CreateVersionsLookupIndex(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create versions lookup index: %w", err)
|
||||
}
|
||||
|
||||
// Create update function using sqlc-generated function
|
||||
if err := db.postgresqlQueries.CreateUpdateFunction(ctx); err != nil {
|
||||
return fmt.Errorf("failed to create update function: %w", err)
|
||||
}
|
||||
|
||||
// Create trigger manually (sqlc doesn't generate trigger creation functions)
|
||||
triggerQuery := `
|
||||
DROP TRIGGER IF EXISTS update_content_updated_at ON content;
|
||||
CREATE TRIGGER update_content_updated_at
|
||||
BEFORE UPDATE ON content
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION update_content_timestamp();`
|
||||
|
||||
if _, err := db.conn.Exec(triggerQuery); err != nil {
|
||||
return fmt.Errorf("failed to create update trigger: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
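The wrapper above picks a driver from the connection string and exposes the sqlc-generated query packages directly. A minimal usage sketch, assuming a caller inside the same module such as the old server entry point — the call site, database path, and site ID below are illustrative assumptions, not code from this commit:

```go
package main

import (
	"context"
	"log"

	"github.com/insertr/server/internal/db"
)

func main() {
	// A plain file path selects the sqlite3 driver; a postgres:// or
	// postgresql:// URL would select the PostgreSQL driver instead.
	database, err := db.NewDatabase("./insertr.db")
	if err != nil {
		log.Fatal(err)
	}
	defer database.Close()

	// Callers branch on the reported type to reach the matching query set.
	if database.GetDBType() == "sqlite3" {
		items, err := database.GetSQLiteQueries().GetAllContent(context.Background(), "demo-site")
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("loaded %d content items", len(items))
	}
}
```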
@@ -1,214 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: content.sql
|
||||
|
||||
package postgresql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const createContent = `-- name: CreateContent :one
|
||||
INSERT INTO content (id, site_id, value, type, last_edited_by)
|
||||
VALUES ($1, $2, $3, $4, $5)
|
||||
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
`
|
||||
|
||||
type CreateContentParams struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
LastEditedBy string `json:"last_edited_by"`
|
||||
}
|
||||
|
||||
func (q *Queries) CreateContent(ctx context.Context, arg CreateContentParams) (Content, error) {
|
||||
row := q.db.QueryRowContext(ctx, createContent,
|
||||
arg.ID,
|
||||
arg.SiteID,
|
||||
arg.Value,
|
||||
arg.Type,
|
||||
arg.LastEditedBy,
|
||||
)
|
||||
var i Content
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const deleteContent = `-- name: DeleteContent :exec
|
||||
DELETE FROM content
|
||||
WHERE id = $1 AND site_id = $2
|
||||
`
|
||||
|
||||
type DeleteContentParams struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteContent(ctx context.Context, arg DeleteContentParams) error {
|
||||
_, err := q.db.ExecContext(ctx, deleteContent, arg.ID, arg.SiteID)
|
||||
return err
|
||||
}
|
||||
|
||||
const getAllContent = `-- name: GetAllContent :many
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE site_id = $1
|
||||
ORDER BY updated_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) GetAllContent(ctx context.Context, siteID string) ([]Content, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getAllContent, siteID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Content
|
||||
for rows.Next() {
|
||||
var i Content
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getBulkContent = `-- name: GetBulkContent :many
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE site_id = $1 AND id IN ($2)
|
||||
`
|
||||
|
||||
type GetBulkContentParams struct {
|
||||
SiteID string `json:"site_id"`
|
||||
Ids []string `json:"ids"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error) {
|
||||
query := getBulkContent
|
||||
var queryParams []interface{}
|
||||
queryParams = append(queryParams, arg.SiteID)
|
||||
if len(arg.Ids) > 0 {
|
||||
for _, v := range arg.Ids {
|
||||
queryParams = append(queryParams, v)
|
||||
}
|
||||
query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(arg.Ids))[1:], 1)
|
||||
} else {
|
||||
query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1)
|
||||
}
|
||||
rows, err := q.db.QueryContext(ctx, query, queryParams...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Content
|
||||
for rows.Next() {
|
||||
var i Content
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getContent = `-- name: GetContent :one
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE id = $1 AND site_id = $2
|
||||
`
|
||||
|
||||
type GetContentParams struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetContent(ctx context.Context, arg GetContentParams) (Content, error) {
|
||||
row := q.db.QueryRowContext(ctx, getContent, arg.ID, arg.SiteID)
|
||||
var i Content
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateContent = `-- name: UpdateContent :one
|
||||
UPDATE content
|
||||
SET value = $1, type = $2, last_edited_by = $3
|
||||
WHERE id = $4 AND site_id = $5
|
||||
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
`
|
||||
|
||||
type UpdateContentParams struct {
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
LastEditedBy string `json:"last_edited_by"`
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error) {
|
||||
row := q.db.QueryRowContext(ctx, updateContent,
|
||||
arg.Value,
|
||||
arg.Type,
|
||||
arg.LastEditedBy,
|
||||
arg.ID,
|
||||
arg.SiteID,
|
||||
)
|
||||
var i Content
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package postgresql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type DBTX interface {
|
||||
ExecContext(context.Context, string, ...interface{}) (sql.Result, error)
|
||||
PrepareContext(context.Context, string) (*sql.Stmt, error)
|
||||
QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
|
||||
QueryRowContext(context.Context, string, ...interface{}) *sql.Row
|
||||
}
|
||||
|
||||
func New(db DBTX) *Queries {
|
||||
return &Queries{db: db}
|
||||
}
|
||||
|
||||
type Queries struct {
|
||||
db DBTX
|
||||
}
|
||||
|
||||
func (q *Queries) WithTx(tx *sql.Tx) *Queries {
|
||||
return &Queries{
|
||||
db: tx,
|
||||
}
|
||||
}
|
||||
@@ -1,25 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package postgresql
|
||||
|
||||
type Content struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
UpdatedAt int64 `json:"updated_at"`
|
||||
LastEditedBy string `json:"last_edited_by"`
|
||||
}
|
||||
|
||||
type ContentVersion struct {
|
||||
VersionID int32 `json:"version_id"`
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
CreatedBy string `json:"created_by"`
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package postgresql
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
type Querier interface {
|
||||
CreateContent(ctx context.Context, arg CreateContentParams) (Content, error)
|
||||
CreateContentSiteIndex(ctx context.Context) error
|
||||
CreateContentUpdatedAtIndex(ctx context.Context) error
|
||||
CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error
|
||||
CreateUpdateFunction(ctx context.Context) error
|
||||
CreateVersionsLookupIndex(ctx context.Context) error
|
||||
DeleteContent(ctx context.Context, arg DeleteContentParams) error
|
||||
DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error
|
||||
GetAllContent(ctx context.Context, siteID string) ([]Content, error)
|
||||
GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error)
|
||||
GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error)
|
||||
GetContent(ctx context.Context, arg GetContentParams) (Content, error)
|
||||
GetContentVersion(ctx context.Context, versionID int32) (ContentVersion, error)
|
||||
GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error)
|
||||
InitializeSchema(ctx context.Context) error
|
||||
InitializeVersionsTable(ctx context.Context) error
|
||||
UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error)
|
||||
}
|
||||
|
||||
var _ Querier = (*Queries)(nil)
|
||||
@@ -1,87 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: setup.sql
|
||||
|
||||
package postgresql
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
const createContentSiteIndex = `-- name: CreateContentSiteIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_site_id ON content(site_id)
|
||||
`
|
||||
|
||||
func (q *Queries) CreateContentSiteIndex(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, createContentSiteIndex)
|
||||
return err
|
||||
}
|
||||
|
||||
const createContentUpdatedAtIndex = `-- name: CreateContentUpdatedAtIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_updated_at ON content(updated_at)
|
||||
`
|
||||
|
||||
func (q *Queries) CreateContentUpdatedAtIndex(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, createContentUpdatedAtIndex)
|
||||
return err
|
||||
}
|
||||
|
||||
const createUpdateFunction = `-- name: CreateUpdateFunction :exec
|
||||
CREATE OR REPLACE FUNCTION update_content_timestamp()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = EXTRACT(EPOCH FROM NOW());
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql
|
||||
`
|
||||
|
||||
func (q *Queries) CreateUpdateFunction(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, createUpdateFunction)
|
||||
return err
|
||||
}
|
||||
|
||||
const createVersionsLookupIndex = `-- name: CreateVersionsLookupIndex :exec
|
||||
CREATE INDEX IF NOT EXISTS idx_content_versions_lookup ON content_versions(content_id, site_id, created_at DESC)
|
||||
`
|
||||
|
||||
func (q *Queries) CreateVersionsLookupIndex(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, createVersionsLookupIndex)
|
||||
return err
|
||||
}
|
||||
|
||||
const initializeSchema = `-- name: InitializeSchema :exec
|
||||
CREATE TABLE IF NOT EXISTS content (
|
||||
id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
|
||||
created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||
updated_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||
last_edited_by TEXT DEFAULT 'system' NOT NULL,
|
||||
PRIMARY KEY (id, site_id)
|
||||
)
|
||||
`
|
||||
|
||||
func (q *Queries) InitializeSchema(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, initializeSchema)
|
||||
return err
|
||||
}
|
||||
|
||||
const initializeVersionsTable = `-- name: InitializeVersionsTable :exec
|
||||
CREATE TABLE IF NOT EXISTS content_versions (
|
||||
version_id SERIAL PRIMARY KEY,
|
||||
content_id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
created_at BIGINT DEFAULT (EXTRACT(EPOCH FROM NOW())) NOT NULL,
|
||||
created_by TEXT DEFAULT 'system' NOT NULL
|
||||
)
|
||||
`
|
||||
|
||||
func (q *Queries) InitializeVersionsTable(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, initializeVersionsTable)
|
||||
return err
|
||||
}
|
||||
@@ -1,175 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: versions.sql
|
||||
|
||||
package postgresql
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
const createContentVersion = `-- name: CreateContentVersion :exec
|
||||
INSERT INTO content_versions (content_id, site_id, value, type, created_by)
|
||||
VALUES ($1, $2, $3, $4, $5)
|
||||
`
|
||||
|
||||
type CreateContentVersionParams struct {
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedBy string `json:"created_by"`
|
||||
}
|
||||
|
||||
func (q *Queries) CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error {
|
||||
_, err := q.db.ExecContext(ctx, createContentVersion,
|
||||
arg.ContentID,
|
||||
arg.SiteID,
|
||||
arg.Value,
|
||||
arg.Type,
|
||||
arg.CreatedBy,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
const deleteOldVersions = `-- name: DeleteOldVersions :exec
|
||||
DELETE FROM content_versions
|
||||
WHERE created_at < $1 AND site_id = $2
|
||||
`
|
||||
|
||||
type DeleteOldVersionsParams struct {
|
||||
CreatedBefore int64 `json:"created_before"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error {
|
||||
_, err := q.db.ExecContext(ctx, deleteOldVersions, arg.CreatedBefore, arg.SiteID)
|
||||
return err
|
||||
}
|
||||
|
||||
const getAllVersionsForSite = `-- name: GetAllVersionsForSite :many
|
||||
SELECT
|
||||
cv.version_id, cv.content_id, cv.site_id, cv.value, cv.type, cv.created_at, cv.created_by,
|
||||
c.value as current_value
|
||||
FROM content_versions cv
|
||||
LEFT JOIN content c ON cv.content_id = c.id AND cv.site_id = c.site_id
|
||||
WHERE cv.site_id = $1
|
||||
ORDER BY cv.created_at DESC
|
||||
LIMIT $2
|
||||
`
|
||||
|
||||
type GetAllVersionsForSiteParams struct {
|
||||
SiteID string `json:"site_id"`
|
||||
LimitCount int32 `json:"limit_count"`
|
||||
}
|
||||
|
||||
type GetAllVersionsForSiteRow struct {
|
||||
VersionID int32 `json:"version_id"`
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
CreatedBy string `json:"created_by"`
|
||||
CurrentValue sql.NullString `json:"current_value"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getAllVersionsForSite, arg.SiteID, arg.LimitCount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []GetAllVersionsForSiteRow
|
||||
for rows.Next() {
|
||||
var i GetAllVersionsForSiteRow
|
||||
if err := rows.Scan(
|
||||
&i.VersionID,
|
||||
&i.ContentID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.CreatedBy,
|
||||
&i.CurrentValue,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getContentVersion = `-- name: GetContentVersion :one
|
||||
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||
FROM content_versions
|
||||
WHERE version_id = $1
|
||||
`
|
||||
|
||||
func (q *Queries) GetContentVersion(ctx context.Context, versionID int32) (ContentVersion, error) {
|
||||
row := q.db.QueryRowContext(ctx, getContentVersion, versionID)
|
||||
var i ContentVersion
|
||||
err := row.Scan(
|
||||
&i.VersionID,
|
||||
&i.ContentID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.CreatedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getContentVersionHistory = `-- name: GetContentVersionHistory :many
|
||||
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||
FROM content_versions
|
||||
WHERE content_id = $1 AND site_id = $2
|
||||
ORDER BY created_at DESC
|
||||
LIMIT $3
|
||||
`
|
||||
|
||||
type GetContentVersionHistoryParams struct {
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
LimitCount int32 `json:"limit_count"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getContentVersionHistory, arg.ContentID, arg.SiteID, arg.LimitCount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []ContentVersion
|
||||
for rows.Next() {
|
||||
var i ContentVersion
|
||||
if err := rows.Scan(
|
||||
&i.VersionID,
|
||||
&i.ContentID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.CreatedBy,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
@@ -1,214 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: content.sql
|
||||
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const createContent = `-- name: CreateContent :one
|
||||
INSERT INTO content (id, site_id, value, type, last_edited_by)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
`
|
||||
|
||||
type CreateContentParams struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
LastEditedBy string `json:"last_edited_by"`
|
||||
}
|
||||
|
||||
func (q *Queries) CreateContent(ctx context.Context, arg CreateContentParams) (Content, error) {
|
||||
row := q.db.QueryRowContext(ctx, createContent,
|
||||
arg.ID,
|
||||
arg.SiteID,
|
||||
arg.Value,
|
||||
arg.Type,
|
||||
arg.LastEditedBy,
|
||||
)
|
||||
var i Content
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const deleteContent = `-- name: DeleteContent :exec
|
||||
DELETE FROM content
|
||||
WHERE id = ?1 AND site_id = ?2
|
||||
`
|
||||
|
||||
type DeleteContentParams struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteContent(ctx context.Context, arg DeleteContentParams) error {
|
||||
_, err := q.db.ExecContext(ctx, deleteContent, arg.ID, arg.SiteID)
|
||||
return err
|
||||
}
|
||||
|
||||
const getAllContent = `-- name: GetAllContent :many
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE site_id = ?1
|
||||
ORDER BY updated_at DESC
|
||||
`
|
||||
|
||||
func (q *Queries) GetAllContent(ctx context.Context, siteID string) ([]Content, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getAllContent, siteID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Content
|
||||
for rows.Next() {
|
||||
var i Content
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getBulkContent = `-- name: GetBulkContent :many
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE site_id = ?1 AND id IN (/*SLICE:ids*/?)
|
||||
`
|
||||
|
||||
type GetBulkContentParams struct {
|
||||
SiteID string `json:"site_id"`
|
||||
Ids []string `json:"ids"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error) {
|
||||
query := getBulkContent
|
||||
var queryParams []interface{}
|
||||
queryParams = append(queryParams, arg.SiteID)
|
||||
if len(arg.Ids) > 0 {
|
||||
for _, v := range arg.Ids {
|
||||
queryParams = append(queryParams, v)
|
||||
}
|
||||
query = strings.Replace(query, "/*SLICE:ids*/?", strings.Repeat(",?", len(arg.Ids))[1:], 1)
|
||||
} else {
|
||||
query = strings.Replace(query, "/*SLICE:ids*/?", "NULL", 1)
|
||||
}
|
||||
rows, err := q.db.QueryContext(ctx, query, queryParams...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Content
|
||||
for rows.Next() {
|
||||
var i Content
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getContent = `-- name: GetContent :one
|
||||
SELECT id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
FROM content
|
||||
WHERE id = ?1 AND site_id = ?2
|
||||
`
|
||||
|
||||
type GetContentParams struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetContent(ctx context.Context, arg GetContentParams) (Content, error) {
|
||||
row := q.db.QueryRowContext(ctx, getContent, arg.ID, arg.SiteID)
|
||||
var i Content
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateContent = `-- name: UpdateContent :one
|
||||
UPDATE content
|
||||
SET value = ?1, type = ?2, last_edited_by = ?3
|
||||
WHERE id = ?4 AND site_id = ?5
|
||||
RETURNING id, site_id, value, type, created_at, updated_at, last_edited_by
|
||||
`
|
||||
|
||||
type UpdateContentParams struct {
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
LastEditedBy string `json:"last_edited_by"`
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error) {
|
||||
row := q.db.QueryRowContext(ctx, updateContent,
|
||||
arg.Value,
|
||||
arg.Type,
|
||||
arg.LastEditedBy,
|
||||
arg.ID,
|
||||
arg.SiteID,
|
||||
)
|
||||
var i Content
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.LastEditedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type DBTX interface {
|
||||
ExecContext(context.Context, string, ...interface{}) (sql.Result, error)
|
||||
PrepareContext(context.Context, string) (*sql.Stmt, error)
|
||||
QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
|
||||
QueryRowContext(context.Context, string, ...interface{}) *sql.Row
|
||||
}
|
||||
|
||||
func New(db DBTX) *Queries {
|
||||
return &Queries{db: db}
|
||||
}
|
||||
|
||||
type Queries struct {
|
||||
db DBTX
|
||||
}
|
||||
|
||||
func (q *Queries) WithTx(tx *sql.Tx) *Queries {
|
||||
return &Queries{
|
||||
db: tx,
|
||||
}
|
||||
}
|
||||
@@ -1,25 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package sqlite
|
||||
|
||||
type Content struct {
|
||||
ID string `json:"id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
UpdatedAt int64 `json:"updated_at"`
|
||||
LastEditedBy string `json:"last_edited_by"`
|
||||
}
|
||||
|
||||
type ContentVersion struct {
|
||||
VersionID int64 `json:"version_id"`
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
CreatedBy string `json:"created_by"`
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
type Querier interface {
|
||||
CreateContent(ctx context.Context, arg CreateContentParams) (Content, error)
|
||||
CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error
|
||||
DeleteContent(ctx context.Context, arg DeleteContentParams) error
|
||||
DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error
|
||||
GetAllContent(ctx context.Context, siteID string) ([]Content, error)
|
||||
GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error)
|
||||
GetBulkContent(ctx context.Context, arg GetBulkContentParams) ([]Content, error)
|
||||
GetContent(ctx context.Context, arg GetContentParams) (Content, error)
|
||||
GetContentVersion(ctx context.Context, versionID int64) (ContentVersion, error)
|
||||
GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error)
|
||||
InitializeSchema(ctx context.Context) error
|
||||
InitializeVersionsTable(ctx context.Context) error
|
||||
UpdateContent(ctx context.Context, arg UpdateContentParams) (Content, error)
|
||||
}
|
||||
|
||||
var _ Querier = (*Queries)(nil)
|
||||
@@ -1,45 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: setup.sql
|
||||
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
const initializeSchema = `-- name: InitializeSchema :exec
|
||||
CREATE TABLE IF NOT EXISTS content (
|
||||
id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL CHECK (type IN ('text', 'markdown', 'link')),
|
||||
created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
updated_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
last_edited_by TEXT DEFAULT 'system' NOT NULL,
|
||||
PRIMARY KEY (id, site_id)
|
||||
)
|
||||
`
|
||||
|
||||
func (q *Queries) InitializeSchema(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, initializeSchema)
|
||||
return err
|
||||
}
|
||||
|
||||
const initializeVersionsTable = `-- name: InitializeVersionsTable :exec
|
||||
CREATE TABLE IF NOT EXISTS content_versions (
|
||||
version_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
content_id TEXT NOT NULL,
|
||||
site_id TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL,
|
||||
created_by TEXT DEFAULT 'system' NOT NULL
|
||||
)
|
||||
`
|
||||
|
||||
func (q *Queries) InitializeVersionsTable(ctx context.Context) error {
|
||||
_, err := q.db.ExecContext(ctx, initializeVersionsTable)
|
||||
return err
|
||||
}
|
||||
@@ -1,175 +0,0 @@
|
||||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.29.0
|
||||
// source: versions.sql
|
||||
|
||||
package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
const createContentVersion = `-- name: CreateContentVersion :exec
|
||||
INSERT INTO content_versions (content_id, site_id, value, type, created_by)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||
`
|
||||
|
||||
type CreateContentVersionParams struct {
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedBy string `json:"created_by"`
|
||||
}
|
||||
|
||||
func (q *Queries) CreateContentVersion(ctx context.Context, arg CreateContentVersionParams) error {
|
||||
_, err := q.db.ExecContext(ctx, createContentVersion,
|
||||
arg.ContentID,
|
||||
arg.SiteID,
|
||||
arg.Value,
|
||||
arg.Type,
|
||||
arg.CreatedBy,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
const deleteOldVersions = `-- name: DeleteOldVersions :exec
|
||||
DELETE FROM content_versions
|
||||
WHERE created_at < ?1 AND site_id = ?2
|
||||
`
|
||||
|
||||
type DeleteOldVersionsParams struct {
|
||||
CreatedBefore int64 `json:"created_before"`
|
||||
SiteID string `json:"site_id"`
|
||||
}
|
||||
|
||||
func (q *Queries) DeleteOldVersions(ctx context.Context, arg DeleteOldVersionsParams) error {
|
||||
_, err := q.db.ExecContext(ctx, deleteOldVersions, arg.CreatedBefore, arg.SiteID)
|
||||
return err
|
||||
}
|
||||
|
||||
const getAllVersionsForSite = `-- name: GetAllVersionsForSite :many
|
||||
SELECT
|
||||
cv.version_id, cv.content_id, cv.site_id, cv.value, cv.type, cv.created_at, cv.created_by,
|
||||
c.value as current_value
|
||||
FROM content_versions cv
|
||||
LEFT JOIN content c ON cv.content_id = c.id AND cv.site_id = c.site_id
|
||||
WHERE cv.site_id = ?1
|
||||
ORDER BY cv.created_at DESC
|
||||
LIMIT ?2
|
||||
`
|
||||
|
||||
type GetAllVersionsForSiteParams struct {
|
||||
SiteID string `json:"site_id"`
|
||||
LimitCount int64 `json:"limit_count"`
|
||||
}
|
||||
|
||||
type GetAllVersionsForSiteRow struct {
|
||||
VersionID int64 `json:"version_id"`
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
Value string `json:"value"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
CreatedBy string `json:"created_by"`
|
||||
CurrentValue sql.NullString `json:"current_value"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetAllVersionsForSite(ctx context.Context, arg GetAllVersionsForSiteParams) ([]GetAllVersionsForSiteRow, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getAllVersionsForSite, arg.SiteID, arg.LimitCount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []GetAllVersionsForSiteRow
|
||||
for rows.Next() {
|
||||
var i GetAllVersionsForSiteRow
|
||||
if err := rows.Scan(
|
||||
&i.VersionID,
|
||||
&i.ContentID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.CreatedBy,
|
||||
&i.CurrentValue,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getContentVersion = `-- name: GetContentVersion :one
|
||||
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||
FROM content_versions
|
||||
WHERE version_id = ?1
|
||||
`
|
||||
|
||||
func (q *Queries) GetContentVersion(ctx context.Context, versionID int64) (ContentVersion, error) {
|
||||
row := q.db.QueryRowContext(ctx, getContentVersion, versionID)
|
||||
var i ContentVersion
|
||||
err := row.Scan(
|
||||
&i.VersionID,
|
||||
&i.ContentID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.CreatedBy,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getContentVersionHistory = `-- name: GetContentVersionHistory :many
|
||||
SELECT version_id, content_id, site_id, value, type, created_at, created_by
|
||||
FROM content_versions
|
||||
WHERE content_id = ?1 AND site_id = ?2
|
||||
ORDER BY created_at DESC
|
||||
LIMIT ?3
|
||||
`
|
||||
|
||||
type GetContentVersionHistoryParams struct {
|
||||
ContentID string `json:"content_id"`
|
||||
SiteID string `json:"site_id"`
|
||||
LimitCount int64 `json:"limit_count"`
|
||||
}
|
||||
|
||||
func (q *Queries) GetContentVersionHistory(ctx context.Context, arg GetContentVersionHistoryParams) ([]ContentVersion, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getContentVersionHistory, arg.ContentID, arg.SiteID, arg.LimitCount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []ContentVersion
|
||||
for rows.Next() {
|
||||
var i ContentVersion
|
||||
if err := rows.Scan(
|
||||
&i.VersionID,
|
||||
&i.ContentID,
|
||||
&i.SiteID,
|
||||
&i.Value,
|
||||
&i.Type,
|
||||
&i.CreatedAt,
|
||||
&i.CreatedBy,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
@@ -1,31 +0,0 @@
version: "2"
sql:
  # SQLite configuration for development
  - name: "sqlite"
    engine: "sqlite"
    queries: ["db/queries/", "db/sqlite/setup.sql"]
    schema: "db/sqlite/schema.sql"
    gen:
      go:
        package: "sqlite"
        out: "internal/db/sqlite"
        emit_json_tags: true
        emit_prepared_queries: false
        emit_interface: true
        emit_exact_table_names: false
        emit_pointers_for_null_types: false # All fields are NOT NULL now

  # PostgreSQL configuration for production
  - name: "postgresql"
    engine: "postgresql"
    queries: ["db/queries/", "db/postgresql/setup.sql"]
    schema: "db/postgresql/schema.sql"
    gen:
      go:
        package: "postgresql"
        out: "internal/db/postgresql"
        emit_json_tags: true
        emit_prepared_queries: false
        emit_interface: true
        emit_exact_table_names: false
        emit_pointers_for_null_types: false # All fields are NOT NULL now
@@ -43,13 +43,12 @@ const commands = {
 'demo-site/about.html',
 'lib/dist/insertr.js',
 'lib/dist/insertr.min.js',
-'insertr-server/cmd/server/main.go',
+'cmd/serve.go',
 'package.json'
 ];

 const optionalFiles = [
-'insertr-cli/insertr',
-'insertr-server/insertr-server'
+'insertr'
 ];

 let allGood = true;