Search Extension
Full-text search with Elasticsearch, Meilisearch, Typesense, and in-memory backends
The Search extension provides a unified interface for full-text search across multiple backends: Elasticsearch, Meilisearch, Typesense, and an in-memory engine. It supports faceting, highlighting, autocomplete, and advanced filtering.
Features
Multiple Backends
- Elasticsearch: Enterprise-grade search with advanced analytics
- Meilisearch: Fast, typo-tolerant search engine
- Typesense: Open-source alternative to Algolia
- In-Memory: Built-in search for development and testing
Core Capabilities
- Full-text search: Advanced text search with relevance scoring
- Faceted search: Multi-dimensional filtering and navigation
- Autocomplete: Real-time search suggestions
- Highlighting: Search term highlighting in results
- Fuzzy search: Typo-tolerant search capabilities
- Geospatial search: Location-based search queries
Advanced Features
- Custom analyzers: Text processing and tokenization
- Synonyms: Expand search with related terms
- Boost fields: Weight specific fields in search results
- Multi-index search: Search across multiple indexes
- Aggregations: Statistical analysis of search results
- Real-time indexing: Immediate document availability
Installation
go get github.com/xraph/forge/extensions/search
# Elasticsearch
docker run -d --name elasticsearch \
-p 9200:9200 -p 9300:9300 \
-e "discovery.type=single-node" \
elasticsearch:8.11.0
# Meilisearch
docker run -d --name meilisearch \
-p 7700:7700 \
getmeili/meilisearch:v1.5
# Typesense
docker run -d --name typesense \
-p 8108:8108 \
-e TYPESENSE_API_KEY=xyz \
typesense/typesense:0.25.2
# Elasticsearch (macOS)
brew install elasticsearch
# Meilisearch (macOS)
brew install meilisearch
# Typesense (macOS)
brew install typesense-server
Configuration
YAML Configuration
extensions:
  search:
    driver: "elasticsearch" # elasticsearch, meilisearch, typesense, inmemory
    url: "http://localhost:9200"
    username: "elastic"
    password: "changeme"

    # Connection pool
    max_connections: 100
    max_idle_connections: 10
    connect_timeout: "30s"
    request_timeout: "10s"
    keep_alive: "60s"

    # Retry policy
    max_retries: 3
    retry_delay: "1s"
    retry_backoff: 2.0

    # Search defaults
    default_limit: 20
    max_limit: 1000

    # Performance
    bulk_size: 1000
    flush_interval: "5s"

    # Security
    tls:
      enabled: true
      cert_file: "/path/to/cert.pem"
      key_file: "/path/to/key.pem"
      ca_file: "/path/to/ca.pem"
      skip_verify: false
Environment Variables
# Connection
SEARCH_DRIVER=elasticsearch
SEARCH_URL=http://localhost:9200
SEARCH_USERNAME=elastic
SEARCH_PASSWORD=changeme
SEARCH_API_KEY=your-api-key
# Pool settings
SEARCH_MAX_CONNECTIONS=100
SEARCH_CONNECT_TIMEOUT=30s
SEARCH_REQUEST_TIMEOUT=10s
# Performance
SEARCH_DEFAULT_LIMIT=20
SEARCH_BULK_SIZE=1000
Programmatic Configuration
import "github.com/xraph/forge/extensions/search"
// Basic configuration
app.RegisterExtension(search.NewExtension(
search.WithDriver("elasticsearch"),
search.WithURL("http://localhost:9200"),
search.WithCredentials("elastic", "changeme"),
))
// Advanced configuration
config := search.Config{
Driver: "meilisearch",
URL: "http://localhost:7700",
APIKey: "your-master-key",
MaxConnections: 50,
MaxIdleConnections: 5,
ConnectTimeout: 30 * time.Second,
RequestTimeout: 10 * time.Second,
MaxRetries: 3,
RetryDelay: time.Second,
RetryBackoff: 2.0,
DefaultLimit: 20,
MaxLimit: 1000,
BulkSize: 500,
FlushInterval: 5 * time.Second,
}
app.RegisterExtension(search.NewExtensionWithConfig(config))
Usage Examples
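The examples below work the same against any driver. For quick experiments and tests, the in-memory backend needs no external service; a minimal sketch using only the options shown above:

app.RegisterExtension(search.NewExtension(
	search.WithDriver("inmemory"),
))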
Basic Search Operations
func setupSearchIndex(c forge.Context) error {
searchSvc := forge.GetSearch(c)
// Define index schema
schema := search.IndexSchema{
Fields: []search.FieldSchema{
{
Name: "title",
Type: "text",
Searchable: true,
Boost: 2.0,
Analyzer: "standard",
},
{
Name: "content",
Type: "text",
Searchable: true,
Stored: true,
},
{
Name: "category",
Type: "keyword",
Filterable: true,
Faceted: true,
},
{
Name: "price",
Type: "float",
Sortable: true,
},
{
Name: "published_at",
Type: "date",
Sortable: true,
Filterable: true,
Format: "2006-01-02T15:04:05Z",
},
{
Name: "location",
Type: "geo_point",
},
},
Settings: map[string]interface{}{
"number_of_shards": 1,
"number_of_replicas": 0,
},
Synonyms: []search.Synonym{
{Terms: []string{"laptop", "notebook", "computer"}},
{Terms: []string{"phone", "mobile", "smartphone"}},
},
StopWords: []string{"the", "a", "an", "and", "or", "but"},
Ranking: &search.RankingConfig{
Rules: []string{"words", "typo", "proximity", "attribute", "sort", "exactness"},
DistinctAttribute: "id",
},
Faceting: &search.FacetingConfig{
MaxValuesPerFacet: 100,
SortFacetValuesBy: map[string]string{
"category": "count",
"brand": "alpha",
},
},
Highlighting: &search.HighlightConfig{
PreTag: "<mark>",
PostTag: "</mark>",
},
}
// Create index
err := searchSvc.CreateIndex(c.Context(), "products", schema)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Failed to create index",
"details": err.Error(),
})
}
return c.JSON(200, map[string]string{
"status": "Index created successfully",
"index": "products",
})
}
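On most backends, creating an index that already exists returns an error. Below is a small idempotent wrapper, offered as a sketch: it assumes ListIndexes returns index names as strings, the same shape the handler after it relies on.

func ensureIndex(ctx context.Context, searchSvc search.Search, name string, schema search.IndexSchema) error {
	indexes, err := searchSvc.ListIndexes(ctx)
	if err != nil {
		return err
	}
	for _, existing := range indexes {
		if existing == name {
			return nil // index already present, nothing to do
		}
	}
	return searchSvc.CreateIndex(ctx, name, schema)
}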
func listIndexesHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
indexes, err := searchSvc.ListIndexes(c.Context())
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Failed to list indexes",
"details": err.Error(),
})
}
// Get detailed info for each index
var indexInfo []map[string]interface{}
for _, indexName := range indexes {
info, err := searchSvc.GetIndexInfo(c.Context(), indexName)
if err != nil {
continue
}
indexInfo = append(indexInfo, map[string]interface{}{
"name": info.Name,
"document_count": info.DocumentCount,
"index_size": info.IndexSize,
"created_at": info.CreatedAt,
"updated_at": info.UpdatedAt,
})
}
return c.JSON(200, map[string]interface{}{
"indexes": indexInfo,
"total": len(indexes),
})
}
func deleteIndexHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
indexName := c.Param("index")
if indexName == "" {
return c.JSON(400, map[string]string{"error": "Index name is required"})
}
err := searchSvc.DeleteIndex(c.Context(), indexName)
if err != nil {
if errors.Is(err, search.ErrIndexNotFound) {
return c.JSON(404, map[string]string{"error": "Index not found"})
}
return c.JSON(500, map[string]interface{}{
"error": "Failed to delete index",
"details": err.Error(),
})
}
return c.JSON(200, map[string]string{
"status": "Index deleted successfully",
"index": indexName,
})
}
func indexDocumentHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
indexName := c.Param("index")
var req struct {
ID string `json:"id"`
Fields map[string]interface{} `json:"fields"`
}
if err := c.Bind(&req); err != nil {
return c.JSON(400, map[string]string{"error": "Invalid request"})
}
doc := search.Document{
ID: req.ID,
Fields: req.Fields,
}
err := searchSvc.Index(c.Context(), indexName, doc)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Failed to index document",
"details": err.Error(),
})
}
return c.JSON(201, map[string]interface{}{
"status": "Document indexed successfully",
"index": indexName,
"document_id": req.ID,
})
}
func bulkIndexHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
indexName := c.Param("index")
var req struct {
Documents []search.Document `json:"documents"`
}
if err := c.Bind(&req); err != nil {
return c.JSON(400, map[string]string{"error": "Invalid request"})
}
if len(req.Documents) == 0 {
return c.JSON(400, map[string]string{"error": "No documents provided"})
}
err := searchSvc.BulkIndex(c.Context(), indexName, req.Documents)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Failed to bulk index documents",
"details": err.Error(),
})
}
return c.JSON(201, map[string]interface{}{
"status": "Documents indexed successfully",
"index": indexName,
"document_count": len(req.Documents),
})
}
func getDocumentHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
indexName := c.Param("index")
docID := c.Param("id")
if indexName == "" || docID == "" {
return c.JSON(400, map[string]string{"error": "Index name and document ID are required"})
}
doc, err := searchSvc.Get(c.Context(), indexName, docID)
if err != nil {
if errors.Is(err, search.ErrDocumentNotFound) {
return c.JSON(404, map[string]string{"error": "Document not found"})
}
return c.JSON(500, map[string]interface{}{
"error": "Failed to get document",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"document": doc,
"index": indexName,
})
}
func updateDocumentHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
indexName := c.Param("index")
docID := c.Param("id")
var req struct {
Fields map[string]interface{} `json:"fields"`
}
if err := c.Bind(&req); err != nil {
return c.JSON(400, map[string]string{"error": "Invalid request"})
}
doc := search.Document{
ID: docID,
Fields: req.Fields,
}
err := searchSvc.Update(c.Context(), indexName, docID, doc)
if err != nil {
if errors.Is(err, search.ErrDocumentNotFound) {
return c.JSON(404, map[string]string{"error": "Document not found"})
}
return c.JSON(500, map[string]interface{}{
"error": "Failed to update document",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"status": "Document updated successfully",
"index": indexName,
"document_id": docID,
})
}
func deleteDocumentHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
indexName := c.Param("index")
docID := c.Param("id")
if indexName == "" || docID == "" {
return c.JSON(400, map[string]string{"error": "Index name and document ID are required"})
}
err := searchSvc.Delete(c.Context(), indexName, docID)
if err != nil {
if errors.Is(err, search.ErrDocumentNotFound) {
return c.JSON(404, map[string]string{"error": "Document not found"})
}
return c.JSON(500, map[string]interface{}{
"error": "Failed to delete document",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"status": "Document deleted successfully",
"index": indexName,
"document_id": docID,
})
}
func searchHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
var req struct {
Index string `json:"index"`
Query string `json:"query"`
Filters []search.Filter `json:"filters,omitempty"`
Sort []search.SortField `json:"sort,omitempty"`
Facets []string `json:"facets,omitempty"`
Offset int `json:"offset,omitempty"`
Limit int `json:"limit,omitempty"`
Highlight bool `json:"highlight,omitempty"`
HighlightFields []string `json:"highlight_fields,omitempty"`
Fields []string `json:"fields,omitempty"`
MinScore float64 `json:"min_score,omitempty"`
BoostFields map[string]float64 `json:"boost_fields,omitempty"`
FuzzyLevel int `json:"fuzzy_level,omitempty"`
Options map[string]interface{} `json:"options,omitempty"`
}
if err := c.Bind(&req); err != nil {
return c.JSON(400, map[string]string{"error": "Invalid request"})
}
if req.Index == "" {
return c.JSON(400, map[string]string{"error": "Index name is required"})
}
if req.Limit == 0 {
req.Limit = 20 // Default limit
}
query := search.SearchQuery{
Index: req.Index,
Query: req.Query,
Filters: req.Filters,
Sort: req.Sort,
Facets: req.Facets,
Offset: req.Offset,
Limit: req.Limit,
Highlight: req.Highlight,
HighlightFields: req.HighlightFields,
Fields: req.Fields,
MinScore: req.MinScore,
BoostFields: req.BoostFields,
FuzzyLevel: req.FuzzyLevel,
Options: req.Options,
}
results, err := searchSvc.Search(c.Context(), query)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Search failed",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"results": results.Hits,
"total": results.Total,
"offset": results.Offset,
"limit": results.Limit,
"processing_time": results.ProcessingTime.Milliseconds(),
"facets": results.Facets,
"query": results.Query,
"exhaustive": results.Exhaustive,
})
}
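For reference, a minimal request body this handler accepts; the field names follow the struct tags bound above:

{
  "index": "products",
  "query": "wireless headphones",
  "facets": ["category"],
  "limit": 10,
  "highlight": true,
  "highlight_fields": ["title"]
}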
func productSearchHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
// Get query parameters
q := c.Query("q")
category := c.Query("category")
minPrice := c.QueryFloat("min_price", 0)
maxPrice := c.QueryFloat("max_price", 0)
sortBy := c.Query("sort", "relevance")
page := c.QueryInt("page", 1)
limit := c.QueryInt("limit", 20)
// Build search query
query := search.SearchQuery{
Index: "products",
Query: q,
Offset: (page - 1) * limit,
Limit: limit,
Highlight: true,
HighlightFields: []string{"title", "description"},
Facets: []string{"category", "brand", "price_range"},
BoostFields: map[string]float64{
"title": 2.0,
"description": 1.0,
"tags": 1.5,
},
FuzzyLevel: 1, // Allow 1 typo
}
// Add filters
var filters []search.Filter
if category != "" {
filters = append(filters, search.Filter{
Field: "category",
Operator: "=",
Value: category,
})
}
if minPrice > 0 {
filters = append(filters, search.Filter{
Field: "price",
Operator: ">=",
Value: minPrice,
})
}
if maxPrice > 0 {
filters = append(filters, search.Filter{
Field: "price",
Operator: "<=",
Value: maxPrice,
})
}
// Add availability filter
filters = append(filters, search.Filter{
Field: "in_stock",
Operator: "=",
Value: true,
})
query.Filters = filters
// Add sorting
switch sortBy {
case "price_asc":
query.Sort = []search.SortField{{Field: "price", Order: "asc"}}
case "price_desc":
query.Sort = []search.SortField{{Field: "price", Order: "desc"}}
case "newest":
query.Sort = []search.SortField{{Field: "created_at", Order: "desc"}}
case "rating":
query.Sort = []search.SortField{{Field: "rating", Order: "desc"}}
default:
// Default to relevance (no explicit sort)
}
results, err := searchSvc.Search(c.Context(), query)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Product search failed",
"details": err.Error(),
})
}
// Transform results for frontend
var products []map[string]interface{}
for _, hit := range results.Hits {
product := map[string]interface{}{
"id": hit.ID,
"score": hit.Score,
"title": hit.Document["title"],
"description": hit.Document["description"],
"price": hit.Document["price"],
"category": hit.Document["category"],
"brand": hit.Document["brand"],
"image_url": hit.Document["image_url"],
"rating": hit.Document["rating"],
"in_stock": hit.Document["in_stock"],
}
if len(hit.Highlights) > 0 {
product["highlights"] = hit.Highlights
}
products = append(products, product)
}
return c.JSON(200, map[string]interface{}{
"products": products,
"total": results.Total,
"page": page,
"limit": limit,
"total_pages": (results.Total + int64(limit) - 1) / int64(limit),
"processing_time": results.ProcessingTime.Milliseconds(),
"facets": results.Facets,
"query": q,
"filters": map[string]interface{}{
"category": category,
"min_price": minPrice,
"max_price": maxPrice,
},
})
}
Advanced Features
func autocompleteHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
query := c.Query("q")
field := c.Query("field", "title")
limit := c.QueryInt("limit", 10)
if query == "" {
return c.JSON(400, map[string]string{"error": "Query parameter 'q' is required"})
}
autocompleteQuery := search.AutocompleteQuery{
Index: "products",
Query: query,
Field: field,
Limit: limit,
}
results, err := searchSvc.Autocomplete(c.Context(), autocompleteQuery)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Autocomplete failed",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"suggestions": results.Suggestions,
"processing_time": results.ProcessingTime.Milliseconds(),
"query": query,
})
}
func suggestHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
query := c.Query("q")
field := c.Query("field", "title")
limit := c.QueryInt("limit", 5)
fuzzy := c.QueryBool("fuzzy", true)
if query == "" {
return c.JSON(400, map[string]string{"error": "Query parameter 'q' is required"})
}
suggestQuery := search.SuggestQuery{
Index: "products",
Query: query,
Field: field,
Limit: limit,
Fuzzy: fuzzy,
}
results, err := searchSvc.Suggest(c.Context(), suggestQuery)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Suggestion failed",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"suggestions": results.Suggestions,
"processing_time": results.ProcessingTime.Milliseconds(),
"query": query,
})
}
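As used above, the two endpoints overlap but serve different intents: Autocomplete completes a prefix against a single field, while Suggest exposes a Fuzzy option, which makes it the better fit for typo-tolerant, "did you mean" style corrections.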
func setupSearchSuggestions(app *forge.App) {
searchSvc := app.GetSearch()
// Create suggestions index
schema := search.IndexSchema{
Fields: []search.FieldSchema{
{
Name: "text",
Type: "text",
Searchable: true,
Analyzer: "autocomplete",
},
{
Name: "category",
Type: "keyword",
Filterable: true,
},
{
Name: "popularity",
Type: "integer",
Sortable: true,
},
{
Name: "frequency",
Type: "integer",
Sortable: true,
},
},
Analyzers: map[string]search.Analyzer{
"autocomplete": {
Type: "custom",
Tokenizer: "edge_ngram",
Filters: []string{"lowercase", "asciifolding"},
},
},
}
ctx := context.Background()
err := searchSvc.CreateIndex(ctx, "suggestions", schema)
if err != nil {
log.Printf("Failed to create suggestions index: %v", err)
return
}
// Index popular search terms
suggestions := []search.Document{
{
ID: "1",
Fields: map[string]interface{}{
"text": "laptop",
"category": "electronics",
"popularity": 100,
"frequency": 1500,
},
},
{
ID: "2",
Fields: map[string]interface{}{
"text": "smartphone",
"category": "electronics",
"popularity": 95,
"frequency": 1200,
},
},
{
ID: "3",
Fields: map[string]interface{}{
"text": "headphones",
"category": "electronics",
"popularity": 80,
"frequency": 800,
},
},
}
err = searchSvc.BulkIndex(ctx, "suggestions", suggestions)
if err != nil {
log.Printf("Failed to index suggestions: %v", err)
}
}
func smartAutocompleteHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
query := c.Query("q")
category := c.Query("category")
limit := c.QueryInt("limit", 10)
if query == "" {
return c.JSON(400, map[string]string{"error": "Query parameter 'q' is required"})
}
// Build search query for suggestions
searchQuery := search.SearchQuery{
Index: "suggestions",
Query: query,
Limit: limit,
Sort: []search.SortField{
{Field: "popularity", Order: "desc"},
{Field: "frequency", Order: "desc"},
},
Fields: []string{"text", "category", "popularity"},
}
// Add category filter if specified
if category != "" {
searchQuery.Filters = []search.Filter{
{
Field: "category",
Operator: "=",
Value: category,
},
}
}
results, err := searchSvc.Search(c.Context(), searchQuery)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Smart autocomplete failed",
"details": err.Error(),
})
}
// Transform results
var suggestions []map[string]interface{}
for _, hit := range results.Hits {
suggestions = append(suggestions, map[string]interface{}{
"text": hit.Document["text"],
"category": hit.Document["category"],
"popularity": hit.Document["popularity"],
"score": hit.Score,
})
}
return c.JSON(200, map[string]interface{}{
"suggestions": suggestions,
"total": results.Total,
"processing_time": results.ProcessingTime.Milliseconds(),
"query": query,
})
}
func nearbySearchHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
lat := c.QueryFloat("lat", 0)
lon := c.QueryFloat("lon", 0)
radius := c.QueryFloat("radius", 10) // km
query := c.Query("q")
limit := c.QueryInt("limit", 20)
if lat == 0 || lon == 0 { // simplistic presence check: (0, 0) is treated as missing
return c.JSON(400, map[string]string{"error": "Latitude and longitude are required"})
}
// Build geospatial search query
searchQuery := search.SearchQuery{
Index: "places",
Query: query,
Limit: limit,
Filters: []search.Filter{
{
Field: "location",
Operator: "geo_distance",
Value: map[string]interface{}{
"lat": lat,
"lon": lon,
"radius": fmt.Sprintf("%.2fkm", radius),
},
},
},
Sort: []search.SortField{
{
Field: "_geo_distance",
Order: "asc",
},
},
Fields: []string{"name", "address", "category", "rating", "location"},
}
results, err := searchSvc.Search(c.Context(), searchQuery)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Geospatial search failed",
"details": err.Error(),
})
}
// Calculate distances and transform results
var places []map[string]interface{}
for _, hit := range results.Hits {
place := map[string]interface{}{
"id": hit.ID,
"name": hit.Document["name"],
"address": hit.Document["address"],
"category": hit.Document["category"],
"rating": hit.Document["rating"],
"location": hit.Document["location"],
"score": hit.Score,
}
// Calculate distance if location is available
if loc, ok := hit.Document["location"].(map[string]interface{}); ok {
if placeLat, ok := loc["lat"].(float64); ok {
if placeLon, ok := loc["lon"].(float64); ok {
distance := calculateDistance(lat, lon, placeLat, placeLon)
place["distance"] = fmt.Sprintf("%.2f km", distance)
}
}
}
places = append(places, place)
}
return c.JSON(200, map[string]interface{}{
"places": places,
"total": results.Total,
"center": map[string]float64{"lat": lat, "lon": lon},
"radius": radius,
"processing_time": results.ProcessingTime.Milliseconds(),
"query": query,
})
}
func calculateDistance(lat1, lon1, lat2, lon2 float64) float64 {
// Haversine formula for calculating distance between two points
const R = 6371 // Earth's radius in kilometers
dLat := (lat2 - lat1) * math.Pi / 180
dLon := (lon2 - lon1) * math.Pi / 180
a := math.Sin(dLat/2)*math.Sin(dLat/2) +
math.Cos(lat1*math.Pi/180)*math.Cos(lat2*math.Pi/180)*
math.Sin(dLon/2)*math.Sin(dLon/2)
c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
return R * c
}
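A quick sanity check of the formula (hypothetical standalone usage):

// Berlin (52.5200, 13.4050) to Paris (48.8566, 2.3522)
d := calculateDistance(52.5200, 13.4050, 48.8566, 2.3522)
fmt.Printf("%.0f km\n", d) // prints approximately 878 km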
func geoBoundingBoxSearchHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
// Bounding box coordinates
topLat := c.QueryFloat("top_lat", 0)
leftLon := c.QueryFloat("left_lon", 0)
bottomLat := c.QueryFloat("bottom_lat", 0)
rightLon := c.QueryFloat("right_lon", 0)
query := c.Query("q")
limit := c.QueryInt("limit", 50)
if topLat == 0 || leftLon == 0 || bottomLat == 0 || rightLon == 0 { // simplistic presence check: zero coordinates are treated as missing
return c.JSON(400, map[string]string{
"error": "All bounding box coordinates are required",
})
}
searchQuery := search.SearchQuery{
Index: "places",
Query: query,
Limit: limit,
Filters: []search.Filter{
{
Field: "location",
Operator: "geo_bounding_box",
Value: map[string]interface{}{
"top_left": map[string]float64{
"lat": topLat,
"lon": leftLon,
},
"bottom_right": map[string]float64{
"lat": bottomLat,
"lon": rightLon,
},
},
},
},
Fields: []string{"name", "address", "category", "rating", "location"},
}
results, err := searchSvc.Search(c.Context(), searchQuery)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Geo bounding box search failed",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"places": results.Hits,
"total": results.Total,
"bounding_box": map[string]interface{}{
"top_left": map[string]float64{"lat": topLat, "lon": leftLon},
"bottom_right": map[string]float64{"lat": bottomLat, "lon": rightLon},
},
"processing_time": results.ProcessingTime.Milliseconds(),
"query": query,
})
}
func indexPlaceHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
var req struct {
ID string `json:"id"`
Name string `json:"name"`
Address string `json:"address"`
Category string `json:"category"`
Rating float64 `json:"rating"`
Lat float64 `json:"lat"`
Lon float64 `json:"lon"`
}
if err := c.Bind(&req); err != nil {
return c.JSON(400, map[string]string{"error": "Invalid request"})
}
doc := search.Document{
ID: req.ID,
Fields: map[string]interface{}{
"name": req.Name,
"address": req.Address,
"category": req.Category,
"rating": req.Rating,
"location": map[string]float64{
"lat": req.Lat,
"lon": req.Lon,
},
},
}
err := searchSvc.Index(c.Context(), "places", doc)
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Failed to index place",
"details": err.Error(),
})
}
return c.JSON(201, map[string]interface{}{
"status": "Place indexed successfully",
"id": req.ID,
"name": req.Name,
})
}
func searchStatsHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
stats, err := searchSvc.Stats(c.Context())
if err != nil {
return c.JSON(500, map[string]interface{}{
"error": "Failed to get search stats",
"details": err.Error(),
})
}
return c.JSON(200, map[string]interface{}{
"index_count": stats.IndexCount,
"document_count": stats.DocumentCount,
"total_size": stats.TotalSize,
"queries": stats.Queries,
"avg_latency_ms": stats.AvgLatency.Milliseconds(),
"uptime": stats.Uptime.String(),
"version": stats.Version,
"extra": stats.Extra,
})
}
func setupSearchMonitoring(app *forge.App) {
searchSvc := app.GetSearch()
// Start metrics collection
go func() {
ticker := time.NewTicker(30 * time.Second)
defer ticker.Stop()
for {
select {
case <-ticker.C:
collectSearchMetrics(searchSvc)
case <-app.Context().Done():
return
}
}
}()
// Start health checks
go func() {
ticker := time.NewTicker(time.Minute)
defer ticker.Stop()
for {
select {
case <-ticker.C:
checkSearchHealth(searchSvc)
case <-app.Context().Done():
return
}
}
}()
}
func collectSearchMetrics(searchSvc search.Search) {
ctx := context.Background()
stats, err := searchSvc.Stats(ctx)
if err != nil {
log.Printf("Failed to collect search stats: %v", err)
return
}
// Send metrics to monitoring system
metrics := map[string]interface{}{
"search.index_count": stats.IndexCount,
"search.document_count": stats.DocumentCount,
"search.total_size": stats.TotalSize,
"search.queries": stats.Queries,
"search.avg_latency_ms": stats.AvgLatency.Milliseconds(),
"search.uptime_seconds": stats.Uptime.Seconds(),
}
// Send to metrics backend (Prometheus, InfluxDB, etc.)
sendMetrics(metrics)
}
func checkSearchHealth(searchSvc search.Search) {
ctx := context.Background()
err := searchSvc.Ping(ctx)
if err != nil {
log.Printf("Search health check failed: %v", err)
sendAlert("search_health_check_failed", err.Error())
return
}
// Check individual indexes
indexes, err := searchSvc.ListIndexes(ctx)
if err != nil {
log.Printf("Failed to list indexes: %v", err)
return
}
for _, indexName := range indexes {
info, err := searchSvc.GetIndexInfo(ctx, indexName)
if err != nil {
log.Printf("Failed to get info for index %s: %v", indexName, err)
continue
}
// Check for stale indexes (not updated in 24 hours)
if time.Since(info.UpdatedAt) > 24*time.Hour {
log.Printf("Index %s appears stale (last updated: %v)",
indexName, info.UpdatedAt)
}
// Check for empty indexes
if info.DocumentCount == 0 {
log.Printf("Index %s is empty", indexName)
}
}
}
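sendMetrics and sendAlert are referenced but not defined in the snippets above. Hypothetical stand-ins that just log, enough to run the examples locally:

// sendMetrics is a placeholder; swap in a real exporter (Prometheus, InfluxDB, etc.).
func sendMetrics(metrics map[string]interface{}) {
	log.Printf("search metrics: %v", metrics)
}

// sendAlert is a placeholder; swap in a real alerting integration.
func sendAlert(name, detail string) {
	log.Printf("search alert %s: %s", name, detail)
}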
func searchPerformanceHandler(c forge.Context) error {
searchSvc := forge.GetSearch(c)
// Run performance test queries
testQueries := []string{
"laptop",
"smartphone",
"headphones",
"camera",
"tablet",
}
var results []map[string]interface{}
for _, query := range testQueries {
start := time.Now()
searchQuery := search.SearchQuery{
Index: "products",
Query: query,
Limit: 10,
}
searchResults, err := searchSvc.Search(c.Context(), searchQuery)
latency := time.Since(start)
result := map[string]interface{}{
"query": query,
"latency": latency.Milliseconds(),
"success": err == nil,
}
if err != nil {
result["error"] = err.Error()
} else {
result["total_hits"] = searchResults.Total
result["processing_time"] = searchResults.ProcessingTime.Milliseconds()
}
results = append(results, result)
}
// Calculate average latency
var totalLatency int64
successCount := 0
for _, result := range results {
if result["success"].(bool) {
totalLatency += result["latency"].(int64)
successCount++
}
}
var avgLatency float64
if successCount > 0 {
avgLatency = float64(totalLatency) / float64(successCount)
}
return c.JSON(200, map[string]interface{}{
"test_results": results,
"total_queries": len(testQueries),
"successful": successCount,
"failed": len(testQueries) - successCount,
"avg_latency_ms": avgLatency,
"timestamp": time.Now(),
})
}
Best Practices
Index Design
- Field mapping: Choose appropriate field types for your data
- Analyzers: Use custom analyzers for specific languages or domains
- Synonyms: Implement synonyms for better search recall
- Stop words: Remove common words that don't add search value
- Boost fields: Weight important fields higher in search results (see the sketch after this list)
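Most of these choices live in the IndexSchema. A condensed sketch using the types from the examples above:

schema := search.IndexSchema{
	Fields: []search.FieldSchema{
		// Boost title matches above content matches.
		{Name: "title", Type: "text", Searchable: true, Boost: 2.0},
		{Name: "content", Type: "text", Searchable: true},
	},
	// Synonyms widen recall; stop words drop low-value tokens.
	Synonyms:  []search.Synonym{{Terms: []string{"laptop", "notebook"}}},
	StopWords: []string{"the", "a", "an"},
}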
Query Optimization
- Limit results: Use reasonable limits to improve performance
- Filter before search: Apply filters to reduce search scope (sketched after this list)
- Use facets: Provide guided search with faceted navigation
- Fuzzy search: Enable typo tolerance for better user experience
- Highlighting: Show search terms in context
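A sketch combining these points into one query, using the types from the examples above:

query := search.SearchQuery{
	Index: "products",
	Query: q,
	// Filter before scoring to shrink the candidate set.
	Filters: []search.Filter{
		{Field: "in_stock", Operator: "=", Value: true},
	},
	Limit:      20, // keep pages small
	Facets:     []string{"category"},
	FuzzyLevel: 1, // tolerate a single typo
	Highlight:  true,
}
results, err := searchSvc.Search(ctx, query)
// handle err; render results.Hits and results.Facets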
Performance Tuning
- Bulk operations: Use bulk indexing for large datasets (see the batching sketch below)
- Connection pooling: Reuse connections efficiently
- Caching: Cache frequent queries and results
- Pagination: Implement efficient pagination for large result sets
- Monitoring: Track query performance and optimize slow queries
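For large imports, batch documents through BulkIndex and align the batch size with the bulk_size setting. A sketch, assuming docs is a []search.Document inside a helper that returns an error:

const batchSize = 1000 // align with bulk_size in the configuration
for start := 0; start < len(docs); start += batchSize {
	end := start + batchSize
	if end > len(docs) {
		end = len(docs)
	}
	if err := searchSvc.BulkIndex(ctx, "products", docs[start:end]); err != nil {
		return err
	}
}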
Security
- Input validation: Sanitize all search inputs (see the sketch below)
- Access control: Implement proper search permissions
- Rate limiting: Prevent search abuse
- Encryption: Use TLS for data in transit
- Audit logging: Log search activities for compliance
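A sketch of basic input hygiene before running a query; the clamp values mirror default_limit and max_limit from the configuration:

q := strings.TrimSpace(c.Query("q"))
if len(q) > 256 {
	q = q[:256] // cap pathological input lengths
}
limit := c.QueryInt("limit", 20)
if limit < 1 {
	limit = 1
}
if limit > 1000 {
	limit = 1000 // mirror max_limit
}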
Troubleshooting
Common Issues
Connection Problems
# Check search backend connectivity
curl -X GET /api/search/health
# Verify configuration
curl -X GET /api/search/stats
Index Issues
# List all indexes
curl -X GET /api/search/indexes
# Get index information
curl -X GET /api/search/indexes/products/info
Search Performance
# Run performance tests
curl -X GET /api/search/performance
# Check query latency
curl -X GET /api/search/metrics?metric=latency
Debug Mode
extensions:
  search:
    debug: true
    log_level: "debug"
    trace_queries: true