Mirror of https://github.com/edufeed-org/eventstore.git, synced 2025-12-10 00:34:32 +00:00
Rework query function to work on multiple and optional fields
This commit is contained in:
parent b54dd7900f
commit 956860676a
1 changed file with 155 additions and 50 deletions
@@ -16,18 +16,48 @@ import (
 func (ts *TSBackend) QueryEvents(ctx context.Context, filter nostr.Filter) (chan *nostr.Event, error) {
     ch := make(chan *nostr.Event)

-    nostrs, err := ts.SearchResources(filter.Search)
-    if err != nil {
-        log.Printf("Search failed: %v", err)
-        return ch, err
+    log.Printf("Processing query with search: %s", filter.Search)
+
+    // If we have no search parameter, return an empty channel
+    if filter.Search == "" {
+        log.Printf("No search parameter provided, returning empty result")
+        close(ch)
+        return ch, nil
     }

-    go func() {
-        for _, evt := range nostrs {
-            ch <- &evt
-        }
-        close(ch)
-    }()
+    nostrsearch, err := ts.SearchResources(filter.Search)
+    if err != nil {
+        log.Printf("Search failed: %v", err)
+        // Return the channel anyway, but close it immediately
+        close(ch)
+        return ch, fmt.Errorf("search failed: %w", err)
+    }
+
+    log.Printf("Search succeeded, found %d events", len(nostrsearch))
+
+    go func() {
+        // Check if context is done before sending events
+        select {
+        case <-ctx.Done():
+            log.Printf("Context cancelled before sending results")
+            close(ch)
+            return
+        default:
+            for _, evt := range nostrsearch {
+                select {
+                case <-ctx.Done():
+                    // Context was cancelled during event sending
+                    log.Printf("Context cancelled during event sending")
+                    break
+                default:
+                    // Send the event
+                    ch <- &evt
+                }
+            }
+            close(ch)
+        }
+    }()

     return ch, nil
 }
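One Go detail worth noting about the new send loop: a break inside a select statement only leaves that select, not the surrounding for range loop, so cancellation during sending does not end the iteration by itself. The sketch below is purely illustrative and not part of this commit; it shows one way a send loop can stop on cancellation, assuming the nostr types come from the usual go-nostr module and placing the helper in a placeholder package.

package example // placeholder package, not part of the repository

import (
    "context"

    "github.com/nbd-wtf/go-nostr" // assumed import path for the nostr package used above
)

// sendEvents is a hypothetical helper: it drains a slice of events into a
// channel and stops the whole loop as soon as the context is cancelled.
func sendEvents(ctx context.Context, events []nostr.Event, ch chan *nostr.Event) {
    defer close(ch)
    for i := range events {
        evt := events[i] // copy; before Go 1.22 the range variable is reused across iterations
        select {
        case <-ctx.Done():
            return // unlike a break inside a nested select, this ends the loop
        case ch <- &evt:
        }
    }
}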
@@ -44,10 +74,10 @@ func (ts *TSBackend) SearchResources(searchStr string) ([]nostr.Event, error) {
     encodedQuery := url.QueryEscape(mainQuery)

     // Default fields to search in
-    queryBy := "name,description"
+    queryBy := "name,description,about,learningResourceType,keywords,creator,publisher"

     // Start building the search URL
-    searchURL := fmt.Sprintf("%s/collections/%s/documents/search?q=%s&query_by=%s",
+    searchURL := fmt.Sprintf("%s/collections/%s/documents/search?validate_field_names=false&q=%s&query_by=%s",
         ts.Host, ts.CollectionName, encodedQuery, queryBy)

     // Add additional parameters
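For illustration only (host and collection name are placeholders), a plain search for "algebra" would now be sent as a request of roughly this shape, before the additional parameters such as filter_by are appended; validate_field_names=false presumably keeps Typesense from rejecting the query when some of the listed fields are absent from a document or schema:

https://typesense.example.org/collections/resources/documents/search?validate_field_names=false&q=algebra&query_by=name,description,about,learningResourceType,keywords,creator,publisher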
@@ -69,20 +99,44 @@ func (ts *TSBackend) SearchResources(searchStr string) ([]nostr.Event, error) {
         return nil, fmt.Errorf("search failed with status code %d: %s", resp.StatusCode, string(body))
     }

+    // Try to parse the raw JSON to understand its structure
+    var rawResponse interface{}
+    if err := json.Unmarshal(body, &rawResponse); err != nil {
+        fmt.Printf("Warning: Could not parse raw response as JSON: %v\n", err)
+    } else {
+        // Check if we got a hits array
+        responseMap, ok := rawResponse.(map[string]interface{})
+        if ok {
+            if hits, exists := responseMap["hits"]; exists {
+                hitsArray, ok := hits.([]interface{})
+                if ok {
+                    fmt.Printf("Response contains %d hits\n", len(hitsArray))
+                    if len(hitsArray) > 0 {
+                        // Look at the structure of the first hit
+                        firstHit, ok := hitsArray[0].(map[string]interface{})
+                        if ok {
+                            fmt.Printf("First hit keys: %v\n", getMapKeys(firstHit))
+                        }
+                    }
+                }
+            }
+        }
+    }
+
     return parseSearchResponse(body)
 }

 // SearchQuery represents a parsed search query with raw terms and field filters
 type SearchQuery struct {
     RawTerms     []string
-    FieldFilters map[string]string
+    FieldFilters map[string][]string // Changed from map[string]string to map[string][]string to support multiple values
 }

 // ParseSearchQuery parses a search string with support for quoted terms and field:value pairs
 func ParseSearchQuery(searchStr string) SearchQuery {
     var query SearchQuery
     query.RawTerms = []string{}
-    query.FieldFilters = make(map[string]string)
+    query.FieldFilters = make(map[string][]string) // Initialize as map to array of strings

     // Regular expression to match quoted strings and field:value pairs
     // This regex handles:
@@ -97,16 +151,22 @@ func ParseSearchQuery(searchStr string) SearchQuery {
             // This is a quoted string, add it to raw terms
             query.RawTerms = append(query.RawTerms, match[1])
         } else if match[2] != "" && match[3] != "" {
-            // This is a field:value pair
+            // This is a field:value pair with dot notation
             fieldName := match[2]
             fieldValue := match[3]
-            query.FieldFilters[fieldName] = fieldValue
+
+            // Add to the array of values for this field
+            query.FieldFilters[fieldName] = append(query.FieldFilters[fieldName], fieldValue)
         } else if match[4] != "" {
             // This is a regular word, check if it's a simple field:value
             parts := strings.SplitN(match[4], ":", 2)
             if len(parts) == 2 && !strings.Contains(parts[0], ".") {
                 // Simple field:value without dot notation
-                query.FieldFilters[parts[0]] = parts[1]
+                fieldName := parts[0]
+                fieldValue := parts[1]
+
+                // Add to the array of values for this field
+                query.FieldFilters[fieldName] = append(query.FieldFilters[fieldName], fieldValue)
             } else {
                 // Regular search term
                 query.RawTerms = append(query.RawTerms, match[4])
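A small usage sketch of the new multi-value parsing, assuming it sits in the same package as ParseSearchQuery and that the regex captures quoted phrases in match[1], dotted field:value pairs in match[2]/match[3], and bare words in match[4], as the surrounding code suggests:

package eventstore // placeholder: must match the package that defines ParseSearchQuery

import "fmt"

// demoParseSearchQuery shows repeated field filters accumulating instead of
// overwriting each other.
func demoParseSearchQuery() {
    q := ParseSearchQuery(`keywords:algebra keywords:geometry "linear equations"`)

    fmt.Println(q.RawTerms)     // expected: [linear equations]
    fmt.Println(q.FieldFilters) // expected: map[keywords:[algebra geometry]]
}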
@@ -125,25 +185,42 @@ func BuildTypesenseQuery(query SearchQuery) (string, map[string]string, error) {
     // Parameters for filter_by and other Typesense parameters
     params := make(map[string]string)

-    // Build filter expressions for field filters
-    var filterExpressions []string
+    // Group filter expressions by base field name
+    fieldGroups := make(map[string][]string)

-    for field, value := range query.FieldFilters {
-        // Handle special fields with dot notation
-        if strings.Contains(field, ".") {
-            parts := strings.SplitN(field, ".", 2)
-            fieldName := parts[0]
-            subField := parts[1]
-
-            filterExpressions = append(filterExpressions, fmt.Sprintf("%s.%s:%s", fieldName, subField, value))
-        } else {
-            filterExpressions = append(filterExpressions, fmt.Sprintf("%s:%s", field, value))
+    for field, values := range query.FieldFilters {
+        // Extract the base field name (part before the first dot)
+        baseName := field
+        if dotIndex := strings.Index(field, "."); dotIndex != -1 {
+            baseName = field[:dotIndex]
+        }
+
+        for _, value := range values {
+            // Create the filter expression
+            filterExpr := fmt.Sprintf("%s:%s", field, value)
+
+            // Add to the corresponding field group
+            fieldGroups[baseName] = append(fieldGroups[baseName], filterExpr)
         }
     }

-    // Combine all filter expressions
-    if len(filterExpressions) > 0 {
-        params["filter_by"] = strings.Join(filterExpressions, " && ")
+    // Build the final filter expressions
+    var finalFilterExpressions []string
+
+    for _, expressions := range fieldGroups {
+        if len(expressions) == 1 {
+            // Single expression, add as is
+            finalFilterExpressions = append(finalFilterExpressions, expressions[0])
+        } else {
+            // Multiple expressions for same base field, join with OR
+            orExpression := fmt.Sprintf("(%s)", strings.Join(expressions, " || "))
+            finalFilterExpressions = append(finalFilterExpressions, orExpression)
+        }
+    }
+
+    // Combine all filter expressions with AND
+    if len(finalFilterExpressions) > 0 {
+        params["filter_by"] = strings.Join(finalFilterExpressions, " && ")
     }

     return mainQuery, params, nil
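The grouping logic means repeated filters on the same base field become alternatives while different fields stay mandatory. A hypothetical call follows (same placeholder package; the field names and values are invented, and Go map iteration order is not fixed, so the AND-ed groups can appear in either order):

package eventstore // placeholder: must match the package that defines BuildTypesenseQuery

import "fmt"

func demoBuildTypesenseQuery() {
    q := SearchQuery{
        RawTerms: []string{"mathematics"},
        FieldFilters: map[string][]string{
            "keywords":     {"algebra", "geometry"},
            "creator.name": {"Alice"},
        },
    }

    _, params, _ := BuildTypesenseQuery(q)
    fmt.Println(params["filter_by"])
    // expected, up to ordering: (keywords:algebra || keywords:geometry) && creator.name:Alice
}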
@@ -155,30 +232,50 @@ func parseSearchResponse(responseBody []byte) ([]nostr.Event, error) {
         return nil, fmt.Errorf("error parsing search response: %v", err)
     }

+    // Debug: Print the raw response structure
+    fmt.Printf("Search response found %d hits\n", searchResponse.Found)
+
     nostrResults := make([]nostr.Event, 0, len(searchResponse.Hits))

-    for _, hit := range searchResponse.Hits {
-        // Extract the document from the hit
-        docMap, ok := hit["document"]
+    for i, hit := range searchResponse.Hits {
+        // Debug: Print hit structure information
+        fmt.Printf("Processing hit %d, keys: %v\n", i, getMapKeys(hit))
+
+        // Check if document exists in the hit
+        docRaw, exists := hit["document"]
+        if !exists {
+            fmt.Printf("Warning: hit %d has no 'document' field\n", i)
+            continue // Skip this hit
+        }
+
+        // Extract document directly as a map[string]interface{}
+        docMap, ok := docRaw.(map[string]interface{})
         if !ok {
-            return nil, fmt.Errorf("invalid document format in search results")
+            fmt.Printf("Warning: hit %d document is not a map, type: %T\n", i, docRaw)
+            continue // Skip this hit
         }

-        // Convert the map to AMB metadata
-        docJSON, err := json.Marshal(docMap)
+        // Debug: Print document keys
+        fmt.Printf("Document keys: %v\n", getMapKeys(docMap))
+
+        // Check for EventRaw field directly
+        eventRawVal, hasEventRaw := docMap["eventRaw"]
+        if !hasEventRaw {
+            fmt.Printf("Warning: document has no 'eventRaw' field\n")
+            continue // Skip this document
+        }
+
+        // Try to extract EventRaw as string
+        eventRawStr, ok := eventRawVal.(string)
+        if !ok {
+            fmt.Printf("Warning: eventRaw is not a string, type: %T\n", eventRawVal)
+            continue // Skip this document
+        }
+
+        // Convert the EventRaw string to a Nostr event
+        nostrEvent, err := StringifiedJSONToNostrEvent(eventRawStr)
         if err != nil {
-            return nil, fmt.Errorf("error marshaling document: %v", err)
-        }
-
-        var ambData AMBMetadata
-        if err := json.Unmarshal(docJSON, &ambData); err != nil {
-            return nil, fmt.Errorf("error unmarshaling to AMBMetadata: %v", err)
-        }
-
-        // Convert the AMB metadata to a Nostr event
-        nostrEvent, err := StringifiedJSONToNostrEvent(ambData.EventRaw)
-        if err != nil {
-            fmt.Printf("Warning: failed to convert AMB to Nostr: %v\n", err)
+            fmt.Printf("Warning: failed to convert EventRaw to Nostr event: %v\n", err)
             continue
         }

@@ -186,8 +283,16 @@ func parseSearchResponse(responseBody []byte) ([]nostr.Event, error) {
     }

     // Print the number of results for logging
-    fmt.Printf("Found %d results\n",
-        len(nostrResults))
+    fmt.Printf("Successfully processed %d results\n", len(nostrResults))

     return nostrResults, nil
 }
+
+// Helper function to get keys from a map for debugging
+func getMapKeys(m map[string]interface{}) []string {
+    keys := make([]string, 0, len(m))
+    for k := range m {
+        keys = append(keys, k)
+    }
+    return keys
+}
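For orientation, the rewritten parser only relies on the top-level found count and on each hit's document carrying the original event as a JSON string under eventRaw; everything else is logged and skipped. Below is a hypothetical, trimmed response of that shape fed through parseSearchResponse (same placeholder package; the embedded event JSON is invented and would be a complete serialized Nostr event in practice):

package eventstore // placeholder: must match the package that defines parseSearchResponse

import "fmt"

func demoParseSearchResponse() {
    // Only "found" and hits[].document.eventRaw are read by the code above.
    body := []byte(`{
        "found": 1,
        "hits": [
            { "document": { "eventRaw": "{\"kind\":1,\"content\":\"hello\",\"tags\":[]}" } }
        ]
    }`)

    events, err := parseSearchResponse(body)
    // Hits whose eventRaw cannot be converted are skipped rather than failing the call.
    fmt.Println(len(events), err)
}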