Skip to content

Commit

Permalink
Revert to buffered response
Browse files Browse the repository at this point in the history
The HTTP handler used to close the request's body after the first response flush. Ideally it should not close the request body; instead, after reading each request body line it should write a response in a streaming fashion, thus not consuming tons of RAM. This would result in "Transfer-Encoding: chunked" instead of buffering the response and sending "Content-Length: XXX".
  • Loading branch information
assafmo committed Apr 18, 2019
1 parent 93342dc commit c7e4958
Showing 1 changed file with 34 additions and 33 deletions.
67 changes: 34 additions & 33 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ func main() {

func query(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Server", "SQLiteQueryServer v"+version)
// w.Header().Set("X-Content-Type-Options", "nosniff") // prevent browsers from doing MIME-type sniffing

if r.URL.Path != "/query" {
http.Error(w, helpMessage, http.StatusNotFound)
Expand All @@ -85,18 +86,18 @@ func query(w http.ResponseWriter, r *http.Request) {
return
}

wFlusher, ok := w.(http.Flusher)
if !ok {
http.Error(w,
fmt.Sprintf("Error creating a stream writer.\n\n%s", helpMessage), http.StatusInternalServerError)
return
}
// wFlusher, ok := w.(http.Flusher)
// if !ok {
// http.Error(w,
// fmt.Sprintf("Error creating a stream writer.\n\n%s", helpMessage), http.StatusInternalServerError)
// return
// }

w.Header().Set("Content-Type", "application/json")
outpoutEncoder := json.NewEncoder(w)
// start printing the outer array
fmt.Fprintf(w, "[")
wFlusher.Flush()
// wFlusher.Flush()

reqCsvReader := csv.NewReader(r.Body)
reqCsvReader.ReuseRecord = true
Expand All @@ -116,7 +117,7 @@ func query(w http.ResponseWriter, r *http.Request) {
if !isFirstQuery {
// print comma between queries results
fmt.Fprintf(w, ",")
wFlusher.Flush()
// wFlusher.Flush()
}
isFirstQuery = false

Expand Down Expand Up @@ -147,14 +148,14 @@ func query(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, `"headers":`)
outpoutEncoder.Encode(cols)
fmt.Fprintf(w, `,"out":[`) // start printing the out rows array
wFlusher.Flush()
// wFlusher.Flush()

isFirstRow := true
for rows.Next() {
if !isFirstRow {
// print comma between rows
fmt.Fprintf(w, ",")
wFlusher.Flush()
// wFlusher.Flush()
}
isFirstRow = false

Expand Down Expand Up @@ -184,33 +185,12 @@ func query(w http.ResponseWriter, r *http.Request) {

// finish printing a query result
fmt.Fprintf(w, "]}")
wFlusher.Flush()
// wFlusher.Flush()
}

// finish printing the outer array
fmt.Fprintf(w, "]\n")
wFlusher.Flush()
}

func countParams() int {
rows, err := queryStmt.Query()
if err != nil {
regex := regexp.MustCompile(`sql: expected (\d+) arguments, got 0`)
regexSubmatches := regex.FindAllStringSubmatch(err.Error(), 1)
if len(regexSubmatches) != 1 || len(regexSubmatches[0]) != 2 {
// this is weird, return best guess
return strings.Count(queryString, "?")
}
count, err := strconv.Atoi(regexSubmatches[0][1])
if err != nil {
// this is weirder because the regex is \d+
// return best guess
return strings.Count(queryString, "?")
}
return count
}
rows.Close()
return 0
// wFlusher.Flush()
}

func buildHelpMessage() {
Expand Down Expand Up @@ -273,3 +253,24 @@ func buildHelpMessage() {
- Element #1 is the result of query #1, Element #2 is the result of query #2, and so forth.
`, serverPort)
}

// countParams reports how many bound parameters ("?") the prepared
// statement queryStmt expects.
//
// It probes the statement by executing it with zero arguments: if the
// statement expects N > 0 parameters, database/sql fails with
// "sql: expected N arguments, got 0", and N is parsed out of that message.
// If the probe succeeds, the statement takes no parameters and 0 is
// returned. When the error message cannot be parsed, the number of "?"
// characters in queryString is returned as a best guess (this may
// over-count, e.g. a "?" inside a string literal).
func countParams() int {
	rows, err := queryStmt.Query()
	if err == nil {
		// The query ran with zero arguments, so it expects none.
		rows.Close()
		return 0
	}

	regex := regexp.MustCompile(`sql: expected (\d+) arguments, got 0`)
	submatches := regex.FindStringSubmatch(err.Error())
	if submatches == nil {
		// Unexpected error format — fall back to counting placeholders.
		return strings.Count(queryString, "?")
	}
	count, convErr := strconv.Atoi(submatches[1])
	if convErr != nil {
		// Should be impossible since the capture group is \d+, but an
		// enormous digit run could overflow int — fall back to best guess.
		return strings.Count(queryString, "?")
	}
	return count
}

0 comments on commit c7e4958

Please sign in to comment.