package anaconda

import (
	"net/url"
)

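// SearchMetadata is the search_metadata object that Twitter returns with
// each page of search results; its NextResults query string is what
// GetNext uses to request the following page.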
type SearchMetadata struct {
	CompletedIn   float32 `json:"completed_in"`
	MaxId         int64   `json:"max_id"`
	MaxIdString   string  `json:"max_id_str"`
	Query         string  `json:"query"`
	RefreshUrl    string  `json:"refresh_url"`
	Count         int     `json:"count"`
	SinceId       int64   `json:"since_id"`
	SinceIdString string  `json:"since_id_str"`
	NextResults   string  `json:"next_results"`
}

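// SearchResponse bundles one page of matched tweets with the metadata
// needed to page through further results.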
type SearchResponse struct {
	Statuses []Tweet        `json:"statuses"`
	Metadata SearchMetadata `json:"search_metadata"`
}

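// GetNext fetches the next page of results for an earlier search, using the
// next_results query string from the response metadata. When there are no
// further pages it returns an empty SearchResponse and a nil error.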
func (sr *SearchResponse) GetNext(a *TwitterApi) (SearchResponse, error) {
	if sr.Metadata.NextResults == "" {
		return SearchResponse{}, nil
	}
	nextUrl, err := url.Parse(sr.Metadata.NextResults)
	if err != nil {
		return SearchResponse{}, err
	}

	v := nextUrl.Query()
	// Drop the q parameter from the url.Values; GetSearch sets it again
	// below from the unescaped original query.
	delete(v, "q")

	q, err := url.QueryUnescape(sr.Metadata.Query)
	if err != nil {
		return SearchResponse{}, err
	}
	return a.GetSearch(q, v)
}

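// GetSearch queries the GET search/tweets endpoint for statuses matching
// queryString; extra parameters such as count or result_type can be passed
// through v. A rough sketch of paging through results (assuming api is a
// *TwitterApi obtained from NewTwitterApi, with error handling elided):
//
//	sr, _ := api.GetSearch("golang", url.Values{})
//	for len(sr.Statuses) > 0 {
//		// ... use sr.Statuses ...
//		sr, _ = sr.GetNext(api)
//	}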
func (a TwitterApi) GetSearch(queryString string, v url.Values) (sr SearchResponse, err error) {
	v = cleanValues(v)
	v.Set("q", queryString)
	response_ch := make(chan response)
	a.queryQueue <- query{a.baseUrl + "/search/tweets.json", v, &sr, _GET, response_ch}

	// Read from the response channel before returning so that the request
	// has completed and sr has been populated with the decoded results.
	resp := <-response_ch
	err = resp.err
	return sr, err
}