/*
 * $Id: SphinxClient.java 3144 2012-03-12 08:58:04Z tomat $
 *
 * Java version of Sphinx searchd client (Java API)
 *
 * Copyright (c) 2007, Vladimir Fedorkov
 * Copyright (c) 2007-2012, Andrew Aksyonoff
 * Copyright (c) 2008-2012, Sphinx Technologies Inc
 * All rights reserved
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License. You should have
 * received a copy of the GPL license along with this program; if you
 * did not, you can find it at http://www.gnu.org/
 */

package org.sphx.api;

import java.io.*;
import java.net.*;
import java.util.*;

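/*
 * Example usage -- a minimal sketch, not part of the original source; it
 * assumes a running searchd on localhost:9312 and an index named "test1"
 * (adjust host, port, and index name to your own setup):
 *
 *   SphinxClient cl = new SphinxClient ( "localhost", 9312 );
 *   cl.SetMatchMode ( SphinxClient.SPH_MATCH_EXTENDED2 );
 *   cl.SetLimits ( 0, 10 );
 *   SphinxResult res = cl.Query ( "hello world", "test1" );
 *   if ( res==null )
 *       System.err.println ( "query failed: " + cl.GetLastError() );
 *   else
 *       for ( int i=0; i<res.matches.length; i++ )
 *           System.out.println ( "doc=" + res.matches[i].docId + " weight=" + res.matches[i].weight );
 */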
/** Sphinx client class */
public class SphinxClient
{
	/* matching modes */
	public final static int SPH_MATCH_ALL			= 0;
	public final static int SPH_MATCH_ANY			= 1;
	public final static int SPH_MATCH_PHRASE		= 2;
	public final static int SPH_MATCH_BOOLEAN		= 3;
	public final static int SPH_MATCH_EXTENDED		= 4;
	public final static int SPH_MATCH_FULLSCAN		= 5;
	public final static int SPH_MATCH_EXTENDED2		= 6;

	/* ranking modes (extended2 only) */
	public final static int SPH_RANK_PROXIMITY_BM25	= 0;
	public final static int SPH_RANK_BM25			= 1;
	public final static int SPH_RANK_NONE			= 2;
	public final static int SPH_RANK_WORDCOUNT		= 3;
	public final static int SPH_RANK_PROXIMITY		= 4;
	public final static int SPH_RANK_MATCHANY		= 5;
	public final static int SPH_RANK_FIELDMASK		= 6;
	public final static int SPH_RANK_SPH04			= 7;
	public final static int SPH_RANK_EXPR			= 8;
	public final static int SPH_RANK_TOTAL			= 9;

	/* sorting modes */
	public final static int SPH_SORT_RELEVANCE		= 0;
	public final static int SPH_SORT_ATTR_DESC		= 1;
	public final static int SPH_SORT_ATTR_ASC		= 2;
	public final static int SPH_SORT_TIME_SEGMENTS	= 3;
	public final static int SPH_SORT_EXTENDED		= 4;
	public final static int SPH_SORT_EXPR			= 5;

	/* grouping functions */
	public final static int SPH_GROUPBY_DAY			= 0;
	public final static int SPH_GROUPBY_WEEK		= 1;
	public final static int SPH_GROUPBY_MONTH		= 2;
	public final static int SPH_GROUPBY_YEAR		= 3;
	public final static int SPH_GROUPBY_ATTR		= 4;
	public final static int SPH_GROUPBY_ATTRPAIR	= 5;

	/* searchd reply status codes */
	public final static int SEARCHD_OK				= 0;
	public final static int SEARCHD_ERROR			= 1;
	public final static int SEARCHD_RETRY			= 2;
	public final static int SEARCHD_WARNING			= 3;

	/* attribute types */
	public final static int SPH_ATTR_INTEGER		= 1;
	public final static int SPH_ATTR_TIMESTAMP		= 2;
	public final static int SPH_ATTR_ORDINAL		= 3;
	public final static int SPH_ATTR_BOOL			= 4;
	public final static int SPH_ATTR_FLOAT			= 5;
	public final static int SPH_ATTR_BIGINT			= 6;
	public final static int SPH_ATTR_STRING			= 7;
	public final static int SPH_ATTR_MULTI			= 0x40000001;
	public final static int SPH_ATTR_MULTI64		= 0x40000002;

	/* searchd commands */
	private final static int SEARCHD_COMMAND_SEARCH		= 0;
	private final static int SEARCHD_COMMAND_EXCERPT	= 1;
	private final static int SEARCHD_COMMAND_UPDATE		= 2;
	private final static int SEARCHD_COMMAND_KEYWORDS	= 3;
	private final static int SEARCHD_COMMAND_PERSIST	= 4;
	private final static int SEARCHD_COMMAND_FLUSHATTRS	= 7;

	/* searchd command versions */
	private final static int VER_MAJOR_PROTO		= 0x1;
	private final static int VER_COMMAND_SEARCH		= 0x119;
	private final static int VER_COMMAND_EXCERPT	= 0x102;
	private final static int VER_COMMAND_UPDATE		= 0x102;
	private final static int VER_COMMAND_KEYWORDS	= 0x100;
	private final static int VER_COMMAND_FLUSHATTRS	= 0x100;

	/* filter types */
	private final static int SPH_FILTER_VALUES		= 0;
	private final static int SPH_FILTER_RANGE		= 1;
	private final static int SPH_FILTER_FLOATRANGE	= 2;


	private String		_host;
	private int			_port;
	private String		_path;
	private Socket		_socket;

	private int			_offset;
	private int			_limit;
	private int			_mode;
	private int[]		_weights;
	private int			_sort;
	private String		_sortby;
	private int			_minId;
	private int			_maxId;
	private ByteArrayOutputStream	_rawFilters;
	private DataOutputStream		_filters;
	private int			_filterCount;
	private String		_groupBy;
	private int			_groupFunc;
	private String		_groupSort;
	private String		_groupDistinct;
	private int			_maxMatches;
	private int			_cutoff;
	private int			_retrycount;
	private int			_retrydelay;
	private String		_latitudeAttr;
	private String		_longitudeAttr;
	private float		_latitude;
	private float		_longitude;

	private String		_error;
	private String		_warning;
	private boolean		_connerror;
	private int			_timeout;

	private ArrayList	_reqs;
	private Map			_indexWeights;
	private int			_ranker;
	private String		_rankexpr;
	private int			_maxQueryTime;
	private Map			_fieldWeights;
	private Map			_overrideTypes;
	private Map			_overrideValues;
	private String		_select;
	/** Creates a new SphinxClient instance. */
	public SphinxClient()
	{
		this("localhost", 9312);
	}

	/** Creates a new SphinxClient instance, with host:port specification. */
	public SphinxClient(String host, int port)
	{
		_host	= host;
		_port	= port;
		_path	= null;
		_socket	= null;

		_offset	= 0;
		_limit	= 20;
		_mode	= SPH_MATCH_ALL;
		_sort	= SPH_SORT_RELEVANCE;
		_sortby	= "";
		_minId	= 0;
		_maxId	= 0;

		_filterCount	= 0;
		_rawFilters		= new ByteArrayOutputStream();
		_filters		= new DataOutputStream(_rawFilters);

		_groupBy		= "";
		_groupFunc		= SPH_GROUPBY_DAY;
		_groupSort		= "@group desc";
		_groupDistinct	= "";

		_maxMatches		= 1000;
		_cutoff			= 0;
		_retrycount		= 0;
		_retrydelay		= 0;

		_latitudeAttr	= null;
		_longitudeAttr	= null;
		_latitude		= 0;
		_longitude		= 0;

		_error			= "";
		_warning		= "";
		_connerror		= false;
		_timeout		= 1000;

		_reqs			= new ArrayList();
		_weights		= null;
		_indexWeights	= new LinkedHashMap();
		_fieldWeights	= new LinkedHashMap();
		_ranker			= SPH_RANK_PROXIMITY_BM25;
		_rankexpr		= "";

		_overrideTypes	= new LinkedHashMap();
		_overrideValues	= new LinkedHashMap();
		_select			= "*";
	}

	/** Get last error message, if any. */
	public String GetLastError()
	{
		return _error;
	}

	/** Get last warning message, if any. */
	public String GetLastWarning()
	{
		return _warning;
	}

	/** Get last error flag (to tell network connection errors from searchd errors or broken responses). */
	public boolean IsConnectError()
	{
		return _connerror;
	}

	/** Set searchd host and port to connect to. */
	public void SetServer(String host, int port) throws SphinxException
	{
		myAssert ( host!=null && host.length()>0, "host name must not be empty" );
		myAssert ( port>0 && port<65536, "port must be in 1..65535 range" );
		_host = host;
		_port = port;
	}

	/** Set server connection timeout (0 to remove), in milliseconds. */
	public void SetConnectTimeout ( int timeout )
	{
		_timeout = Math.max ( timeout, 0 );
	}

	/** Internal method. Sanity check. */
	private void myAssert ( boolean condition, String err ) throws SphinxException
	{
		if ( !condition )
		{
			_error = err;
			throw new SphinxException ( err );
		}
	}

	/** Internal method. String IO helper. */
	private static void writeNetUTF8 ( DataOutputStream ostream, String str ) throws IOException
	{
		if ( str==null )
		{
			ostream.writeInt ( 0 );
			return;
		}

		byte[] sBytes = str.getBytes ( "UTF-8" );
		int iLen = sBytes.length;

		ostream.writeInt ( iLen );
		ostream.write ( sBytes );
	}

	/** Internal method. String IO helper. */
	private static String readNetUTF8(DataInputStream istream) throws IOException
	{
		int iLen = istream.readInt();
		byte[] sBytes = new byte [ iLen ];
		istream.readFully ( sBytes );
		return new String ( sBytes, "UTF-8");
	}

	/** Internal method. Unsigned int IO helper. */
	private static long readDword ( DataInputStream istream ) throws IOException
	{
		long v = (long) istream.readInt ();
		if ( v<0 )
			v += 4294967296L;
		return v;
	}

	/** Internal method. Connect to searchd and exchange versions. */
	private Socket _Connect()
	{
		if ( _socket!=null )
			return _socket;

		_connerror = false;
		Socket sock = null;
		try
		{
			sock = new Socket ();
			sock.setSoTimeout ( _timeout );
			InetSocketAddress addr = new InetSocketAddress ( _host, _port );
			sock.connect ( addr, _timeout );

			DataInputStream sIn = new DataInputStream ( sock.getInputStream() );
			int version = sIn.readInt();
			if ( version<1 )
			{
				sock.close ();
				_error = "expected searchd protocol version 1+, got version " + version;
				return null;
			}

			DataOutputStream sOut = new DataOutputStream ( sock.getOutputStream() );
			sOut.writeInt ( VER_MAJOR_PROTO );

		} catch ( IOException e )
		{
			_error = "connection to " + _host + ":" + _port + " failed: " + e;
			_connerror = true;

			try
			{
				if ( sock!=null )
					sock.close ();
			} catch ( IOException e1 ) {}
			return null;
		}

		return sock;
	}

	/** Internal method. Get and check response packet from searchd. */
	private byte[] _GetResponse ( Socket sock )
	{
		/* connect */
		DataInputStream sIn = null;
		InputStream SockInput = null;
		try
		{
			SockInput = sock.getInputStream();
			sIn = new DataInputStream ( SockInput );

		} catch ( IOException e )
		{
			_error = "getInputStream() failed: " + e;
			return null;
		}

		/* read response */
		byte[] response = null;
		short status = 0, ver = 0;
		int len = 0;
		try
		{
			/* read status fields */
			status = sIn.readShort();
			ver = sIn.readShort();
			len = sIn.readInt();

			/* read response if non-empty */
			if ( len<=0 )
			{
				_error = "invalid response packet size (len=" + len + ")";
				return null;
			}

			response = new byte[len];
			sIn.readFully ( response, 0, len );

			/* check status */
			if ( status==SEARCHD_WARNING )
			{
				DataInputStream in = new DataInputStream ( new ByteArrayInputStream ( response ) );

				int iWarnLen = in.readInt ();
				_warning = new String ( response, 4, iWarnLen, "UTF-8" );

				System.arraycopy ( response, 4+iWarnLen, response, 0, response.length-4-iWarnLen );

			} else if ( status==SEARCHD_ERROR )
			{
				_error = "searchd error: " + new String ( response, 4, response.length-4, "UTF-8" );
				return null;

			} else if ( status==SEARCHD_RETRY )
			{
				_error = "temporary searchd error: " + new String ( response, 4, response.length-4, "UTF-8" );
				return null;

			} else if ( status!=SEARCHD_OK )
			{
				_error = "searchd returned unknown status, code=" + status;
				return null;
			}

		} catch ( IOException e )
		{
			if ( len!=0 )
			{
				/* get trace, to provide even more failure details */
				StringWriter sw = new StringWriter ();
				PrintWriter ew = new PrintWriter ( sw );
				e.printStackTrace ( ew );
				ew.flush ();
				ew.close ();
				String sTrace = sw.toString ();

				/* build error message */
				_error = "failed to read searchd response (status=" + status + ", ver=" + ver + ", len=" + len + ", trace=" + sTrace +")";
			} else
			{
				_error = "received zero-sized searchd response (searchd crashed?): " + e.getMessage();
			}
			return null;

		} finally
		{
			if ( _socket==null )
			{
				try
				{
					if ( sIn!=null )
						sIn.close();
					if ( sock!=null )
						sock.close();
				} catch ( IOException e )
				{
					/* silently ignore close failures; nothing could be done anyway */
				}
			}
		}

		return response;
	}

	/** Internal method. Connect to searchd, send request, get response as DataInputStream. */
	private DataInputStream _DoRequest ( int command, int version, ByteArrayOutputStream req )
	{
		/* connect */
		Socket sock = _Connect();
		if ( sock==null )
			return null;

		/* send request */
		byte[] reqBytes = req.toByteArray();
		try
		{
			DataOutputStream sockDS = new DataOutputStream ( sock.getOutputStream() );
			sockDS.writeShort ( command );
			sockDS.writeShort ( version );
			sockDS.writeInt ( reqBytes.length );
			sockDS.write ( reqBytes );

		} catch ( Exception e )
		{
			_error = "network error: " + e;
			_connerror = true;
			return null;
		}

		/* get response */
		byte[] response = _GetResponse ( sock );
		if ( response==null )
			return null;

		/* wrap the raw response into a stream for parsing */
		return new DataInputStream ( new ByteArrayInputStream ( response ) );
	}

	/** Set matches offset and limit to return to client, max matches to retrieve on server, and cutoff. */
	public void SetLimits ( int offset, int limit, int max, int cutoff ) throws SphinxException
	{
		myAssert ( offset>=0, "offset must not be negative" );
		myAssert ( limit>0, "limit must be positive" );
		myAssert ( max>0, "max must be positive" );
		myAssert ( cutoff>=0, "cutoff must not be negative" );

		_offset = offset;
		_limit = limit;
		_maxMatches = max;
		_cutoff = cutoff;
	}

	/** Set matches offset and limit to return to client, and max matches to retrieve on server. */
	public void SetLimits ( int offset, int limit, int max ) throws SphinxException
	{
		SetLimits ( offset, limit, max, _cutoff );
	}

	/** Set matches offset and limit to return to client. */
	public void SetLimits ( int offset, int limit ) throws SphinxException
	{
		SetLimits ( offset, limit, _maxMatches, _cutoff );
	}

	/** Set maximum query time, in milliseconds, per-index, 0 means "do not limit". */
	public void SetMaxQueryTime ( int maxTime ) throws SphinxException
	{
		myAssert ( maxTime>=0, "max_query_time must not be negative" );
		_maxQueryTime = maxTime;
	}

	/** Set matching mode. */
	public void SetMatchMode(int mode) throws SphinxException
	{
		myAssert (
			mode==SPH_MATCH_ALL ||
			mode==SPH_MATCH_ANY ||
			mode==SPH_MATCH_PHRASE ||
			mode==SPH_MATCH_BOOLEAN ||
			mode==SPH_MATCH_EXTENDED ||
			mode==SPH_MATCH_FULLSCAN ||
			mode==SPH_MATCH_EXTENDED2, "unknown mode value; use one of the SPH_MATCH_xxx constants" );
		_mode = mode;
	}

	/** Set ranking mode. */
	public void SetRankingMode ( int ranker, String rankexpr ) throws SphinxException
	{
		myAssert ( ranker>=0 && ranker<SPH_RANK_TOTAL, "unknown ranker value; use one of the SPH_RANK_xxx constants" );
		_rankexpr = ( rankexpr==null ) ? "" : rankexpr;
		_ranker = ranker;
	}

	/** Set sorting mode. */
	public void SetSortMode ( int mode, String sortby ) throws SphinxException
	{
		myAssert (
			mode==SPH_SORT_RELEVANCE ||
			mode==SPH_SORT_ATTR_DESC ||
			mode==SPH_SORT_ATTR_ASC ||
			mode==SPH_SORT_TIME_SEGMENTS ||
			mode==SPH_SORT_EXTENDED ||
			mode==SPH_SORT_EXPR, "unknown mode value; use one of the available SPH_SORT_xxx constants" );
		myAssert ( mode==SPH_SORT_RELEVANCE || ( sortby!=null && sortby.length()>0 ), "sortby string must not be empty in selected mode" );

		_sort = mode;
		_sortby = ( sortby==null ) ? "" : sortby;
	}

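	/*
	 * A brief sketch of extended sorting (illustrative only; "date_added"
	 * is a hypothetical attribute name, not something this API defines):
	 *
	 *   cl.SetSortMode ( SphinxClient.SPH_SORT_EXTENDED, "@weight DESC, date_added DESC" );
	 */
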
	/** Set per-field weights (all values must be positive). WARNING: DEPRECATED, use SetFieldWeights() instead. */
	public void SetWeights(int[] weights) throws SphinxException
	{
		myAssert ( weights!=null, "weights must not be null" );
		for (int i = 0; i < weights.length; i++) {
			int weight = weights[i];
			myAssert ( weight>0, "all weights must be greater than 0" );
		}
		_weights = weights;
	}

	/**
	 * Bind per-field weights by field name.
	 * @param fieldWeights hash which maps String field names to Integer weights
	 */
	public void SetFieldWeights ( Map fieldWeights ) throws SphinxException
	{
		/* FIXME! implement checks here */
		_fieldWeights = ( fieldWeights==null ) ? new LinkedHashMap () : fieldWeights;
	}

	/**
	 * Bind per-index weights by index name (and enable summing the weights on duplicate matches, instead of replacing them).
	 * @param indexWeights hash which maps String index names to Integer weights
	 */
	public void SetIndexWeights ( Map indexWeights ) throws SphinxException
	{
		/* FIXME! implement checks here */
		_indexWeights = ( indexWeights==null ) ? new LinkedHashMap () : indexWeights;
	}

	/** Set document IDs range to match. */
	public void SetIDRange ( int min, int max ) throws SphinxException
	{
		myAssert ( min<=max, "min must be less than or equal to max" );
		_minId = min;
		_maxId = max;
	}

	/** Set values filter. Only match records where attribute value is in given set. */
	public void SetFilter ( String attribute, int[] values, boolean exclude ) throws SphinxException
	{
		myAssert ( values!=null && values.length>0, "values array must not be null or empty" );
		myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" );

		try
		{
			writeNetUTF8 ( _filters, attribute );
			_filters.writeInt ( SPH_FILTER_VALUES );
			_filters.writeInt ( values.length );
			for ( int i=0; i<values.length; i++ )
				_filters.writeLong ( values[i] );
			_filters.writeInt ( exclude ? 1 : 0 );

		} catch ( Exception e )
		{
			myAssert ( false, "IOException: " + e.getMessage() );
		}
		_filterCount++;
	}

	/** Set values filter. Only match records where attribute value is in given set. */
	public void SetFilter ( String attribute, long[] values, boolean exclude ) throws SphinxException
	{
		myAssert ( values!=null && values.length>0, "values array must not be null or empty" );
		myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" );

		try
		{
			writeNetUTF8 ( _filters, attribute );
			_filters.writeInt ( SPH_FILTER_VALUES );
			_filters.writeInt ( values.length );
			for ( int i=0; i<values.length; i++ )
				_filters.writeLong ( values[i] );
			_filters.writeInt ( exclude ? 1 : 0 );

		} catch ( Exception e )
		{
			myAssert ( false, "IOException: " + e.getMessage() );
		}
		_filterCount++;
	}

	/** Set values filter with a single value (syntax sugar; see {@link #SetFilter(String,int[],boolean)}). */
	public void SetFilter ( String attribute, int value, boolean exclude ) throws SphinxException
	{
		long[] values = new long[] { value };
		SetFilter ( attribute, values, exclude );
	}

	/** Set values filter with a single value (syntax sugar; see {@link #SetFilter(String,long[],boolean)}). */
	public void SetFilter ( String attribute, long value, boolean exclude ) throws SphinxException
	{
		long[] values = new long[] { value };
		SetFilter ( attribute, values, exclude );
	}

	/** Set integer range filter. Only match records if attribute value is between min and max (inclusive). */
	public void SetFilterRange ( String attribute, long min, long max, boolean exclude ) throws SphinxException
	{
		myAssert ( min<=max, "min must be less than or equal to max" );
		try
		{
			writeNetUTF8 ( _filters, attribute );
			_filters.writeInt ( SPH_FILTER_RANGE );
			_filters.writeLong ( min );
			_filters.writeLong ( max );
			_filters.writeInt ( exclude ? 1 : 0 );

		} catch ( Exception e )
		{
			myAssert ( false, "IOException: " + e.getMessage() );
		}
		_filterCount++;
	}

	/** Set integer range filter. Only match records if attribute value is between min and max (inclusive). */
	public void SetFilterRange ( String attribute, int min, int max, boolean exclude ) throws SphinxException
	{
		SetFilterRange ( attribute, (long)min, (long)max, exclude );
	}

	/** Set float range filter. Only match records if attribute value is between min and max (inclusive). */
	public void SetFilterFloatRange ( String attribute, float min, float max, boolean exclude ) throws SphinxException
	{
		myAssert ( min<=max, "min must be less than or equal to max" );
		try
		{
			writeNetUTF8 ( _filters, attribute );
			_filters.writeInt ( SPH_FILTER_FLOATRANGE );
			_filters.writeFloat ( min );
			_filters.writeFloat ( max );
			_filters.writeInt ( exclude ? 1 : 0 );
		} catch ( Exception e )
		{
			myAssert ( false, "IOException: " + e.getMessage() );
		}
		_filterCount++;
	}

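	/*
	 * Filter usage sketch (illustrative only; "group_id", "price", and
	 * "rating" are hypothetical attribute names):
	 *
	 *   cl.SetFilter ( "group_id", new long[] { 1, 5, 19 }, false );	// group_id IN (1,5,19)
	 *   cl.SetFilterRange ( "price", 100, 500, false );				// 100 <= price <= 500
	 *   cl.SetFilterFloatRange ( "rating", 3.5f, 5.0f, false );		// 3.5 <= rating <= 5.0
	 */
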
	/** Setup geographical anchor point. Required to use @geodist in filters and sorting; distance will be computed to this point. */
	public void SetGeoAnchor ( String latitudeAttr, String longitudeAttr, float latitude, float longitude ) throws SphinxException
	{
		myAssert ( latitudeAttr!=null && latitudeAttr.length()>0, "latitudeAttr string must not be null or empty" );
		myAssert ( longitudeAttr!=null && longitudeAttr.length()>0, "longitudeAttr string must not be null or empty" );

		_latitudeAttr = latitudeAttr;
		_longitudeAttr = longitudeAttr;
		_latitude = latitude;
		_longitude = longitude;
	}

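	/*
	 * Geo-anchor sketch (an assumption-laden example: "lat" and "lon" are
	 * hypothetical attribute names storing coordinates in radians, and the
	 * anchor values are arbitrary):
	 *
	 *   cl.SetGeoAnchor ( "lat", "lon", 0.6576f, -2.1365f );
	 *   cl.SetSortMode ( SphinxClient.SPH_SORT_EXTENDED, "@geodist ASC" );
	 */
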
	/** Set grouping attribute and function. */
	public void SetGroupBy ( String attribute, int func, String groupsort ) throws SphinxException
	{
		myAssert (
			func==SPH_GROUPBY_DAY ||
			func==SPH_GROUPBY_WEEK ||
			func==SPH_GROUPBY_MONTH ||
			func==SPH_GROUPBY_YEAR ||
			func==SPH_GROUPBY_ATTR ||
			func==SPH_GROUPBY_ATTRPAIR, "unknown func value; use one of the available SPH_GROUPBY_xxx constants" );

		_groupBy = attribute;
		_groupFunc = func;
		_groupSort = groupsort;
	}

	/** Set grouping attribute and function with default ("@group desc") groupsort (syntax sugar). */
	public void SetGroupBy(String attribute, int func) throws SphinxException
	{
		SetGroupBy(attribute, func, "@group desc");
	}

	/** Set count-distinct attribute for group-by queries. */
	public void SetGroupDistinct(String attribute)
	{
		_groupDistinct = attribute;
	}

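	/*
	 * Group-by sketch (illustrative only; "group_id" and "category_id"
	 * are hypothetical attribute names):
	 *
	 *   cl.SetGroupBy ( "group_id", SphinxClient.SPH_GROUPBY_ATTR, "@count desc" );
	 *   cl.SetGroupDistinct ( "category_id" );	// adds per-group @distinct counts
	 */
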
	/** Set distributed retries count and delay. */
	public void SetRetries ( int count, int delay ) throws SphinxException
	{
		myAssert ( count>=0, "count must not be negative" );
		myAssert ( delay>=0, "delay must not be negative" );
		_retrycount = count;
		_retrydelay = delay;
	}

	/** Set distributed retries count with default (zero) delay (syntax sugar). */
	public void SetRetries ( int count ) throws SphinxException
	{
		SetRetries ( count, 0 );
	}

	/**
	 * Set attribute values override (one override list per attribute).
	 * @param values maps Long document IDs to Int/Long/Float values (as specified in attrtype).
	 */
	public void SetOverride ( String attrname, int attrtype, Map values ) throws SphinxException
	{
		myAssert ( attrname!=null && attrname.length()>0, "attrname must not be empty" );
		myAssert ( attrtype==SPH_ATTR_INTEGER || attrtype==SPH_ATTR_TIMESTAMP || attrtype==SPH_ATTR_BOOL || attrtype==SPH_ATTR_FLOAT || attrtype==SPH_ATTR_BIGINT,
			"unsupported attrtype (must be one of INTEGER, TIMESTAMP, BOOL, FLOAT, or BIGINT)" );
		_overrideTypes.put ( attrname, new Integer ( attrtype ) );
		_overrideValues.put ( attrname, values );
	}

	/** Set select-list (attributes or expressions), SQL-like syntax. */
	public void SetSelect ( String select ) throws SphinxException
	{
		myAssert ( select!=null, "select clause string must not be null" );
		_select = select;
	}
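
	/*
	 * Select-list sketch (the expression syntax is SQL-like; "price" is a
	 * hypothetical attribute name):
	 *
	 *   cl.SetSelect ( "*, price*1.07 AS price_with_tax" );
	 */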



	/** Reset all currently set filters (for multi-queries). */
	public void ResetFilters()
	{
		/* should we close them first? */
		_rawFilters = new ByteArrayOutputStream();
		_filters = new DataOutputStream(_rawFilters);
		_filterCount = 0;

		/* reset GEO anchor */
		_latitudeAttr = null;
		_longitudeAttr = null;
		_latitude = 0;
		_longitude = 0;
	}

	/** Clear groupby settings (for multi-queries). */
	public void ResetGroupBy ()
	{
		_groupBy = "";
		_groupFunc = SPH_GROUPBY_DAY;
		_groupSort = "@group desc";
		_groupDistinct = "";
	}

	/** Clear all attribute value overrides (for multi-queries). */
	public void ResetOverrides ()
	{
		_overrideTypes.clear ();
		_overrideValues.clear ();
	}



	/** Connect to searchd server and run current search query against all indexes (syntax sugar). */
	public SphinxResult Query ( String query ) throws SphinxException
	{
		return Query ( query, "*", "" );
	}

	/** Connect to searchd server and run current search query against the given indexes (syntax sugar). */
	public SphinxResult Query ( String query, String index ) throws SphinxException
	{
		return Query ( query, index, "" );
	}

	/** Connect to searchd server and run current search query. */
	public SphinxResult Query ( String query, String index, String comment ) throws SphinxException
	{
		myAssert ( _reqs==null || _reqs.size()==0, "AddQuery() and Query() cannot be combined; use RunQueries() instead" );

		AddQuery ( query, index, comment );
		SphinxResult[] results = RunQueries();
		_reqs = new ArrayList(); /* just in case it failed too early */
		if ( results==null || results.length<1 )
			return null; /* probably network error; error message should be already filled */

		SphinxResult res = results[0];
		if ( res==null )
			return null;
		_warning = res.warning;
		_error = res.error;
		if ( res.getStatus()==SEARCHD_ERROR )
			return null;
		return res;
	}
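
	/*
	 * Error-handling sketch: Query() returns null on failure, and the cause
	 * can be inspected through the getters above (illustrative only):
	 *
	 *   SphinxResult res = cl.Query ( "test", "test1" );
	 *   if ( res==null )
	 *   {
	 *       if ( cl.IsConnectError() )
	 *           System.err.println ( "connection error: " + cl.GetLastError() );
	 *       else
	 *           System.err.println ( "searchd error: " + cl.GetLastError() );
	 *   } else if ( cl.GetLastWarning().length()>0 )
	 *       System.err.println ( "warning: " + cl.GetLastWarning() );
	 */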

	/** Add new query with current settings to current search request. */
	public int AddQuery ( String query, String index, String comment ) throws SphinxException
	{
		ByteArrayOutputStream req = new ByteArrayOutputStream();

		/* build request */
		try {
			DataOutputStream out = new DataOutputStream(req);
			out.writeInt(_offset);
			out.writeInt(_limit);
			out.writeInt(_mode);
			out.writeInt(_ranker);
			if ( _ranker == SPH_RANK_EXPR ) {
				writeNetUTF8(out, _rankexpr);
			}
			out.writeInt(_sort);
			writeNetUTF8(out, _sortby);
			writeNetUTF8(out, query);
			int weightLen = _weights != null ? _weights.length : 0;

			out.writeInt(weightLen);
			if (_weights != null) {
				for (int i = 0; i < _weights.length; i++)
					out.writeInt(_weights[i]);
			}

			writeNetUTF8(out, index);
			out.writeInt(0);
			out.writeInt(_minId);
			out.writeInt(_maxId);

			/* filters */
			out.writeInt(_filterCount);
			out.write(_rawFilters.toByteArray());

			/* group-by, max matches, sort-by-group flag */
			out.writeInt(_groupFunc);
			writeNetUTF8(out, _groupBy);
			out.writeInt(_maxMatches);
			writeNetUTF8(out, _groupSort);

			out.writeInt(_cutoff);
			out.writeInt(_retrycount);
			out.writeInt(_retrydelay);

			writeNetUTF8(out, _groupDistinct);

			/* anchor point */
			if (_latitudeAttr == null || _latitudeAttr.length() == 0 || _longitudeAttr == null || _longitudeAttr.length() == 0) {
				out.writeInt(0);
			} else {
				out.writeInt(1);
				writeNetUTF8(out, _latitudeAttr);
				writeNetUTF8(out, _longitudeAttr);
				out.writeFloat(_latitude);
				out.writeFloat(_longitude);
			}

			/* per-index weights */
			out.writeInt(_indexWeights.size());
			for (Iterator e = _indexWeights.keySet().iterator(); e.hasNext();) {
				String indexName = (String) e.next();
				Integer weight = (Integer) _indexWeights.get(indexName);
				writeNetUTF8(out, indexName);
				out.writeInt(weight.intValue());
			}

			/* max query time */
			out.writeInt ( _maxQueryTime );

			/* per-field weights */
			out.writeInt ( _fieldWeights.size() );
			for ( Iterator e=_fieldWeights.keySet().iterator(); e.hasNext(); )
			{
				String field = (String) e.next();
				Integer weight = (Integer) _fieldWeights.get ( field );
				writeNetUTF8 ( out, field );
				out.writeInt ( weight.intValue() );
			}

			/* comment */
			writeNetUTF8 ( out, comment );

			/* overrides */
			out.writeInt ( _overrideTypes.size() );
			for ( Iterator e=_overrideTypes.keySet().iterator(); e.hasNext(); )
			{
				String attr = (String) e.next();
				Integer type = (Integer) _overrideTypes.get ( attr );
				Map values = (Map) _overrideValues.get ( attr );

				writeNetUTF8 ( out, attr );
				out.writeInt ( type.intValue() );
				out.writeInt ( values.size() );

				for ( Iterator e2=values.keySet().iterator(); e2.hasNext(); )
				{
					Long id = (Long) e2.next ();
					out.writeLong ( id.longValue() );
					switch ( type.intValue() )
					{
						case SPH_ATTR_FLOAT:	out.writeFloat ( ( (Float) values.get ( id ) ).floatValue() ); break;
						case SPH_ATTR_BIGINT:	out.writeLong ( ( (Long)values.get ( id ) ).longValue() ); break;
						default:				out.writeInt ( ( (Integer)values.get ( id ) ).intValue() ); break;
					}
				}
			}

			/* select-list */
			writeNetUTF8 ( out, _select );

			/* done! */
			out.flush ();
			int qIndex = _reqs.size();
			_reqs.add ( qIndex, req.toByteArray() );
			return qIndex;

		} catch ( Exception e )
		{
			myAssert ( false, "error in AddQuery(): " + e + ": " + e.getMessage() );

		} finally
		{
			try
			{
				_filters.close ();
				_rawFilters.close ();
			} catch ( IOException e )
			{
				myAssert ( false, "error in AddQuery(): " + e + ": " + e.getMessage() );
			}
		}
		return -1;
	}

	/** Run all previously added search queries. */
	public SphinxResult[] RunQueries() throws SphinxException
	{
		if ( _reqs==null || _reqs.size()<1 )
		{
			_error = "no queries defined, issue AddQuery() first";
			return null;
		}

		/* build the mega-request */
		int nreqs = _reqs.size();
		ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
		try
		{
			DataOutputStream req = new DataOutputStream ( reqBuf );
			/* it's a client */
			req.writeInt(0);
			req.writeInt ( nreqs );
			for ( int i=0; i<nreqs; i++ )
				req.write ( (byte[]) _reqs.get(i) );
			req.flush ();

		} catch ( Exception e )
		{
			_error = "internal error: failed to build request: " + e;
			return null;
		}

		DataInputStream in = _DoRequest ( SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, reqBuf );
		if ( in==null )
			return null;

		SphinxResult[] results = new SphinxResult [ nreqs ];
		_reqs = new ArrayList();

		try
		{
			for ( int ires=0; ires<nreqs; ires++ )
			{
				SphinxResult res = new SphinxResult();
				results[ires] = res;

				int status = in.readInt();
				res.setStatus ( status );
				if (status != SEARCHD_OK) {
					String message = readNetUTF8(in);
					if (status == SEARCHD_WARNING) {
						res.warning = message;
					} else {
						res.error = message;
						continue;
					}
				}

				/* read fields */
				int nfields = in.readInt();
				res.fields = new String[nfields];
				for (int i = 0; i < nfields; i++)
					res.fields[i] = readNetUTF8(in);

				/* read attrs */
				int nattrs = in.readInt();
				res.attrTypes = new int[nattrs];
				res.attrNames = new String[nattrs];
				for (int i = 0; i < nattrs; i++) {
					String AttrName = readNetUTF8(in);
					int AttrType = in.readInt();
					res.attrNames[i] = AttrName;
					res.attrTypes[i] = AttrType;
				}

				/* read match count, and the flag telling whether IDs are 64-bit */
				int count = in.readInt();
				int id64 = in.readInt();
				res.matches = new SphinxMatch[count];
				for ( int matchesNo=0; matchesNo<count; matchesNo++ )
				{
					SphinxMatch docInfo;
					docInfo = new SphinxMatch (
							( id64==0 ) ? readDword(in) : in.readLong(),
							in.readInt() );

					/* read matches */
					for (int attrNumber = 0; attrNumber < res.attrTypes.length; attrNumber++)
					{
						int type = res.attrTypes[attrNumber];

						/* handle bigints */
						if ( type==SPH_ATTR_BIGINT )
						{
							docInfo.attrValues.add ( attrNumber, new Long ( in.readLong() ) );
							continue;
						}

						/* handle floats */
						if ( type==SPH_ATTR_FLOAT )
						{
							docInfo.attrValues.add ( attrNumber, new Float ( in.readFloat() ) );
							continue;
						}

						/* handle strings */
						if ( type==SPH_ATTR_STRING )
						{
							String s = readNetUTF8(in);
							docInfo.attrValues.add ( attrNumber, s );
							continue;
						}

						/* handle everything else as unsigned ints */
						long val = readDword ( in );
						if ( type==SPH_ATTR_MULTI )
						{
							long[] vals = new long [ (int)val ];
							for ( int k=0; k<val; k++ )
								vals[k] = readDword ( in );

							docInfo.attrValues.add ( attrNumber, vals );

						} else if ( type==SPH_ATTR_MULTI64 )
						{
							val = val / 2;
							long[] vals = new long [ (int)val ];
							for ( int k=0; k<val; k++ )
								vals[k] = in.readLong ();

							docInfo.attrValues.add ( attrNumber, vals );

						} else
						{
							docInfo.attrValues.add ( attrNumber, new Long ( val ) );
						}
					}
					res.matches[matchesNo] = docInfo;
				}

				res.total = in.readInt();
				res.totalFound = in.readInt();
				res.time = in.readInt() / 1000.0f;

				res.words = new SphinxWordInfo [ in.readInt() ];
				for ( int i=0; i<res.words.length; i++ )
					res.words[i] = new SphinxWordInfo ( readNetUTF8(in), readDword(in), readDword(in) );
			}
			return results;

		} catch ( IOException e )
		{
			_error = "incomplete reply";
			return null;
		}
	}
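
	/*
	 * Multi-query sketch: AddQuery() snapshots the current settings, and
	 * RunQueries() sends all pending queries in one network round-trip
	 * (illustrative only; "test1" and "group_id" are assumptions):
	 *
	 *   cl.AddQuery ( "hello", "test1", "" );
	 *   cl.SetSortMode ( SphinxClient.SPH_SORT_ATTR_DESC, "group_id" );
	 *   cl.AddQuery ( "world", "test1", "" );
	 *   SphinxResult[] results = cl.RunQueries();
	 *   if ( results==null )
	 *       System.err.println ( "batch failed: " + cl.GetLastError() );
	 */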



	/**
	 * Connect to searchd server and generate excerpts (snippets) from given documents.
	 * @param opts maps String keys to String or Integer values (see the documentation for the complete list of keys).
	 * @return null on failure, array of snippets on success.
	 */
	public String[] BuildExcerpts ( String[] docs, String index, String words, Map opts ) throws SphinxException
	{
		myAssert(docs != null && docs.length > 0, "BuildExcerpts: no documents to process");
		myAssert(index != null && index.length() > 0, "BuildExcerpts: no index to process documents against");
		myAssert(words != null && words.length() > 0, "BuildExcerpts: no words to highlight");
		if (opts == null) opts = new LinkedHashMap();

		/* fixup options */
		if (!opts.containsKey("before_match")) opts.put("before_match", "<b>");
		if (!opts.containsKey("after_match")) opts.put("after_match", "</b>");
		if (!opts.containsKey("chunk_separator")) opts.put("chunk_separator", "...");
		if (!opts.containsKey("html_strip_mode")) opts.put("html_strip_mode", "index");
		if (!opts.containsKey("limit")) opts.put("limit", new Integer(256));
		if (!opts.containsKey("limit_passages")) opts.put("limit_passages", new Integer(0));
		if (!opts.containsKey("limit_words")) opts.put("limit_words", new Integer(0));
		if (!opts.containsKey("around")) opts.put("around", new Integer(5));
		if (!opts.containsKey("start_passage_id")) opts.put("start_passage_id", new Integer(1));
		if (!opts.containsKey("exact_phrase")) opts.put("exact_phrase", new Integer(0));
		if (!opts.containsKey("single_passage")) opts.put("single_passage", new Integer(0));
		if (!opts.containsKey("use_boundaries")) opts.put("use_boundaries", new Integer(0));
		if (!opts.containsKey("weight_order")) opts.put("weight_order", new Integer(0));
		if (!opts.containsKey("load_files")) opts.put("load_files", new Integer(0));
		if (!opts.containsKey("allow_empty")) opts.put("allow_empty", new Integer(0));
		if (!opts.containsKey("query_mode")) opts.put("query_mode", new Integer(0));
		if (!opts.containsKey("force_all_words")) opts.put("force_all_words", new Integer(0));

		/* build request */
		ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
		DataOutputStream req = new DataOutputStream ( reqBuf );
		try
		{
			req.writeInt(0);
			int iFlags = 1; /* remove_spaces */
			if ( ((Integer)opts.get("exact_phrase")).intValue()!=0 )	iFlags |= 2;
			if ( ((Integer)opts.get("single_passage")).intValue()!=0 )	iFlags |= 4;
			if ( ((Integer)opts.get("use_boundaries")).intValue()!=0 )	iFlags |= 8;
			if ( ((Integer)opts.get("weight_order")).intValue()!=0 )	iFlags |= 16;
			if ( ((Integer)opts.get("query_mode")).intValue()!=0 )		iFlags |= 32;
			if ( ((Integer)opts.get("force_all_words")).intValue()!=0 )	iFlags |= 64;
			if ( ((Integer)opts.get("load_files")).intValue()!=0 )		iFlags |= 128;
			if ( ((Integer)opts.get("allow_empty")).intValue()!=0 )		iFlags |= 256;
			req.writeInt ( iFlags );
			writeNetUTF8 ( req, index );
			writeNetUTF8 ( req, words );

			/* send options */
			writeNetUTF8 ( req, (String) opts.get("before_match") );
			writeNetUTF8 ( req, (String) opts.get("after_match") );
			writeNetUTF8 ( req, (String) opts.get("chunk_separator") );
			req.writeInt ( ((Integer) opts.get("limit")).intValue() );
			req.writeInt ( ((Integer) opts.get("around")).intValue() );

			req.writeInt ( ((Integer) opts.get("limit_passages")).intValue() );
			req.writeInt ( ((Integer) opts.get("limit_words")).intValue() );
			req.writeInt ( ((Integer) opts.get("start_passage_id")).intValue() );
			writeNetUTF8 ( req, (String) opts.get("html_strip_mode") );

			/* send documents */
			req.writeInt ( docs.length );
			for ( int i=0; i<docs.length; i++ )
				writeNetUTF8 ( req, docs[i] );

			req.flush();

		} catch ( Exception e )
		{
			_error = "internal error: failed to build request: " + e;
			return null;
		}

		DataInputStream in = _DoRequest ( SEARCHD_COMMAND_EXCERPT, VER_COMMAND_EXCERPT, reqBuf );
		if ( in==null )
			return null;

		try
		{
			String[] res = new String [ docs.length ];
			for ( int i=0; i<docs.length; i++ )
				res[i] = readNetUTF8 ( in );
			return res;

		} catch ( Exception e )
		{
			_error = "incomplete reply";
			return null;
		}
	}
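
	/*
	 * Excerpts sketch (illustrative only; the index name, document text,
	 * and option values are assumptions):
	 *
	 *   String[] docs = { "this is my test text to be highlighted" };
	 *   Map opts = new LinkedHashMap();
	 *   opts.put ( "before_match", "<em>" );
	 *   opts.put ( "after_match", "</em>" );
	 *   String[] snippets = cl.BuildExcerpts ( docs, "test1", "test text", opts );
	 */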



	/**
	 * Connect to searchd server and update given attributes on given documents in given indexes.
	 * Sample code that will set group_id=123 where id=1 and group_id=456 where id=3:
	 *
	 * <pre>
	 * String[] attrs = new String[1];
	 * attrs[0] = "group_id";
	 *
	 * long[][] values = new long[2][2];
	 * values[0][0] = 1; values[0][1] = 123;
	 * values[1][0] = 3; values[1][1] = 456;
	 *
	 * int res = cl.UpdateAttributes ( "test1", attrs, values );
	 * </pre>
	 *
	 * @param index		index name(s) to update; might be distributed
	 * @param attrs		array with the names of the attributes to update
	 * @param values	array of updates; each long[] entry must contain the document ID
	 *					in the first element, and all new attribute values in the following ones
	 * @return			-1 on failure, number of actually found and updated documents (might be 0) on success
	 *
	 * @throws			SphinxException on invalid parameters
	 */
	public int UpdateAttributes ( String index, String[] attrs, long[][] values ) throws SphinxException
	{
		/* check args */
		myAssert ( index!=null && index.length()>0, "no index name provided" );
		myAssert ( attrs!=null && attrs.length>0, "no attribute names provided" );
		myAssert ( values!=null && values.length>0, "no update entries provided" );
		for ( int i=0; i<values.length; i++ )
		{
			myAssert ( values[i]!=null, "update entry #" + i + " is null" );
			myAssert ( values[i].length==1+attrs.length, "update entry #" + i + " has wrong length" );
		}

		/* build and send request */
		ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
		DataOutputStream req = new DataOutputStream ( reqBuf );
		try
		{
			writeNetUTF8 ( req, index );

			req.writeInt ( attrs.length );
			for ( int i=0; i<attrs.length; i++ )
			{
				writeNetUTF8 ( req, attrs[i] );
				req.writeInt ( 0 ); /* not an MVA attr */
			}

			req.writeInt ( values.length );
			for ( int i=0; i<values.length; i++ )
			{
				req.writeLong ( values[i][0] ); /* send docid as 64bit value */
				for ( int j=1; j<values[i].length; j++ )
					req.writeInt ( (int)values[i][j] ); /* send values as 32bit values; FIXME! what happens when they are over 2^31? */
			}

			req.flush();

		} catch ( Exception e )
		{
			_error = "internal error: failed to build request: " + e;
			return -1;
		}

		/* get and parse response */
		DataInputStream in = _DoRequest ( SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, reqBuf );
		if ( in==null )
			return -1;

		try
		{
			return in.readInt ();
		} catch ( Exception e )
		{
			_error = "incomplete reply";
			return -1;
		}
	}



	/**
	 * Connect to searchd server and update given MVA attributes on given document in given indexes.
	 * Sample code that will set group_id=(123, 456, 789) where id=10:
	 *
	 * <pre>
	 * String[] attrs = new String[1];
	 * attrs[0] = "group_id";
	 *
	 * int[][] values = new int[1][];
	 * values[0] = new int[] { 123, 456, 789 };
	 *
	 * int res = cl.UpdateAttributesMVA ( "test1", 10, attrs, values );
	 * </pre>
	 *
	 * @param index		index name(s) to update; might be distributed
	 * @param docid		id of document to update
	 * @param attrs		array with the names of the attributes to update
	 * @param values	array of updates; each int[] entry must contain all new attribute values
	 * @return			-1 on failure, number of actually found and updated documents (might be 0) on success
	 *
	 * @throws			SphinxException on invalid parameters
	 */
	public int UpdateAttributesMVA ( String index, long docid, String[] attrs, int[][] values ) throws SphinxException
	{
		/* check args */
		myAssert ( index!=null && index.length()>0, "no index name provided" );
		myAssert ( docid>0, "invalid document id" );
		myAssert ( attrs!=null && attrs.length>0, "no attribute names provided" );
		myAssert ( values!=null && values.length>0, "no update entries provided" );
		myAssert ( values.length==attrs.length, "update entry has wrong length" );
		for ( int i=0; i<values.length; i++ )
		{
			myAssert ( values[i]!=null, "update entry #" + i + " is null" );
		}

		/* build and send request */
		ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
		DataOutputStream req = new DataOutputStream ( reqBuf );
		try
		{
			writeNetUTF8 ( req, index );

			req.writeInt ( attrs.length );
			for ( int i=0; i<attrs.length; i++ )
			{
				writeNetUTF8 ( req, attrs[i] );
				req.writeInt ( 1 ); /* MVA attr */
			}

			req.writeInt ( 1 );
			req.writeLong ( docid ); /* send docid as 64bit value */

			for ( int i=0; i<values.length; i++ )
			{
				req.writeInt ( values[i].length ); /* send MVA count */
				for ( int j=0; j<values[i].length; j++ ) /* send the MVA values themselves */
					req.writeInt ( values[i][j] );
			}

			req.flush();

		} catch ( Exception e )
		{
			_error = "internal error: failed to build request: " + e;
			return -1;
		}

		/* get and parse response */
		DataInputStream in = _DoRequest ( SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, reqBuf );
		if ( in==null )
			return -1;

		try
		{
			return in.readInt ();
		} catch ( Exception e )
		{
			_error = "incomplete reply";
			return -1;
		}
	}



	/**
	 * Connect to searchd server, and generate keyword list for a given query.
	 * Returns null on failure, an array of Maps with misc per-keyword info on success.
	 */
	public Map[] BuildKeywords ( String query, String index, boolean hits ) throws SphinxException
	{
		/* build request */
		ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
		DataOutputStream req = new DataOutputStream ( reqBuf );
		try
		{
			writeNetUTF8 ( req, query );
			writeNetUTF8 ( req, index );
			req.writeInt ( hits ? 1 : 0 );

		} catch ( Exception e )
		{
			_error = "internal error: failed to build request: " + e;
			return null;
		}

		/* run request */
		DataInputStream in = _DoRequest ( SEARCHD_COMMAND_KEYWORDS, VER_COMMAND_KEYWORDS, reqBuf );
		if ( in==null )
			return null;

		/* parse reply */
		try
		{
			int iNumWords = in.readInt ();
			Map[] res = new Map[iNumWords];

			for ( int i=0; i<iNumWords; i++ )
			{
				res[i] = new LinkedHashMap ();
				res[i].put ( "tokenized", readNetUTF8 ( in ) );
				res[i].put ( "normalized", readNetUTF8 ( in ) );
				if ( hits )
				{
					res[i].put ( "docs", new Long ( readDword ( in ) ) );
					res[i].put ( "hits", new Long ( readDword ( in ) ) );
				}
			}
			return res;

		} catch ( Exception e )
		{
			_error = "incomplete reply";
			return null;
		}
	}
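
	/*
	 * Keywords sketch: ask searchd how a query is tokenized against a given
	 * index ("test1" is an assumption):
	 *
	 *   Map[] words = cl.BuildKeywords ( "hello world", "test1", true );
	 *   if ( words!=null )
	 *       for ( int i=0; i<words.length; i++ )
	 *           System.out.println ( words[i].get("tokenized") + " -> " + words[i].get("normalized")
	 *               + " (docs=" + words[i].get("docs") + ", hits=" + words[i].get("hits") + ")" );
	 */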



	/**
	 * Force attribute flush, and block until it completes.
	 * Returns current internal flush tag on success, -1 on failure.
	 */
	public int FlushAttributes() throws SphinxException
	{
		/* build request */
		ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();

		/* run request */
		DataInputStream in = _DoRequest ( SEARCHD_COMMAND_FLUSHATTRS, VER_COMMAND_FLUSHATTRS, reqBuf );
		if ( in==null )
			return -1;

		/* parse reply */
		try
		{
			int iFlushTag = in.readInt ();
			return iFlushTag;

		} catch ( Exception e )
		{
			_error = "incomplete reply";
			return -1;
		}
	}



	/** Escape the characters with special meaning in query syntax. */
	public static String EscapeString ( String s )
	{
		return s.replaceAll ( "([\\(\\)\\|\\-\\!\\@\\~\\\\\\\"\\&\\/\\^\\$\\=])", "\\\\$1" );
	}
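
	/*
	 * Escaping sketch: run user input through EscapeString() before embedding
	 * it into an extended-syntax query, so query operators are matched
	 * literally:
	 *
	 *   String safe = SphinxClient.EscapeString ( "C++ & Java (tutorial)" );
	 *   // yields: C++ \& Java \(tutorial\)
	 */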

	/** Open persistent connection to searchd. */
	public boolean Open()
	{
		if ( _socket!=null )
		{
			_error = "already connected";
			return false;
		}

		Socket sock = _Connect();
		if ( sock==null )
			return false;

		/* command, command version = 0, body length = 4, body = 1 */
		try
		{
			DataOutputStream sOut = new DataOutputStream ( sock.getOutputStream() );
			sOut.writeShort ( SEARCHD_COMMAND_PERSIST );
			sOut.writeShort ( 0 );
			sOut.writeInt ( 4 );
			sOut.writeInt ( 1 );
		} catch ( IOException e )
		{
			_error = "network error: " + e;
			_connerror = true;
			try
			{
				sock.close ();
			} catch ( IOException e1 ) {}
			return false;
		}

		_socket = sock;
		return true;
	}

	/** Close existing persistent connection. */
	public boolean Close()
	{
		if ( _socket==null )
		{
			_error = "not connected";
			return false;
		}

		try
		{
			_socket.close();
		} catch ( IOException e )
		{}
		_socket = null;
		return true;
	}
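
	/*
	 * Persistent-connection sketch: Open() pins a single socket that all
	 * subsequent requests reuse, and Close() releases it (illustrative only):
	 *
	 *   if ( cl.Open() )
	 *   {
	 *       cl.Query ( "first", "test1" );
	 *       cl.Query ( "second", "test1" );	// reuses the same connection
	 *       cl.Close();
	 *   }
	 */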
}

/*
 * $Id: SphinxClient.java 3144 2012-03-12 08:58:04Z tomat $
 */