#
# Script for maintaining Kamailio Berkeley DB tables
# Copyright (C) 2007 Cisco Systems
#
# This file is part of Kamailio, a free SIP server.
#
# Kamailio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Kamailio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
#
#

#constants
PATH=$PATH:/usr/local/BerkeleyDB.4.6/bin
DELIM="|"
BACKUP_CMD="tar czvf "
RESTORE_CMD="tar xzvf "

# Berkeley DB utility that dumps a database to plain text.
# Small hack to autodetect the dump command; Debian prefixes the version
# (e.g. db4.6_dump), so the last command found in the list below wins.

for CMD in db_dump db4.4_dump db4.5_dump db4.6_dump db5.0_dump db5.1_dump ; do
	if which $CMD > /dev/null ; then
		DUMP_CMD="$CMD"
	fi
done

# Berkeley DB utility that loads a database from a plain text file.
# Same autodetection hack as above for the load command.

for CMD in db_load db4.4_load db4.5_load db4.6_load db5.0_load db5.1_load ; do
	if which $CMD > /dev/null ; then
		LOAD_CMD="$CMD"
	fi
done

# path to the database schemas
DATA_DIR="/usr/local/share/kamailio"
if [ -d "$DATA_DIR/db_berkeley/kamailio" ] ; then
	DB_SCHEMA="$DATA_DIR/db_berkeley/kamailio"
else
	DB_SCHEMA="./db_berkeley/kamailio"
fi

# path to the db_berkeley database
if [ -z "$DB_PATH" ]; then
	DB_PATH="/usr/local/etc/kamailio/db_berkeley"
fi
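
# DB_PATH can also be preset in the environment before this library is used,
# e.g. (illustrative only; assumes the kamdbctl wrapper sources this file and
# that the given directory exists):
#   DB_PATH=/var/lib/kamailio/db_berkeley kamdbctl create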

berkeley_usage()
{
COMMAND=`basename $0`
cat <<EOF
Script for maintaining Kamailio Berkeley DB tables
       $COMMAND list      (lists the underlying db files in DB_PATH)
       $COMMAND cat       <db>  (db_dump the underlying db file to STDOUT)
       $COMMAND swap      <db>  (installs db.new: db -> db.old, db.new -> db)
       $COMMAND append    <db> <datafile> (appends data to an existing db; output in DB_PATH/db.new)
       $COMMAND newappend <db> <datafile> (appends data to a new instance of db; output in DB_PATH/db.new)
       $COMMAND export  <dump_dir> (exports table data to plain-text files in dump_dir)
       $COMMAND import  <dump_dir> (imports plain-text table data and creates new db tables in DB_PATH)
EOF
} #usage


#
#
#
kamailio_berkeley()  # parms: <op> <arg1> <arg2>
{
	case $1 in
		list|ls)
			ls -l $DB_PATH
			exit $?
			;;
		cat)
			shift
			kamailio_cat $1 $DB_PATH
			exit $?
			;;

		swap)
			shift
			kamailio_swap $1 $DB_PATH
			exit $?
			;;

		append)
			shift
			kamailio_append  $1 $2 $DB_PATH
			exit $?
			;;

		newappend)
			shift
			kamailio_newappend  $1 $2 $DB_PATH
			exit $?
			;;

		export)
			shift
			kamailio_export  $1 $DB_PATH
			exit $?
			;;

		import)
			shift
			kamailio_import  $1 $DB_PATH
			exit $?
			;;

		*)
			berkeley_usage
			exit 1
			;;
	esac
}
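
# Example (illustrative; the dump directory is hypothetical):
#   kamailio_berkeley export /tmp/kamailio_dump
#   kamailio_berkeley import /tmp/kamailio_dump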



##
# EXPORT existing data to plain-text files in DUMP_DIR
# e.g.  DB_PATH/version ---> DUMP_DIR/version.txt
#
# Export is used as part of a DB migration process to another
# major version of Berkeley DB.
kamailio_export()  # parms: <DUMP_DIR> [DB_PATH]
{
	if [ $# -lt 2  ]; then
		echo  "kamailio_export parms: <DUMP_DIR> [DB_PATH]"
		exit 1
	fi

	# Assert: the DB_PATH directory should already exist
	if [ ! -d $2 ] ; then
		merr "BerkeleyDB directory does not exist at: [$2]"
		exit 1
	fi

	# Assert: DB_PATH directory should already contain table 'version'
	if [ ! -f $2/version ] ; then
		merr "BerkeleyDB directory does not have VERSION table at: [$2]"
		exit 1
	fi

	# Create dir at <DUMP_DIR> to store the exported data
	if [ ! -d $1 ] ; then
		minfo "creating DUMP_DIR at: [$1]"
		mkdir -p $1
	else
		mdbg "Cleaning out DUMP_DIR to get ready for new data"
		rm -rf $1/*
	fi

	# DUMP_CMD will result in something like this:
	#
	#	VERSION=3
	#	format=print
	#	type=hash
	#	h_nelem=2
	#	db_pagesize=4096
	#	HEADER=END
	#	 METADATA_COLUMNS
	#	 callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
	#	 METADATA_KEY
	#	 0
	#	DATA=END
	#
	# However, we are only interested in the indented part between
	# 'HEADER=END' and 'DATA=END', as everything else is specific to the
	# DB instance. That is, we are interested in this part:
	#
	# METADATA_COLUMNS
	# callid(str) method(str) from_tag(str) to_tag(str) sip_code(str) sip_reason(str) time(datetime)
	# METADATA_KEY
	# 0
	#
	# The following Perl filter does this processing:
	#
	# perl -pe 's/^\w.*// ; s/^\s(.*)/$1/'

	# Dump the STANDARD tables to plain-text files in DUMP_DIR
	for TABLE in $STANDARD_TABLES; do
	    if [ -f $2/$TABLE ] ; then
		    mdbg "Exporting standard table: $TABLE"
		    $DUMP_CMD -p -h $2 $TABLE  | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt

		    # Check return code to make sure the export worked ok
		    if [ $? -ne 0 ] ; then
			merr "Export of standard table failed [$TABLE]"
			# there was a problem, but it is not something we
			# can handle here; we can deal with this at import
			# time.
		    fi
	    else
		    mwarn "Table not found: [$TABLE]"
	    fi
	done

	# Dump the PRESENCE tables to plain-text files in DUMP_DIR
	for TABLE in $PRESENCE_TABLES; do
	    if [ -f $2/$TABLE ] ; then
		    mdbg "Exporting presence table: $TABLE"
		    $DUMP_CMD -p -h $2 $TABLE  | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
		    if [ $? -ne 0 ] ; then
			merr "Export of presence table failed [$TABLE]"
		    fi
	    else
		    mwarn "Table not found: [$TABLE]"
	    fi
	done

	# Dump the EXTRA tables to plain-text files in DUMP_DIR
	for TABLE in $EXTRA_TABLES; do
	    if [ -f $2/$TABLE ] ; then
		    mdbg "Exporting extra table: $TABLE"
		    $DUMP_CMD -p -h $2 $TABLE  | perl -pe 's/^\w.*// ; s/^\s(.*)/$1/' > $1/$TABLE.txt
		    if [ $? -ne 0 ] ; then
			merr "Export of extra table failed [$TABLE]"
		    fi
	    else
		    mwarn "Table not found: [$TABLE]"
	    fi
	done

	mdbg "All tables are now exported to DUMP_DIR: [$1]"
	return 0

}
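
# Example (illustrative; the dump directory is hypothetical):
#   kamailio_export /tmp/kamailio_dump "$DB_PATH"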


##
# IMPORT existing plain-text files from DUMP_DIR to DB_PATH
# e.g.  DUMP_DIR/version.txt  --> DB_PATH/version
#
# Import is used as part of a DB migration to another major version of
# Berkeley DB. This will overwrite anything in DB_PATH.
kamailio_import()  # parms: <DUMP_DIR> [DB_PATH]
{
	if [ $# -lt 2  ]; then
		echo  "kamailio_import parms: <DUMP_DIR> [DB_PATH]"
		exit 1
	fi

	# Assert: DUMP_DIR (source dir) already exists
	if [ ! -d $1 ] ; then
		merr "Berkeley DUMP_DIR directory does not exist: [$1]"
		exit 1
	fi

	# Assert: DUMP_DIR directory should already contain table 'version.txt'
	if [ ! -f $1/version.txt ] ; then
		merr "DUMP_DIR directory does not have version.txt data at: [$1]"
		exit 1
	fi

	# Assert: destination dir exists [DB_PATH]
	if [ ! -d $2 ] ; then
		mdbg "Berkeley DB_PATH directory is being created: [$2]"
		mkdir -p $2
	else
		# Wipe out the destination dir to make room for new data
		mwarn "Berkeley DB_PATH directory is being purged at: [$2]"
		rm -rf $2/*
	fi

	# Creates STANDARD tables from plain-text files in DUMP_DIR
	for TABLE in $STANDARD_TABLES; do
	    if [ -s $1/$TABLE.txt ] ; then
		    mdbg "Importing standard table: $TABLE"
		    $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE

		    # Check return code to make sure the import worked ok
		    if [ $? -ne 0 ] ; then
			merr "Import of standard table failed [$TABLE.txt]"
			merr "Create this missing table with kambdb_recover."
		    fi
	    else
		    merr "Import data not found for table: [$TABLE.txt]"
		    merr "Create this missing table with kambdb_recover."
	    fi
	done


	# Creates PRESENCE tables from plain-text files in DUMP_DIR
	for TABLE in $PRESENCE_TABLES; do
	    if [ -s $1/$TABLE.txt ] ; then
		    mdbg "Importing presence table: $TABLE"
		    $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE

		    # Check return code to make sure the import worked ok
		    if [ $? -ne 0 ] ; then
			merr "Import of presence table failed [$TABLE.txt]"
			merr "Create this missing table with kambdb_recover."
		    fi
	    else
		    mwarn "Import data not found for table: [$TABLE.txt]"
	    fi
	done

	# Creates EXTRA tables from plain-text files in DUMP_DIR
	for TABLE in $EXTRA_TABLES; do
	    if [ -s $1/$TABLE.txt ] ; then
		    mdbg "Importing extra table: $TABLE"
		    $LOAD_CMD -T -t hash -f $1/$TABLE.txt -h $2 $TABLE

		    # Check return code to make sure the import worked ok
		    if [ $? -ne 0 ] ; then
			merr "Import of extra table failed [$TABLE.txt]"
			merr "Create this missing table with kambdb_recover."
		    fi
	    else
		    mwarn "Import data not found for table: [$TABLE.txt]"
	    fi
	done

	mdbg "All tables are now imported to DB_PATH: [$2]"
	return 0

}
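
# Example (illustrative; the dump directory is hypothetical). As described
# above, this is the second half of a migration: export first, then import:
#   kamailio_import /tmp/kamailio_dump "$DB_PATH"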


kamailio_swap()  # parms: <db> [DB_PATH]
{
	if [ $# -lt 2  ]; then
		echo  "kamailio_swap parms: <db> [DB_PATH]"
		exit 1
	fi

	DB=$2/$1
	DBNEW=$DB.new
	DBOLD=$DB.old
	cp $DB $DBOLD
	mv $DBNEW $DB
}

#####
# The append process is:
# 1. copy DB_PATH/db to DB_PATH/db.new
# 2. append the contents of <newdata> to DB_PATH/db.new
#
kamailio_append()  # parms: <db> <newdata> [DB_PATH]
{
	if [ $# -lt 3  ]; then
		echo  "kamailio_append parms: <db> <newdata> [DB_PATH]"
		exit 1
	fi

	DB=$3/$1
	DBNEW=$DB.new
	if [ -f $DBNEW ] ; then
		rm $DBNEW
	fi

	cp $DB $DBNEW
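	# db_load options used below (standard db_load flags): -T reads keys and
	# data from a plain text file, -t hash selects the hash access method,
	# -f names the input file and -h the database home directory; the second
	# call's -r fileid resets the copy's internal file id so it is not
	# confused with the original database file.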
# echo "$LOAD_CMD -T -t hash -f $2 -h $3 $1.new"
	$LOAD_CMD -T -t hash -f $2 -h $3 $1.new

# echo "$LOAD_CMD -r fileid -h $3 $1.new"
	$LOAD_CMD -r fileid -h $3 $1.new
}


#####
# The newappend process is:
# 1. create a new temporary DBENV in /tmp/sc-<processID>
# 2. append the contents of <newdata> to /tmp/sc-<processID>/db
# 3. move /tmp/sc-<processID>/db over to DB_PATH/db.new
# 4. delete the temporary DBENV dir /tmp/sc-<processID>
#
kamailio_newappend()  # parms: <db> <newdata> [DB_PATH]
{
	if [ $# -lt 3  ]; then
		echo  "kamailio_newappend parms: <db> <newdata> [DB_PATH]"
		exit 1
	fi

	DB=$3/$1
	DBNEW=$DB.new
	if [ -f $DBNEW ] ; then
		rm $DBNEW
	fi
	TMPENV=/tmp/sc-$$
	kamailio_create $TMPENV
	cd $OLDPWD
	$LOAD_CMD -T -t hash -f $2 -h $TMPENV $1
	mv $TMPENV/$1 $DBNEW
	rm -rf $TMPENV
}
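
# Example (illustrative; the data file is hypothetical and must be in
# db_load plain-text format):
#   kamailio_newappend location /tmp/location.txt "$DB_PATH"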


# cat all rows to STDOUT
kamailio_cat()  # pars: <database name> <DB_PATH>
{
	if [ $# -ne 2 ] ; then
		echo  "kamailio_cat params <db> [DB_PATH]"
		exit 1
	fi

	$DUMP_CMD -p -h $2 $1
}
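
# Example: dump the version table created by kamailio_create:
#   kamailio_cat version "$DB_PATH"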

kamailio_drop()  # pars:  <DB_PATH>
{
	if [ $# -ne 1 ] ; then
		echo "kamailio_drop function takes one param"
		exit 1
	fi

	if [ ! -d $1 ] ; then
		echo "Directory does not exist:  $1"
	fi

	minfo "Dropping Berkeley DB database at: $1 ..."

	# core
	if [ -f $1/version ] ; then
		for TABLE in $STANDARD_TABLES; do
		    mdbg "Dropping core table: $TABLE"
		    rm -f $1/$TABLE
		done
	fi

	# presence
	if [ -f $1/presentity ] ; then
		for TABLE in $PRESENCE_TABLES; do
		    mdbg "Dropping presence table: $TABLE"
		    rm -f $1/$TABLE
		done
	fi

	# extra tables
	if [ -f $1/cpl ] ; then
		for TABLE in $EXTRA_TABLES; do
		    mdbg "Dropping extra table: $TABLE"
		    rm -f $1/$TABLE
		done
	fi

	# delete db files and directory
	rm -rf $1/__db.001
	rm -rf $1/__db.002
	rm -rf $1/__db.003
	rm -rf $1/__db.004
	rmdir $1
}


kamailio_create() # pars: <DB_PATH>
{
	if [ $# -ne 1 ] ; then
		echo "kamailio_create param [DB_PATH]"
		exit 1
	fi

	DB_PATH=$1
	if [ ! -d $1 ] ; then
		minfo "creating Berkeley DB database at: [$1]"
		mkdir -p $DB_PATH
	fi

	for TABLE in $STANDARD_TABLES; do
	    mdbg "Creating standard table: $TABLE"
	    $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
	    if [ $? -ne 0 ] ; then
		merr "Creating standard tables failed!"
		exit 1
	    fi
	done

	get_answer $INSTALL_PRESENCE_TABLES "Install presence related tables? (y/n): "
	if [ "$ANSWER" = "y" ]; then
		presence_create $1
	fi

	get_answer $INSTALL_EXTRA_TABLES "Install tables for $EXTRA_MODULES? (y/n): "
	if [ "$ANSWER" = "y" ]; then
		extra_create $1
	fi

} # kamailio_create
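
# Example (illustrative): create the standard tables under the configured
# DB_PATH; each schema file in DB_SCHEMA is loaded via db_load's plain-text
# input format, and the presence/extra prompts are answered as needed:
#   kamailio_create "$DB_PATH"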


presence_create() # pars: <DB_PATH>
{
	if [ $# -ne 1 ] ; then
		merr "presence_create param [DB_PATH]"
		exit 1
	fi

	DB_PATH=$1
	if [ ! -d $1 ] ; then
		# Assert: the directory should already exist
		merr "BerkeleyDB directory does not exist at: [$1]"
		exit 1
	fi

	if [ ! -f $1/version ] ; then
		# Assert: directory should already contain table 'version'
		merr "BerkeleyDB directory does not have VERSION table at: [$1]"
		exit 1
	fi

	for TABLE in $PRESENCE_TABLES; do
	    mdbg "Creating presence table: $TABLE"
	    $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
	    if [ $? -ne 0 ] ; then
		merr "Creating presence tables failed!"
		exit 1
	    fi
	done

}  # end presence_create


extra_create() # pars: <DB_PATH>
{

	if [ $# -ne 1 ] ; then
		merr "extra_create function takes one param (DB_PATH)"
		exit 1
	fi

	DB_PATH=$1
	if [ ! -d $1 ] ; then
		# Assert: the directory should already exist
		merr "BerkeleyDB directory does not exist at: [$1]"
		exit 1
	fi

	if [ ! -f $1/version ] ; then
		# Assert: directory should already contain table 'version'
		merr "BerkeleyDB directory does not have VERSION table at: [$1]"
		exit 1
	fi

	for TABLE in $EXTRA_TABLES; do
	    mdbg "Creating extra table: $TABLE"
	    $LOAD_CMD -T -t hash -f $DB_SCHEMA/$TABLE -h $1 $TABLE
	    if [ $? -ne 0 ] ; then
		merr "Creating extra tables failed!"
		exit 1
	    fi
	done

}  # end extra_create
