1#The MIT License (MIT)
2#Copyright (c) 2014 Microsoft Corporation
3
4#Permission is hereby granted, free of charge, to any person obtaining a copy
5#of this software and associated documentation files (the "Software"), to deal
6#in the Software without restriction, including without limitation the rights
7#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8#copies of the Software, and to permit persons to whom the Software is
9#furnished to do so, subject to the following conditions:
10
11#The above copyright notice and this permission notice shall be included in all
12#copies or substantial portions of the Software.
13
14#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20#SOFTWARE.
21
22"""End to end test.
23"""
24
25import json
26import logging
27import os.path
28import sys
29import unittest
30from six.moves import xrange
31from struct import unpack, pack
32# from six.moves.builtins import *
33import time
34import six
35if six.PY2:
36    import urllib as urllib
37else:
38    import urllib.parse as urllib
39import uuid
40import pytest
41import azure.cosmos.base as base
42import azure.cosmos.consistent_hash_ring as consistent_hash_ring
43import azure.cosmos.documents as documents
44import azure.cosmos.cosmos_client as cosmos_client
45import azure.cosmos.errors as errors
46import azure.cosmos.hash_partition_resolver as hash_partition_resolver
47from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes
48import azure.cosmos.murmur_hash as murmur_hash
49import azure.cosmos.range_partition_resolver as range_partition_resolver
50import azure.cosmos.range as partition_range
51import test.test_config as test_config
52import test.test_partition_resolver as test_partition_resolver
53import azure.cosmos.base as base
54from requests.packages.urllib3.util.retry import Retry
55from requests.exceptions import ConnectionError
56
57
58#IMPORTANT NOTES:
59
60#  	Most test cases in this file create collections in your Azure Cosmos account.
61#  	Collections are billing entities.  By running these test cases, you may incur monetary costs on your account.
62
#  	To run the tests, replace the two member fields (masterKey and host) with values
#   associated with your Azure Cosmos account.
65
@pytest.mark.usefixtures("teardown")
class CRUDTests(unittest.TestCase):
    """Python CRUD Tests.

    End-to-end CRUD coverage for databases, collections, documents,
    permissions and stored procedures against a live Azure Cosmos account.
    """

    # Shared account configuration. These statements run at class-definition
    # time, so importing this module already requires valid account settings.
    configs = test_config._test_config
    host = configs.host
    masterKey = configs.masterKey
    connectionPolicy = configs.connectionPolicy
    # Class-level client used to provision the shared test database once.
    client = cosmos_client.CosmosClient(host, {'masterKey': masterKey}, connectionPolicy)
    # NOTE(review): attribute name is misspelled ("databse"); it is referenced
    # throughout the class, so renaming it would touch every test method.
    databseForTest = configs.create_database_if_not_exist(client)
77
78    def __AssertHTTPFailureWithStatus(self, status_code, func, *args, **kwargs):
79        """Assert HTTP failure with status.
80
81        :Parameters:
82            - `status_code`: int
83            - `func`: function
84        """
85        try:
86            func(*args, **kwargs)
87            self.assertFalse(True, 'function should fail.')
88        except errors.HTTPFailure as inst:
89            self.assertEqual(inst.status_code, status_code)
90
91    @classmethod
92    def setUpClass(cls):
93        if (cls.masterKey == '[YOUR_KEY_HERE]' or
94                cls.host == '[YOUR_ENDPOINT_HERE]'):
95            raise Exception(
96                "You must specify your Azure Cosmos account values for "
97                "'masterKey' and 'host' at the top of this class to run the "
98                "tests.")
99
100    def setUp(self):
101        self.client = cosmos_client.CosmosClient(self.host, {'masterKey': self.masterKey}, self.connectionPolicy)
102
103    def test_database_crud_self_link(self):
104        self._test_database_crud(False)
105
106    def test_database_crud_name_based(self):
107        self._test_database_crud(True)
108
109    def _test_database_crud(self, is_name_based):
110        # read databases.
111        databases = list(self.client.ReadDatabases())
112        # create a database.
113        before_create_databases_count = len(databases)
114        database_definition = { 'id': str(uuid.uuid4()) }
115        created_db = self.client.CreateDatabase(database_definition)
116        self.assertEqual(created_db['id'], database_definition['id'])
117        # Read databases after creation.
118        databases = list(self.client.ReadDatabases())
119        self.assertEqual(len(databases),
120                         before_create_databases_count + 1,
121                         'create should increase the number of databases')
122        # query databases.
123        databases = list(self.client.QueryDatabases({
124            'query': 'SELECT * FROM root r WHERE r.id=@id',
125            'parameters': [
126                { 'name':'@id', 'value': database_definition['id'] }
127            ]
128        }))
129        self.assert_(databases,
130                     'number of results for the query should be > 0')
131
132        # read database.
133        self.client.ReadDatabase(self.GetDatabaseLink(created_db, is_name_based))
134
135        # delete database.
136        self.client.DeleteDatabase(self.GetDatabaseLink(created_db, is_name_based))
137        # read database after deletion
138        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
139                                           self.client.ReadDatabase,
140                                           self.GetDatabaseLink(created_db, is_name_based))
141
142    def test_sql_query_crud(self):
143        # create two databases.
144        db1 = self.client.CreateDatabase({ 'id': 'database 1' })
145        db2 = self.client.CreateDatabase({ 'id': 'database 2' })
146        # query with parameters.
147        databases = list(self.client.QueryDatabases({
148            'query': 'SELECT * FROM root r WHERE r.id=@id',
149            'parameters': [
150                { 'name':'@id', 'value': 'database 1' }
151            ]
152        }))
153        self.assertEqual(1, len(databases), 'Unexpected number of query results.')
154
155        # query without parameters.
156        databases = list(self.client.QueryDatabases({
157            'query': 'SELECT * FROM root r WHERE r.id="database non-existing"'
158        }))
159        self.assertEqual(0, len(databases), 'Unexpected number of query results.')
160
161        # query with a string.
162        databases = list(self.client.QueryDatabases('SELECT * FROM root r WHERE r.id="database 2"'))
163        self.assertEqual(1, len(databases), 'Unexpected number of query results.')
164
165        self.client.DeleteDatabase(db1['_self'])
166        self.client.DeleteDatabase(db2['_self'])
167
168    def test_collection_crud_self_link(self):
169        self._test_collection_crud(False)
170
171    def test_collection_crud_name_based(self):
172        self._test_collection_crud(True)
173
174    def _test_collection_crud(self, is_name_based):
175        created_db = self.databseForTest
176        collections = list(self.client.ReadContainers(self.GetDatabaseLink(created_db, is_name_based)))
177        # create a collection
178        before_create_collections_count = len(collections)
179        collection_definition = { 'id': 'test_collection_crud ' + str(uuid.uuid4()), 'indexingPolicy': {'indexingMode': 'consistent'} }
180        created_collection = self.client.CreateContainer(self.GetDatabaseLink(created_db, is_name_based),
181                                                     collection_definition)
182        self.assertEqual(collection_definition['id'], created_collection['id'])
183        self.assertEqual('consistent', created_collection['indexingPolicy']['indexingMode'])
184
185        # read collections after creation
186        collections = list(self.client.ReadContainers(self.GetDatabaseLink(created_db, is_name_based)))
187        self.assertEqual(len(collections),
188                         before_create_collections_count + 1,
189                         'create should increase the number of collections')
190        # query collections
191        collections = list(self.client.QueryContainers(
192            self.GetDatabaseLink(created_db, is_name_based),
193            {
194                'query': 'SELECT * FROM root r WHERE r.id=@id',
195                'parameters': [
196                    { 'name':'@id', 'value': collection_definition['id'] }
197                ]
198            }))
199        # Replacing indexing policy is allowed.
200        lazy_policy = {'indexingMode': 'lazy'}
201        created_collection['indexingPolicy'] = lazy_policy
202        replaced_collection = self.client.ReplaceContainer(self.GetDocumentCollectionLink(created_db, created_collection, is_name_based), created_collection)
203        self.assertEqual('lazy', replaced_collection['indexingPolicy']['indexingMode'])
204        # Replacing collection Id should fail.
205        change_collection = created_collection.copy()
206        change_collection['id'] = 'try_change_id'
207        self.__AssertHTTPFailureWithStatus(StatusCodes.BAD_REQUEST,
208                                           self.client.ReplaceContainer,
209                                           self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
210                                           change_collection)
211
212        self.assertTrue(collections)
213        # delete collection
214        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, created_collection, is_name_based))
215        # read collection after deletion
216        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
217                                           self.client.ReadContainer,
218                                           self.GetDocumentCollectionLink(created_db, created_collection, is_name_based))
219
220
221    def test_partitioned_collection(self):
222        created_db = self.databseForTest
223
224        collection_definition = {   'id': 'test_partitioned_collection ' + str(uuid.uuid4()),
225                                    'partitionKey':
226                                    {
227                                        'paths': ['/id'],
228                                        'kind': documents.PartitionKind.Hash
229                                    }
230                                }
231
232        options = { 'offerThroughput': 10100 }
233
234        created_collection = self.client.CreateContainer(self.GetDatabaseLink(created_db),
235                                collection_definition,
236                                options)
237
238        self.assertEqual(collection_definition.get('id'), created_collection.get('id'))
239        self.assertEqual(collection_definition.get('partitionKey').get('paths')[0], created_collection.get('partitionKey').get('paths')[0])
240        self.assertEqual(collection_definition.get('partitionKey').get('kind'), created_collection.get('partitionKey').get('kind'))
241
242        offers = self.GetCollectionOffers(self.client, created_collection['_rid'])
243
244        self.assertEqual(1, len(offers))
245        expected_offer = offers[0]
246        self.assertEqual(expected_offer.get('content').get('offerThroughput'), options.get('offerThroughput'))
247
248        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, created_collection))
249
250    def test_partitioned_collection_quota(self):
251        created_db = self.databseForTest
252
253        options = { 'offerThroughput': 20000 }
254
255        created_collection = self.configs.create_multi_partition_collection_if_not_exist(self.client)
256
257        read_options = { 'populatePartitionKeyRangeStatistics': True, 'populateQuotaInfo': True}
258
259        retrieved_collection = self.client.ReadContainer(created_collection.get('_self'), read_options)
260
261        self.assertTrue(retrieved_collection.get("statistics") != None)
262        self.assertTrue(self.client.last_response_headers.get("x-ms-resource-usage") != None)
263
    def test_partitioned_collection_partition_key_extraction(self):
        """Verify partition-key extraction from document bodies.

        Three collections with partition-key paths /address/state, /address,
        and /address/state/city cover: extraction of a leaf value, a non-leaf
        (dict) value, and a missing path — the last two resolve to
        documents.Undefined.
        """
        created_db = self.databseForTest

        collection_definition = {   'id': 'test_partitioned_collection_partition_key_extraction ' + str(uuid.uuid4()),
                                    'partitionKey':
                                    {
                                        'paths': ['/address/state'],
                                        'kind': documents.PartitionKind.Hash
                                    }
                                }

        created_collection = self.client.CreateContainer(self.GetDatabaseLink(created_db),
                                collection_definition)

        document_definition = {'id': 'document1',
                               'address' : { 'street' : '1 Microsoft Way',
                                             'city' : 'Redmond',
                                             'state' : 'WA',
                                             'zip code' : 98052
                                           }
                               }

        # create document without partition key being specified
        created_document = self.client.CreateItem(
            self.GetDocumentCollectionLink(created_db, created_collection),
            document_definition)

        self.assertEqual(created_document.get('id'), document_definition.get('id'))
        self.assertEqual(created_document.get('address').get('state'), document_definition.get('address').get('state'))

        # create document by specifying a different partition key in options than what's in the document will result in BadRequest(status code 400)
        document_definition['id'] = 'document2'
        options = { 'partitionKey': 'NY' }

        self.__AssertHTTPFailureWithStatus(
            StatusCodes.BAD_REQUEST,
            self.client.CreateItem,
            self.GetDocumentCollectionLink(created_db, created_collection),
            document_definition,
            options)

        collection_definition1 = {   'id': 'test_partitioned_collection_partition_key_extraction1 ' + str(uuid.uuid4()),
                                    'partitionKey':
                                    {
                                        'paths': ['/address'],
                                        'kind': documents.PartitionKind.Hash
                                    }
                                }

        created_collection1 = self.client.CreateContainer(self.GetDatabaseLink(created_db),
                                collection_definition1)

        # Create document with partitionkey not present as a leaf level property but a dict.
        # The assertions below show CreateItem records the extracted key by
        # mutating the passed-in `options` dict in place.
        options = {}
        created_document = self.client.CreateItem(
            self.GetDocumentCollectionLink(created_db, created_collection1),
            document_definition, options)

        self.assertEqual(options['partitionKey'], documents.Undefined)

        collection_definition2 = {   'id': 'test_partitioned_collection_partition_key_extraction2 ' + str(uuid.uuid4()),
                                    'partitionKey':
                                    {
                                        'paths': ['/address/state/city'],
                                        'kind': documents.PartitionKind.Hash
                                    }
                                }

        created_collection2 = self.client.CreateContainer(self.GetDatabaseLink(created_db),
                                collection_definition2)

        # Create document with partitionkey not present in the document
        options = {}
        created_document = self.client.CreateItem(
            self.GetDocumentCollectionLink(created_db, created_collection2),
            document_definition, options)

        self.assertEqual(options['partitionKey'], documents.Undefined)

        # Clean up all three collections (billing entities).
        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, created_collection))
        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, created_collection1))
        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, created_collection2))
346
347    def test_partitioned_collection_partition_key_extraction_special_chars(self):
348        created_db = self.databseForTest
349
350        collection_definition1 = {   'id': 'test_partitioned_collection_partition_key_extraction_special_chars1 ' + str(uuid.uuid4()),
351                                    'partitionKey':
352                                    {
353                                        'paths': ['/\"level\' 1*()\"/\"le/vel2\"'],
354                                        'kind': documents.PartitionKind.Hash
355                                    }
356                                }
357
358        created_collection1 = self.client.CreateContainer(self.GetDatabaseLink(created_db),
359                                collection_definition1)
360
361        document_definition = {'id': 'document1',
362                               "level' 1*()" : { "le/vel2" : 'val1' }
363                              }
364
365        options = {}
366        self.client.CreateItem(
367            self.GetDocumentCollectionLink(created_db, created_collection1),
368            document_definition, options)
369
370        self.assertEqual(options['partitionKey'], 'val1')
371
372        collection_definition2 = {   'id': 'test_partitioned_collection_partition_key_extraction_special_chars2 ' + str(uuid.uuid4()),
373                                    'partitionKey':
374                                    {
375                                        'paths': ['/\'level\" 1*()\'/\'le/vel2\''],
376                                        'kind': documents.PartitionKind.Hash
377                                    }
378                                }
379
380        created_collection2 = self.client.CreateContainer(self.GetDatabaseLink(created_db),
381                                collection_definition2)
382
383        document_definition = {'id': 'document2',
384                               'level\" 1*()' : { 'le/vel2' : 'val2' }
385                              }
386
387        options = {}
388        self.client.CreateItem(
389            self.GetDocumentCollectionLink(created_db, created_collection2),
390            document_definition, options)
391
392        self.assertEqual(options['partitionKey'], 'val2')
393
394        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, created_collection1))
395        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, created_collection2))
396
397    def test_partitioned_collection_path_parser(self):
398        test_dir = os.path.dirname(os.path.abspath(__file__))
399        with open(os.path.join(test_dir, "BaselineTest.PathParser.json")) as json_file:
400            entries = json.loads(json_file.read())
401        for entry in entries:
402            parts = base.ParsePaths([entry['path']])
403            self.assertEqual(parts, entry['parts'])
404
405        paths = ["/\"Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1\"/*"]
406        parts = [ "Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1", "*" ]
407        self.assertEqual(parts, base.ParsePaths(paths))
408
409        paths = ["/'Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1'/*"]
410        parts = [ "Ke \\ \\\" \\\' \\? \\a \\\b \\\f \\\n \\\r \\\t \\v y1", "*" ]
411        self.assertEqual(parts, base.ParsePaths(paths))
412
413    def test_partitioned_collection_document_crud_and_query(self):
414        created_db = self.databseForTest
415
416        created_collection = self.configs.create_multi_partition_collection_if_not_exist(self.client)
417
418        document_definition = {'id': 'document',
419                               'key': 'value'}
420
421        created_document = self.client.CreateItem(
422            self.GetDocumentCollectionLink(created_db, created_collection),
423            document_definition)
424
425        self.assertEqual(created_document.get('id'), document_definition.get('id'))
426        self.assertEqual(created_document.get('key'), document_definition.get('key'))
427
428        # For ReadDocument, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
429        #self.__AssertHTTPFailureWithStatus(
430        #    StatusCodes.BAD_REQUEST,
431        #    client.ReadItem,
432        #    self.GetDocumentLink(created_db, created_collection, created_document))
433
434        # read document
435        options = { 'partitionKey': document_definition.get('id') }
436        read_document = self.client.ReadItem(
437            self.GetDocumentLink(created_db, created_collection, created_document),
438            options)
439
440        self.assertEqual(read_document.get('id'), created_document.get('id'))
441        self.assertEqual(read_document.get('key'), created_document.get('key'))
442
443        # Read document feed doesn't require partitionKey as it's always a cross partition query
444        documentlist = list(self.client.ReadItems(
445            self.GetDocumentCollectionLink(created_db, created_collection)))
446        self.assertEqual(1, len(documentlist))
447
448        # replace document
449        document_definition['key'] = 'new value'
450
451        replaced_document = self.client.ReplaceItem(
452            self.GetDocumentLink(created_db, created_collection, created_document),
453            document_definition)
454
455        self.assertEqual(replaced_document.get('key'), document_definition.get('key'))
456
457        # upsert document(create scenario)
458        document_definition['id'] = 'document2'
459        document_definition['key'] = 'value2'
460
461        upserted_document = self.client.UpsertItem(self.GetDocumentCollectionLink(created_db, created_collection),
462            document_definition)
463
464        self.assertEqual(upserted_document.get('id'), document_definition.get('id'))
465        self.assertEqual(upserted_document.get('key'), document_definition.get('key'))
466
467        documentlist = list(self.client.ReadItems(
468            self.GetDocumentCollectionLink(created_db, created_collection)))
469        self.assertEqual(2, len(documentlist))
470
471        # For DeleteDocument, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
472        self.__AssertHTTPFailureWithStatus(
473            StatusCodes.BAD_REQUEST,
474            self.client.DeleteItem,
475            self.GetDocumentLink(created_db, created_collection, upserted_document))
476
477        # delete document
478        options = { 'partitionKey': upserted_document.get('id') }
479        self.client.DeleteItem(
480            self.GetDocumentLink(created_db, created_collection, upserted_document),
481            options)
482
483        # query document on the partition key specified in the predicate will pass even without setting enableCrossPartitionQuery or passing in the partitionKey value
484        documentlist = list(self.client.QueryItems(
485            self.GetDocumentCollectionLink(created_db, created_collection),
486            {
487                'query': 'SELECT * FROM root r WHERE r.id=\'' + replaced_document.get('id') + '\''
488            }))
489        self.assertEqual(1, len(documentlist))
490
491        # query document on any property other than partitionKey will fail without setting enableCrossPartitionQuery or passing in the partitionKey value
492        try:
493            list(self.client.QueryItems(
494                self.GetDocumentCollectionLink(created_db, created_collection),
495                {
496                    'query': 'SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\''
497                }))
498        except Exception:
499            pass
500
501        # cross partition query
502        options = { 'enableCrossPartitionQuery': True }
503        documentlist = list(self.client.QueryItems(
504            self.GetDocumentCollectionLink(created_db, created_collection),
505            {
506                'query': 'SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\''
507            }, options))
508
509        self.assertEqual(1, len(documentlist))
510
511        # query document by providing the partitionKey value
512        options = { 'partitionKey': replaced_document.get('id') }
513        documentlist = list(self.client.QueryItems(
514            self.GetDocumentCollectionLink(created_db, created_collection),
515            {
516                'query': 'SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\''
517            }, options))
518
519        self.assertEqual(1, len(documentlist))
520
    def test_partitioned_collection_permissions(self):
        """Resource tokens scoped to a partition key must gate document access.

        Creates two partitioned collections (one with All permissions, one
        Read-only), both restricted to resourcePartitionKey [1], then checks
        which operations a resource-token client may perform against each.
        """
        created_db = self.databseForTest

        collection_definition = {   'id': 'sample collection ' + str(uuid.uuid4()),
                                    'partitionKey':
                                    {
                                        'paths': ['/key'],
                                        'kind': documents.PartitionKind.Hash
                                    }
                                }

        # The generated id above is discarded; both collections reuse this
        # definition with fixed, human-readable ids.
        collection_definition['id'] = 'test_partitioned_collection_permissions all collection'

        all_collection = self.client.CreateContainer(self.GetDatabaseLink(created_db),
                                collection_definition)

        collection_definition['id'] = 'test_partitioned_collection_permissions read collection'

        read_collection = self.client.CreateContainer(self.GetDatabaseLink(created_db),
                                collection_definition)

        user = self.client.CreateUser(self.GetDatabaseLink(created_db), { 'id': 'user' })

        # All-access permission on all_collection, limited to partition key 1.
        permission_definition = {
            'id': 'all permission',
            'permissionMode': documents.PermissionMode.All,
            'resource': self.GetDocumentCollectionLink(created_db, all_collection),
            'resourcePartitionKey' : [1]
        }

        all_permission = self.client.CreatePermission(self.GetUserLink(created_db, user), permission_definition)

        # Read-only permission on read_collection, limited to partition key 1.
        permission_definition = {
            'id': 'read permission',
            'permissionMode': documents.PermissionMode.Read,
            'resource': self.GetDocumentCollectionLink(created_db, read_collection),
            'resourcePartitionKey' : [1]
        }

        read_permission = self.client.CreatePermission(self.GetUserLink(created_db, user), permission_definition)

        resource_tokens = {}
        # storing the resource tokens based on Resource IDs
        resource_tokens[all_collection['_rid']] = (all_permission['_token'])
        resource_tokens[read_collection['_rid']] = (read_permission['_token'])

        # A client authenticated only with the two resource tokens above —
        # no master key.
        restricted_client = cosmos_client.CosmosClient(
            CRUDTests.host, {'resourceTokens': resource_tokens}, CRUDTests.connectionPolicy)

        document_definition = {'id': 'document1',
                               'key': 1
                               }

        # Create document in all_collection should succeed since the partitionKey is 1 which is what specified as resourcePartitionKey in permission object and it has all permissions
        created_document = restricted_client.CreateItem(
            self.GetDocumentCollectionLink(created_db, all_collection, False),
            document_definition)

        # Create document in read_collection should fail since it has only read permissions for this collection
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.FORBIDDEN,
            restricted_client.CreateItem,
            self.GetDocumentCollectionLink(created_db, read_collection, False),
            document_definition)

        # Read document feed should succeed for this collection. Note that I need to pass in partitionKey here since permission has resourcePartitionKey defined
        options = { 'partitionKey': document_definition.get('key') }
        documentlist = list(restricted_client.ReadItems(
            self.GetDocumentCollectionLink(created_db, read_collection, False),
            options))

        self.assertEqual(0, len(documentlist))

        document_definition['key'] = 2
        options = { 'partitionKey': document_definition.get('key') }
        # Create document should fail since the partitionKey is 2 which is different that what is specified as resourcePartitionKey in permission object
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.FORBIDDEN,
            restricted_client.CreateItem,
            self.GetDocumentCollectionLink(created_db, all_collection, False),
            document_definition,
            options)

        document_definition['key'] = 1
        options = { 'partitionKey': document_definition.get('key') }
        # Delete document should succeed since the partitionKey is 1 which is what specified as resourcePartitionKey in permission object
        created_document = restricted_client.DeleteItem(
            self.GetDocumentLink(created_db, all_collection, created_document, False),
            options)

        # Delete document in read_collection should fail since it has only read permissions for this collection
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.FORBIDDEN,
            restricted_client.DeleteItem,
            self.GetDocumentCollectionLink(created_db, read_collection, False),
            options)

        # Clean up with the master-key client (billing entities).
        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, all_collection))
        self.client.DeleteContainer(self.GetDocumentCollectionLink(created_db, read_collection))
620
621    def test_partitioned_collection_execute_stored_procedure(self):
622        created_db = self.databseForTest
623
624        created_collection = self.configs.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client)
625
626        sproc = {
627            'id': 'storedProcedure' + str(uuid.uuid4()),
628            'body': (
629                'function () {' +
630                '   var client = getContext().getCollection();' +
631                '   client.createDocument(client.getSelfLink(), { id: \'testDoc\', pk : 2}, {}, function(err, docCreated, options) { ' +
632                '   if(err) throw new Error(\'Error while creating document: \' + err.message);' +
633                '   else {' +
634                         '   getContext().getResponse().setBody(1);' +
635                '        }' +
636                '   });}')
637        }
638
639        created_sproc = self.client.CreateStoredProcedure(self.GetDocumentCollectionLink(created_db, created_collection), sproc)
640
641        # Partiton Key value same as what is specified in the stored procedure body
642        self.client.ExecuteStoredProcedure(self.GetStoredProcedureLink(created_db, created_collection, created_sproc),
643                                               None, { 'partitionKey' : 2})
644
645        # Partiton Key value different than what is specified in the stored procedure body will cause a bad request(400) error
646        self.__AssertHTTPFailureWithStatus(
647            StatusCodes.BAD_REQUEST,
648            self.client.ExecuteStoredProcedure,
649            self.GetStoredProcedureLink(created_db, created_collection, created_sproc),
650            None,
651            { 'partitionKey' : 3})
652
    def test_partitioned_collection_attachment_crud_and_query(self):
        """Attachment CRUD + query pass against a partitioned collection.

        Every single-attachment operation (create/read/replace/upsert/delete
        and media upload) must carry the owning document's partition key in
        ``options``; omitting it yields 400 (Bad Request). Feeds and queries
        likewise require the partition key to be supplied.
        """
        class ReadableStream(object):
            """Customized file-like stream.
            """

            def __init__(self, chunks = ['first chunk ', 'second chunk']):
                """Initialization.

                :Parameters:
                    - `chunks`: list

                """
                # NOTE: the mutable default argument is safe here because it
                # is only copied/encoded below, never mutated in place.
                if six.PY2:
                    self._chunks = list(chunks)
                else:
                    # python3: convert to bytes
                    self._chunks = [chunk.encode() for chunk in chunks]

            def read(self, n=-1):
                """Simulates the read method in a file stream.

                :Parameters:
                    - `n`: int

                :Returns:
                    bytes or str

                """
                # NOTE(review): on py3 the chunks are bytes but exhaustion
                # returns '' (str) as the EOF sentinel — presumably callers
                # only test truthiness; confirm they never concatenate.
                if self._chunks:
                    return self._chunks.pop(0)
                else:
                    return ''

            def __len__(self):
                """To make len(ReadableStream) work.
                """
                return sum([len(chunk) for chunk in self._chunks])

        db = self.databseForTest
        # Collection partitioned on '/id', so each document's own id is its
        # partition key value.
        collection_definition = {'id': 'test_partitioned_collection_attachment_crud_and_query ' + str(uuid.uuid4()),
                                 'partitionKey': {'paths': ['/id'],'kind': 'Hash'}}

        collection = self.client.CreateContainer(db['_self'], collection_definition)

        document_definition = {'id': 'sample document' + str(uuid.uuid4()),
                               'key': 'value'}

        document = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection),
                                         document_definition)

        content_stream = ReadableStream()
        options = { 'slug': 'sample attachment',
                    'contentType': 'application/text' }

        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        #self.__AssertHTTPFailureWithStatus(
        #    StatusCodes.BAD_REQUEST,
        #    client.CreateAttachmentAndUploadMedia,
        #    self.GetDocumentLink(db, collection, document),
        #    content_stream,
        #    options)

        content_stream = ReadableStream()
        # Setting the partitionKey as part of options is required for attachment CRUD
        options = { 'slug': 'sample attachment' + str(uuid.uuid4()),
                    'contentType': 'application/text',
                    'partitionKey' :  document_definition.get('id') }

        # create attachment and upload media
        attachment = self.client.CreateAttachmentAndUploadMedia(
            self.GetDocumentLink(db, collection, document), content_stream, options)

        # The 'slug' option becomes the attachment id.
        self.assertEqual(attachment['id'], options['slug'])

        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        # Best-effort: the feed read without a partition key is expected to
        # fail, but nothing is asserted if it happens to succeed.
        try:
            list(self.client.ReadAttachments(
            self.GetDocumentLink(db, collection, document)))
        except Exception:
            pass

        # Read attachment feed requires partitionKey to be passed
        options = { 'partitionKey': document_definition.get('id') }
        attachmentlist = list(self.client.ReadAttachments(
            self.GetDocumentLink(db, collection, document), options))
        self.assertEqual(1, len(attachmentlist))

        content_stream = ReadableStream()
        options = { 'slug': 'new attachment' + str(uuid.uuid4()),
                    'contentType': 'application/text' }
        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.BAD_REQUEST,
            self.client.UpsertAttachmentAndUploadMedia,
            self.GetDocumentLink(db, collection, document),
            content_stream,
            options)

        content_stream = ReadableStream()
        # Setting the partitionKey as part of options is required for attachment CRUD
        options = { 'slug': 'new attachment' + str(uuid.uuid4()),
                    'contentType': 'application/text',
                    'partitionKey' :  document_definition.get('id') }

        # upsert attachment and upload media
        attachment = self.client.UpsertAttachmentAndUploadMedia(
            self.GetDocumentLink(db, collection, document), content_stream, options)

        self.assertEqual(attachment['id'], options['slug'])

        options = { 'partitionKey': document_definition.get('id') }
        attachmentlist = list(self.client.ReadAttachments(
            self.GetDocumentLink(db, collection, document), options))
        self.assertEqual(2, len(attachmentlist))

        # create attachment with media link
        dynamic_attachment = {
            'id': 'dynamic attachment' + str(uuid.uuid4()),
            'media': 'http://xstore.',
            'MediaType': 'Book',
            'Author':'My Book Author',
            'Title':'My Book Title',
            'contentType':'application/text'
        }

        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.BAD_REQUEST,
            self.client.CreateAttachment,
            self.GetDocumentLink(db, collection, document),
            dynamic_attachment)

        # create dynamic attachment
        options = { 'partitionKey': document_definition.get('id') }
        attachment = self.client.CreateAttachment(self.GetDocumentLink(db, collection, document),
                                             dynamic_attachment, options)

        self.assertEqual(attachment['MediaType'], dynamic_attachment['MediaType'])
        self.assertEqual(attachment['Author'], dynamic_attachment['Author'])

        # Read Attachment feed
        options = { 'partitionKey': document_definition.get('id') }
        attachmentlist = list(self.client.ReadAttachments(
            self.GetDocumentLink(db, collection, document), options))
        self.assertEqual(3, len(attachmentlist))

        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        #self.__AssertHTTPFailureWithStatus(
        #    StatusCodes.BAD_REQUEST,
        #    client.ReadAttachment,
        #    self.GetAttachmentLink(db, collection, document, attachment))

        # Read attachment
        options = { 'partitionKey': document_definition.get('id') }
        read_attachment = self.client.ReadAttachment(self.GetAttachmentLink(db, collection, document, attachment),
                                                options)

        self.assertEqual(attachment['id'], read_attachment['id'])

        attachment['Author'] = 'new author'

        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.BAD_REQUEST,
            self.client.ReplaceAttachment,
            self.GetAttachmentLink(db, collection, document, attachment),
            attachment)

        # replace the attachment
        options = { 'partitionKey': document_definition.get('id') }
        replaced_attachment = self.client.ReplaceAttachment(self.GetAttachmentLink(db, collection, document, attachment), attachment, options)

        self.assertEqual(attachment['id'], replaced_attachment['id'])
        self.assertEqual(attachment['Author'], replaced_attachment['Author'])

        attachment['id'] = 'new dynamic attachment' + str(uuid.uuid4())
        attachment['Title'] = 'new title'

        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.BAD_REQUEST,
            self.client.UpsertAttachment,
            self.GetDocumentLink(db, collection, document),
            attachment)

        # upsert attachment(create scenario)
        options = { 'partitionKey': document_definition.get('id') }
        upserted_attachment = self.client.UpsertAttachment(self.GetDocumentLink(db, collection, document), attachment, options)

        self.assertEqual(attachment['id'], upserted_attachment['id'])
        self.assertEqual(attachment['Title'], upserted_attachment['Title'])

        # query attachments will fail without passing in the partitionKey value
        # Best-effort: nothing is asserted if the query unexpectedly succeeds.
        try:
            list(self.client.QueryAttachments(
                self.GetDocumentLink(db, collection, document),
                {
                    'query': 'SELECT * FROM root r WHERE r.MediaType=\'' + dynamic_attachment.get('MediaType') + '\''
                }))
        except Exception:
            pass

        # query attachments by providing the partitionKey value
        options = { 'partitionKey': document_definition.get('id') }
        attachmentlist = list(self.client.QueryAttachments(
            self.GetDocumentLink(db, collection, document),
            {
                'query': 'SELECT * FROM root r WHERE r.MediaType=\'' + dynamic_attachment.get('MediaType') + '\''
            }, options))

        # Two 'Book' attachments exist: the dynamic one and its replaced copy.
        self.assertEqual(2, len(attachmentlist))

        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.BAD_REQUEST,
            self.client.DeleteAttachment,
            self.GetAttachmentLink(db, collection, document, attachment))

        # deleting attachment
        options = { 'partitionKey': document_definition.get('id') }
        self.client.DeleteAttachment(self.GetAttachmentLink(db, collection, document, attachment), options)
        self.client.DeleteContainer(collection['_self'])
875
876    def test_partitioned_collection_partition_key_value_types(self):
877        created_db = self.databseForTest
878
879        created_collection = self.configs.create_multi_partition_collection_with_custom_pk_if_not_exist(self.client)
880
881        document_definition = {'id': 'document1' + str(uuid.uuid4()),
882                               'pk' : None,
883                               'spam': 'eggs'}
884
885        # create document with partitionKey set as None here
886        self.client.CreateItem(
887            self.GetDocumentCollectionLink(created_db, created_collection),
888            document_definition)
889
890        document_definition = {'id': 'document1' + str(uuid.uuid4()),
891                               'spam': 'eggs'}
892
893        # create document with partitionKey set as Undefined here
894        self.client.CreateItem(
895            self.GetDocumentCollectionLink(created_db, created_collection),
896            document_definition)
897
898        document_definition = {'id': 'document1' + str(uuid.uuid4()),
899                               'pk' : True,
900                               'spam': 'eggs'}
901
902        # create document with bool partitionKey
903        self.client.CreateItem(
904            self.GetDocumentCollectionLink(created_db, created_collection),
905            document_definition)
906
907        document_definition = {'id': 'document1' + str(uuid.uuid4()),
908                               'pk' : 'value',
909                               'spam': 'eggs'}
910
911        # create document with string partitionKey
912        self.client.CreateItem(
913            self.GetDocumentCollectionLink(created_db, created_collection),
914            document_definition)
915
916        document_definition = {'id': 'document1' + str(uuid.uuid4()),
917                               'pk' : 100,
918                               'spam': 'eggs'}
919
920        # create document with int partitionKey
921        self.client.CreateItem(
922            self.GetDocumentCollectionLink(created_db, created_collection),
923            document_definition)
924
925        document_definition = {'id': 'document1' + str(uuid.uuid4()),
926                               'pk' : 10.50,
927                               'spam': 'eggs'}
928
929        # create document with float partitionKey
930        self.client.CreateItem(
931            self.GetDocumentCollectionLink(created_db, created_collection),
932            document_definition)
933
934    def test_partitioned_collection_conflict_crud_and_query(self):
935        created_db = self.databseForTest
936
937        created_collection = self.configs.create_multi_partition_collection_if_not_exist(self.client)
938
939        conflict_definition = {'id': 'new conflict',
940                               'resourceId' : 'doc1',
941                               'operationType' : 'create',
942                               'resourceType' : 'document'
943                              }
944
945        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
946        #self.__AssertHTTPFailureWithStatus(
947        #    StatusCodes.BAD_REQUEST,
948        #    client.ReadConflict,
949        #    self.GetConflictLink(created_db, created_collection, conflict_definition))
950
951        # read conflict here will return resource not found(404) since there is no conflict here
952        options = { 'partitionKey': conflict_definition.get('id') }
953        self.__AssertHTTPFailureWithStatus(
954            StatusCodes.NOT_FOUND,
955            self.client.ReadConflict,
956            self.GetConflictLink(created_db, created_collection, conflict_definition),
957            options)
958
959        # Read conflict feed doesn't requires partitionKey to be specified as it's a cross partition thing
960        conflictlist = list(self.client.ReadConflicts(self.GetDocumentCollectionLink(created_db, created_collection)))
961        self.assertEqual(0, len(conflictlist))
962
963        # Currently, we require to have the partitionKey to be specified as part of options otherwise we get BadRequest(status code 400)
964        self.__AssertHTTPFailureWithStatus(
965            StatusCodes.BAD_REQUEST,
966            self.client.DeleteConflict,
967            self.GetConflictLink(created_db, created_collection, conflict_definition))
968
969        # delete conflict here will return resource not found(404) since there is no conflict here
970        options = { 'partitionKey': conflict_definition.get('id') }
971        self.__AssertHTTPFailureWithStatus(
972            StatusCodes.NOT_FOUND,
973            self.client.DeleteConflict,
974            self.GetConflictLink(created_db, created_collection, conflict_definition),
975            options)
976
977        # query conflicts on any property other than partitionKey will fail without setting enableCrossPartitionQuery or passing in the partitionKey value
978        try:
979            list(self.client.QueryConflicts(
980                self.GetDocumentCollectionLink(created_db, created_collection),
981                {
982                    'query': 'SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\''
983                }))
984        except Exception:
985            pass
986
987        # cross partition query
988        options = { 'enableCrossPartitionQuery': True }
989        conflictlist = list(self.client.QueryConflicts(
990            self.GetDocumentCollectionLink(created_db, created_collection),
991            {
992                'query': 'SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\''
993            }, options))
994
995        self.assertEqual(0, len(conflictlist))
996
997        # query conflicts by providing the partitionKey value
998        options = { 'partitionKey': conflict_definition.get('id') }
999        conflictlist = list(self.client.QueryConflicts(
1000            self.GetDocumentCollectionLink(created_db, created_collection),
1001            {
1002                'query': 'SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\''
1003            }, options))
1004
1005        self.assertEqual(0, len(conflictlist))
1006
    def test_document_crud_self_link(self):
        """Run the document CRUD scenario addressing resources by self link."""
        self._test_document_crud(False)
1009
    def test_document_crud_name_based(self):
        """Run the document CRUD scenario addressing resources by name-based link."""
        self._test_document_crud(True)
1012
1013    def _test_document_crud(self, is_name_based):
1014        # create database
1015        created_db = self.databseForTest
1016        # create collection
1017        created_collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
1018        # read documents
1019        documents = list(self.client.ReadItems(
1020            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based)))
1021
1022        # create a document
1023        before_create_documents_count = len(documents)
1024        document_definition = {'name': 'sample document',
1025                               'spam': 'eggs',
1026                               'key': 'value'}
1027        # Should throw an error because automatic id generation is disabled.
1028        self.__AssertHTTPFailureWithStatus(
1029            StatusCodes.BAD_REQUEST,
1030            self.client.CreateItem,
1031            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1032            document_definition,
1033            {'disableAutomaticIdGeneration': True})
1034
1035        created_document = self.client.CreateItem(
1036            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1037            document_definition)
1038        self.assertEqual(created_document['name'],
1039                         document_definition['name'])
1040        self.assertTrue(created_document['id'] != None)
1041        # duplicated documents are allowed when 'id' is not provided.
1042        duplicated_document = self.client.CreateItem(
1043            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1044            document_definition)
1045        self.assertEqual(duplicated_document['name'],
1046                         document_definition['name'])
1047        self.assert_(duplicated_document['id'])
1048        self.assertNotEqual(duplicated_document['id'],
1049                            created_document['id'])
1050        # duplicated documents are not allowed when 'id' is provided.
1051        duplicated_definition_with_id = document_definition.copy()
1052        duplicated_definition_with_id['id'] = created_document['id']
1053        self.__AssertHTTPFailureWithStatus(StatusCodes.CONFLICT,
1054                                           self.client.CreateItem,
1055                                           self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1056                                           duplicated_definition_with_id)
1057        # read documents after creation
1058        documents = list(self.client.ReadItems(
1059            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based)))
1060        self.assertEqual(
1061            len(documents),
1062            before_create_documents_count + 2,
1063            'create should increase the number of documents')
1064        # query documents
1065        documents = list(self.client.QueryItems(
1066            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1067            {
1068                'query': 'SELECT * FROM root r WHERE r.name=@name',
1069                'parameters': [
1070                    { 'name':'@name', 'value':document_definition['name'] }
1071                ]
1072            }))
1073        self.assert_(documents)
1074        documents = list(self.client.QueryItems(
1075            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1076            {
1077                'query': 'SELECT * FROM root r WHERE r.name=@name',
1078                'parameters': [
1079                    { 'name':'@name', 'value':document_definition['name'] }
1080                ]
1081            },
1082            { 'enableScanInQuery': True}))
1083        self.assert_(documents)
1084        # replace document.
1085        created_document['name'] = 'replaced document'
1086        created_document['spam'] = 'not eggs'
1087        old_etag = created_document['_etag']
1088        replaced_document = self.client.ReplaceItem(
1089            self.GetDocumentLink(created_db, created_collection, created_document, is_name_based),
1090            created_document)
1091        self.assertEqual(replaced_document['name'],
1092                         'replaced document',
1093                         'document id property should change')
1094        self.assertEqual(replaced_document['spam'],
1095                         'not eggs',
1096                         'property should have changed')
1097        self.assertEqual(created_document['id'],
1098                         replaced_document['id'],
1099                         'document id should stay the same')
1100
1101        #replace document based on condition
1102        replaced_document['name'] = 'replaced document based on condition'
1103        replaced_document['spam'] = 'new spam field'
1104
1105        #should fail for stale etag
1106        self.__AssertHTTPFailureWithStatus(
1107            StatusCodes.PRECONDITION_FAILED, self.client.ReplaceItem,
1108                self.GetDocumentLink(created_db, created_collection, replaced_document, is_name_based),
1109                replaced_document, { 'accessCondition' : { 'type': 'IfMatch', 'condition': old_etag } })
1110
1111        #should pass for most recent etag
1112        replaced_document_conditional = self.client.ReplaceItem(
1113            self.GetDocumentLink(created_db, created_collection, replaced_document, is_name_based),
1114            replaced_document, { 'accessCondition' : { 'type': 'IfMatch', 'condition': replaced_document['_etag'] } })
1115        self.assertEqual(replaced_document_conditional['name'],
1116                         'replaced document based on condition',
1117                         'document id property should change')
1118        self.assertEqual(replaced_document_conditional['spam'],
1119                         'new spam field',
1120                         'property should have changed')
1121        self.assertEqual(replaced_document_conditional['id'],
1122                         replaced_document['id'],
1123                         'document id should stay the same')
1124        # read document
1125        one_document_from_read = self.client.ReadItem(
1126            self.GetDocumentLink(created_db, created_collection, replaced_document, is_name_based))
1127        self.assertEqual(replaced_document['id'],
1128                         one_document_from_read['id'])
1129        # delete document
1130        self.client.DeleteItem(self.GetDocumentLink(created_db, created_collection, replaced_document, is_name_based))
1131        # read documents after deletion
1132        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
1133                                           self.client.ReadItem,
1134                                           self.GetDocumentLink(created_db, created_collection, replaced_document, is_name_based))
1135
    def test_partitioning(self):
        """Partition-resolver routing: documents created/upserted/queried via
        the database link are dispatched to the registered member collections.

        From the assertions below, the resolver maps id '0' -> collection0,
        '1' -> collection1, '2' -> collection2, '4' -> collection1.
        """
        # create test database
        created_db = self.databseForTest

        # Create bunch of collections participating in partitioning
        collection0 = self.client.CreateContainer(
            self.GetDatabaseLink(created_db, True),
            { 'id': 'test_partitioning coll_0' + str(uuid.uuid4()) })
        collection1 = self.client.CreateContainer(
            self.GetDatabaseLink(created_db, True),
            { 'id': 'test_partitioning coll_1' + str(uuid.uuid4())})
        collection2 = self.client.CreateContainer(
            self.GetDatabaseLink(created_db, True),
            { 'id': 'test_partitioning coll_2' + str(uuid.uuid4())})

        # Register the collection links for partitioning through partition resolver
        collection_links = [self.GetDocumentCollectionLink(created_db, collection0, True), self.GetDocumentCollectionLink(created_db, collection1, True), self.GetDocumentCollectionLink(created_db, collection2, True)]
        partition_resolver = test_partition_resolver.TestPartitionResolver(collection_links)
        self.client.RegisterPartitionResolver(self.GetDatabaseLink(created_db, True), partition_resolver)

        # create a document using the document definition
        document_definition = { 'id': '0',
                                'name': 'sample document',
                                'key': 'value' }

        self.client.CreateItem(
            self.GetDatabaseLink(created_db, True),
            document_definition)

        # Read the documents in collection0 and verify that the count is 1 now
        documents = list(self.client.ReadItems(
            self.GetDocumentCollectionLink(created_db, collection0, True)))
        self.assertEqual(1, len(documents))

        # Verify that it contains the document with Id 0
        self.assertEqual('0', documents[0]['id'])

        document_definition['id'] = '1'

        self.client.CreateItem(
            self.GetDatabaseLink(created_db, True),
            document_definition)

        # Read the documents in collection1 and verify that the count is 1 now
        documents = list(self.client.ReadItems(
            self.GetDocumentCollectionLink(created_db, collection1, True)))
        self.assertEqual(1, len(documents))

        # Verify that it contains the document with Id 1
        self.assertEqual('1', documents[0]['id'])

        document_definition['id'] = '2'

        self.client.CreateItem(
            self.GetDatabaseLink(created_db, True),
            document_definition)

        # Read the documents in collection2 and verify that the count is 1 now
        documents = list(self.client.ReadItems(
            self.GetDocumentCollectionLink(created_db, collection2, True)))
        self.assertEqual(1, len(documents))

        # Verify that it contains the document with Id 2
        self.assertEqual('2', documents[0]['id'])

        # Updating the value of "key" property to test UpsertDocument(replace scenario)
        document_definition['id'] = '0'
        document_definition['key'] = 'new value'

        self.client.UpsertItem(
            self.GetDatabaseLink(created_db, True),
            document_definition)

        # Read the documents in collection0 and verify that the count is still 1
        documents = list(self.client.ReadItems(
            self.GetDocumentCollectionLink(created_db, collection0, True)))
        self.assertEqual(1, len(documents))

        # Verify that it contains the document with new key value
        self.assertEqual(document_definition['key'], documents[0]['key'])

        # Query documents in all collections(since no partition key specified) using query string
        documents = list(self.client.QueryItems(
            self.GetDatabaseLink(created_db, True),
            {
                'query': 'SELECT * FROM root r WHERE r.id=\'2\''
            }))
        self.assertEqual(1, len(documents))

        # Updating the value of id property to test UpsertDocument(create scenario)
        document_definition['id'] = '4'

        self.client.UpsertItem(
            self.GetDatabaseLink(created_db, True),
            document_definition)

        # Read the documents in collection1 and verify that the count is 2 now
        documents = list(self.client.ReadItems(
            self.GetDocumentCollectionLink(created_db, collection1, True)))
        self.assertEqual(2, len(documents))

        # Query documents in all collections(since no partition key specified) using query spec
        documents = list(self.client.QueryItems(
            self.GetDatabaseLink(created_db, True),
            {
                'query': 'SELECT * FROM root r WHERE r.id=@id',
                'parameters': [
                    { 'name':'@id', 'value':document_definition['id'] }
                ]
            }))
        self.assertEqual(1, len(documents))

        # Query documents in collection(with partition key of '4' specified) which resolves to collection1
        documents = list(self.client.QueryItems(
            self.GetDatabaseLink(created_db, True),
            {
                'query': 'SELECT * FROM root r'
            }, {}, document_definition['id']))
        self.assertEqual(2, len(documents))

        # Query documents in collection(with partition key '5' specified) which resolves to collection2 but non existent document in that collection
        documents = list(self.client.QueryItems(
            self.GetDatabaseLink(created_db, True),
            {
                'query': 'SELECT * FROM root r WHERE r.id=@id',
                'parameters': [
                    { 'name':'@id', 'value':document_definition['id'] }
                ]
            }, {}, '5'))
        self.assertEqual(0, len(documents))

        self.client.DeleteContainer(collection0['_self'])
        self.client.DeleteContainer(collection1['_self'])
        self.client.DeleteContainer(collection2['_self'])
1270
1271    # Partitioning test(with paging)
1272    def test_partition_paging(self):
1273        # create test database
1274        created_db = self.databseForTest
1275
1276        # Create bunch of collections participating in partitioning
1277        collection0 = self.client.CreateContainer(
1278            self.GetDatabaseLink(created_db, True),
1279            { 'id': 'test_partition_paging coll_0 ' + str(uuid.uuid4()) })
1280        collection1 = self.client.CreateContainer(
1281            self.GetDatabaseLink(created_db, True),
1282            { 'id': 'test_partition_paging coll_1 ' + str(uuid.uuid4()) })
1283
1284        # Register the collection links for partitioning through partition resolver
1285        collection_links = [self.GetDocumentCollectionLink(created_db, collection0, True), self.GetDocumentCollectionLink(created_db, collection1, True)]
1286        partition_resolver = test_partition_resolver.TestPartitionResolver(collection_links)
1287        self.client.RegisterPartitionResolver(self.GetDatabaseLink(created_db, True), partition_resolver)
1288
1289        # Create document definition used to create documents
1290        document_definition = { 'id': '0',
1291                                'name': 'sample document',
1292                                'key': 'value' }
1293
1294        # Create 10 documents each with a different id starting from 0 to 9
1295        for i in xrange(0, 10):
1296            document_definition['id'] = str(i)
1297            self.client.CreateItem(
1298                self.GetDatabaseLink(created_db, True),
1299                document_definition)
1300
1301        # Query the documents to ensure that you get the correct count(no paging)
1302        documents = list(self.client.QueryItems(
1303            self.GetDatabaseLink(created_db, True),
1304            {
1305                'query': 'SELECT * FROM root r WHERE r.id < \'7\''
1306            }))
1307        self.assertEqual(7, len(documents))
1308
1309        # Query the documents with maxItemCount to restrict the max number of documents returned
1310        queryIterable = self.client.QueryItems(
1311            self.GetDatabaseLink(created_db, True),
1312            {
1313                'query': 'SELECT * FROM root r WHERE r.id < \'7\''
1314            }, {'maxItemCount':3})
1315
1316        # Query again and count the number of documents(with paging)
1317        docCount = 0
1318        for _ in queryIterable:
1319            docCount += 1
1320
1321        self.assertEqual(7, docCount)
1322
1323        # Query again to test fetch_next_block to ensure that it returns the correct number of documents everytime it's called
1324        queryIterable = self.client.QueryItems(
1325            self.GetDatabaseLink(created_db, True),
1326            {
1327                'query': 'SELECT * FROM root r WHERE r.id < \'7\''
1328            }, {'maxItemCount':3})
1329
1330        # Documents with id 0, 2, 4(in collection0)
1331        self.assertEqual(3, len(queryIterable.fetch_next_block()))
1332
1333        # Documents with id 6(in collection0)
1334        self.assertEqual(1, len(queryIterable.fetch_next_block()))
1335
1336        # Documents with id 1, 3, 5(in collection1)
1337        self.assertEqual(3, len(queryIterable.fetch_next_block()))
1338
1339        # No more documents
1340        self.assertEqual(0, len(queryIterable.fetch_next_block()))
1341
1342        # Set maxItemCount to -1 to lift the limit on max documents returned by the query
1343        queryIterable = self.client.QueryItems(
1344            self.GetDatabaseLink(created_db, True),
1345            {
1346                'query': 'SELECT * FROM root r WHERE r.id < \'7\''
1347            }, {'maxItemCount':-1})
1348
1349        # Documents with id 0, 2, 4, 6(all docs in collection0 adhering to query condition)
1350        self.assertEqual(4, len(queryIterable.fetch_next_block()))
1351
1352        # Documents with id 1, 3, 5(all docs in collection1 adhering to query condition)
1353        self.assertEqual(3, len(queryIterable.fetch_next_block()))
1354
1355        # No more documents
1356        self.assertEqual(0, len(queryIterable.fetch_next_block()))
1357
1358        self.client.DeleteContainer(collection0['_self'])
1359        self.client.DeleteContainer(collection1['_self'])
1360
1361    def test_hash_partition_resolver(self):
1362        created_db = self.databseForTest
1363
1364        # Create bunch of collections participating in partitioning
1365        collection0 = { 'id': 'coll_0 ' + str(uuid.uuid4()) }
1366        collection1 = { 'id': 'coll_1 ' + str(uuid.uuid4()) }
1367
1368        collection_links = [self.GetDocumentCollectionLink(created_db, collection0, True), self.GetDocumentCollectionLink(created_db, collection1, True)]
1369
1370        id_partition_key_extractor = lambda document: document['id']
1371
1372        hashpartition_resolver = hash_partition_resolver.HashPartitionResolver(id_partition_key_extractor, collection_links)
1373
1374        # create a document using the document definition
1375        document_definition = { 'id': '0',
1376                                'name': 'sample document',
1377                                'key': 'value' }
1378
1379        document_definition['id'] = '2'
1380
1381        collection_link = hashpartition_resolver.ResolveForCreate(document_definition)
1382
1383        read_collection_links = hashpartition_resolver.ResolveForRead(document_definition['id'])
1384
1385        self.assertEqual(1, len(read_collection_links))
1386        self.assertEqual(collection_link, read_collection_links[0])
1387
1388    def test_consistent_hash_ring(self):
1389        created_db = { 'id': 'db' }
1390
1391        collection_links = list()
1392        expected_partition_list = list()
1393
1394        total_collections_count = 2
1395
1396        collection = { 'id': 'coll' }
1397
1398        for i in xrange(0, total_collections_count):
1399            collection['id'] = 'coll' + str(i)
1400            collection_link = self.GetDocumentCollectionLink(created_db, collection, True)
1401            collection_links.append(collection_link)
1402
1403        expected_partition_list.append(('dbs/db/colls/coll0', 1076200484))
1404        expected_partition_list.append(('dbs/db/colls/coll0', 1302652881))
1405        expected_partition_list.append(('dbs/db/colls/coll0', 2210251988))
1406        expected_partition_list.append(('dbs/db/colls/coll1', 2341558382))
1407        expected_partition_list.append(('dbs/db/colls/coll0', 2348251587))
1408        expected_partition_list.append(('dbs/db/colls/coll0', 2887945459))
1409        expected_partition_list.append(('dbs/db/colls/coll1', 2894403633))
1410        expected_partition_list.append(('dbs/db/colls/coll1', 3031617259))
1411        expected_partition_list.append(('dbs/db/colls/coll1', 3090861424))
1412        expected_partition_list.append(('dbs/db/colls/coll1', 4222475028))
1413
1414        id_partition_key_extractor = lambda document: document['id']
1415
1416        hashpartition_resolver = hash_partition_resolver.HashPartitionResolver(id_partition_key_extractor, collection_links, 5)
1417
1418        actual_partition_list = hashpartition_resolver.consistent_hash_ring._GetSerializedPartitionList()
1419
1420        self.assertEqual(len(expected_partition_list), len(actual_partition_list))
1421
1422        for i in xrange(0, len(expected_partition_list)):
1423            self.assertEqual(actual_partition_list[i][0], expected_partition_list[i][0])
1424            self.assertEqual(actual_partition_list[i][1], expected_partition_list[i][1])
1425
1426        # Querying for a document and verifying that it's in the expected collection
1427        read_collection_links = hashpartition_resolver.ResolveForRead("beadledom")
1428
1429        self.assertEqual(1, len(read_collection_links))
1430
1431        collection['id'] = 'coll1'
1432        collection_link = self.GetDocumentCollectionLink(created_db, collection, True)
1433
1434        self.assertTrue(collection_link in read_collection_links)
1435
1436        # Querying for a document and verifying that it's in the expected collection
1437        read_collection_links = hashpartition_resolver.ResolveForRead("999")
1438
1439        self.assertEqual(1, len(read_collection_links))
1440
1441        collection['id'] = 'coll0'
1442        collection_link = self.GetDocumentCollectionLink(created_db, collection, True)
1443
1444        self.assertTrue(collection_link in read_collection_links)
1445
1446    def test_murmur_hash(self):
1447        str = 'afdgdd'
1448        bytes = bytearray(str, encoding='utf-8')
1449
1450        hash_value = murmur_hash._MurmurHash._ComputeHash(bytes)
1451        self.assertEqual(1099701186, hash_value)
1452
1453        num = 374.0
1454        bytes = bytearray(pack('d', num))
1455
1456        hash_value = murmur_hash._MurmurHash._ComputeHash(bytes)
1457        self.assertEqual(3717946798, hash_value)
1458
1459        self._validate_bytes("", 0x1B873593, bytearray(b'\xEE\xA8\xA2\x67'), 1738713326)
1460        self._validate_bytes("1", 0xE82562E4, bytearray(b'\xD0\x92\x24\xED'), 3978597072)
1461        self._validate_bytes("00", 0xB4C39035, bytearray(b'\xFA\x09\x64\x1B'), 459540986)
1462        self._validate_bytes("eyetooth", 0x8161BD86, bytearray(b'\x98\x62\x1C\x6F'), 1864131224)
1463        self._validate_bytes("acid", 0x4DFFEAD7, bytearray(b'\x36\x92\xC0\xB9'), 3116405302)
1464        self._validate_bytes("elevation", 0x1A9E1828, bytearray(b'\xA9\xB6\x40\xDF'), 3745560233)
1465        self._validate_bytes("dent", 0xE73C4579, bytearray(b'\xD4\x59\xE1\xD3'), 3554761172)
1466        self._validate_bytes("homeland", 0xB3DA72CA, bytearray(b'\x06\x4D\x72\xBB'), 3144830214)
1467        self._validate_bytes("glamor", 0x8078A01B, bytearray(b'\x89\x89\xA2\xA7'), 2812447113)
1468        self._validate_bytes("flags", 0x4D16CD6C, bytearray(b'\x52\x87\x66\x02'), 40273746)
1469        self._validate_bytes("democracy", 0x19B4FABD, bytearray(b'\xE4\x55\xD6\xB0'), 2966836708)
1470        self._validate_bytes("bumble", 0xE653280E, bytearray(b'\xFE\xD7\xC3\x0C'), 214161406)
1471        self._validate_bytes("catch", 0xB2F1555F, bytearray(b'\x98\x4B\xB6\xCD'), 3451276184)
1472        self._validate_bytes("omnomnomnivore", 0x7F8F82B0, bytearray(b'\x38\xC4\xCD\xFF'), 4291675192)
1473        self._validate_bytes("The quick brown fox jumps over the lazy dog", 0x4C2DB001, bytearray(b'\x6D\xAB\x8D\xC9'), 3381504877)
1474
1475    def _validate_bytes(self, str, seed, expected_hash_bytes, expected_value):
1476        hash_value = murmur_hash._MurmurHash._ComputeHash(bytearray(str, encoding='utf-8'), seed)
1477        bytes = bytearray(pack('I', hash_value))
1478        self.assertEqual(expected_value, hash_value)
1479        self.assertEqual(expected_hash_bytes, bytes)
1480
1481    def test_get_bytes(self):
1482        actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("documentdb")
1483        expected_bytes = bytearray(b'\x64\x6F\x63\x75\x6D\x65\x6E\x74\x64\x62')
1484        self.assertEqual(expected_bytes, actual_bytes)
1485
1486        actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("azure")
1487        expected_bytes = bytearray(b'\x61\x7A\x75\x72\x65')
1488        self.assertEqual(expected_bytes, actual_bytes)
1489
1490        actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("json")
1491        expected_bytes = bytearray(b'\x6A\x73\x6F\x6E')
1492        self.assertEqual(expected_bytes, actual_bytes)
1493
1494        actual_bytes = consistent_hash_ring._ConsistentHashRing._GetBytes("nosql")
1495        expected_bytes = bytearray(b'\x6E\x6F\x73\x71\x6C')
1496        self.assertEqual(expected_bytes, actual_bytes)
1497
1498    def test_range_partition_resolver(self):
1499        # create test database
1500        created_db = self.databseForTest
1501
1502        # Create bunch of collections participating in partitioning
1503        collection0 = { 'id': 'coll_0' }
1504        collection1 = { 'id': 'coll_1' }
1505        collection2 = { 'id': 'coll_2' }
1506
1507        collection_links = [self.GetDocumentCollectionLink(created_db, collection0, True), self.GetDocumentCollectionLink(created_db, collection1, True), self.GetDocumentCollectionLink(created_db, collection2, True)]
1508
1509        val_partition_key_extractor = lambda document: document['val']
1510
1511        ranges =[partition_range.Range(0,400), partition_range.Range(401,800), partition_range.Range(501,1200)]
1512
1513        partition_map = dict([(ranges[0],collection_links[0]), (ranges[1],collection_links[1]), (ranges[2],collection_links[2])])
1514
1515        rangepartition_resolver = range_partition_resolver.RangePartitionResolver(val_partition_key_extractor, partition_map)
1516
1517        # create a document using the document definition
1518        document_definition = { 'id': '0',
1519                                'name': 'sample document',
1520                                'val': 0 }
1521
1522        document_definition['val'] = 400
1523
1524        collection_link = rangepartition_resolver.ResolveForCreate(document_definition)
1525        self.assertEquals(collection_links[0], collection_link)
1526
1527        read_collection_links = rangepartition_resolver.ResolveForRead(600)
1528
1529        self.assertEqual(2, len(read_collection_links))
1530        self.assertTrue(collection_links[1] in read_collection_links)
1531        self.assertTrue(collection_links[2] in read_collection_links)
1532
1533        read_collection_links = rangepartition_resolver.ResolveForRead(partition_range.Range(250, 500))
1534
1535        self.assertEqual(2, len(read_collection_links))
1536        self.assertTrue(collection_links[0] in read_collection_links)
1537        self.assertTrue(collection_links[1] in read_collection_links)
1538
1539        read_collection_links = rangepartition_resolver.ResolveForRead(list([partition_range.Range(250, 500), partition_range.Range(600, 1000)]))
1540
1541        self.assertEqual(3, len(read_collection_links))
1542        self.assertTrue(collection_links[0] in read_collection_links)
1543        self.assertTrue(collection_links[1] in read_collection_links)
1544        self.assertTrue(collection_links[2] in read_collection_links)
1545
1546        read_collection_links = rangepartition_resolver.ResolveForRead(list([50, 100, 600, 1000]))
1547
1548        self.assertEqual(3, len(read_collection_links))
1549        self.assertTrue(collection_links[0] in read_collection_links)
1550        self.assertTrue(collection_links[1] in read_collection_links)
1551        self.assertTrue(collection_links[2] in read_collection_links)
1552
1553        read_collection_links = rangepartition_resolver.ResolveForRead(list([100, None]))
1554
1555        self.assertEqual(3, len(read_collection_links))
1556        self.assertTrue(collection_links[0] in read_collection_links)
1557        self.assertTrue(collection_links[1] in read_collection_links)
1558        self.assertTrue(collection_links[2] in read_collection_links)
1559
1560
1561    # Upsert test for Document resource - selflink version
    def test_document_upsert_self_link(self):
        """Run the document upsert scenario using self-link routing."""
        self._test_document_upsert(False)
1564
1565    # Upsert test for Document resource - name based routing version
    def test_document_upsert_name_based(self):
        """Run the document upsert scenario using name-based routing."""
        self._test_document_upsert(True)
1568
1569    def _test_document_upsert(self, is_name_based):
1570        # create database
1571        created_db = self.databseForTest
1572
1573        # create collection
1574        created_collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
1575
1576        # read documents and check count
1577        documents = list(self.client.ReadItems(
1578            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based)))
1579        before_create_documents_count = len(documents)
1580
1581        # create document definition
1582        document_definition = {'id': 'doc',
1583                               'name': 'sample document',
1584                               'spam': 'eggs',
1585                               'key': 'value'}
1586
1587        # create document using Upsert API
1588        created_document = self.client.UpsertItem(
1589            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1590            document_definition)
1591
1592        # verify id property
1593        self.assertEqual(created_document['id'],
1594                         document_definition['id'])
1595
1596        # read documents after creation and verify updated count
1597        documents = list(self.client.ReadItems(
1598            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based)))
1599        self.assertEqual(
1600            len(documents),
1601            before_create_documents_count + 1,
1602            'create should increase the number of documents')
1603
1604        # update document
1605        created_document['name'] = 'replaced document'
1606        created_document['spam'] = 'not eggs'
1607
1608        # should replace document since it already exists
1609        upserted_document = self.client.UpsertItem(
1610            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1611            created_document)
1612
1613        # verify the changed properties
1614        self.assertEqual(upserted_document['name'],
1615                         created_document['name'],
1616                         'document id property should change')
1617        self.assertEqual(upserted_document['spam'],
1618                         created_document['spam'],
1619                         'property should have changed')
1620
1621        # verify id property
1622        self.assertEqual(upserted_document['id'],
1623                         created_document['id'],
1624                         'document id should stay the same')
1625
1626        # read documents after upsert and verify count doesn't increases again
1627        documents = list(self.client.ReadItems(
1628            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based)))
1629        self.assertEqual(
1630            len(documents),
1631            before_create_documents_count + 1,
1632            'number of documents should remain same')
1633
1634        created_document['id'] = 'new id'
1635
1636        # Upsert should create new document since the id is different
1637        new_document = self.client.UpsertItem(
1638            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based),
1639            created_document)
1640
1641        # verify id property
1642        self.assertEqual(created_document['id'],
1643                         new_document['id'],
1644                         'document id should be same')
1645
1646        # read documents after upsert and verify count increases
1647        documents = list(self.client.ReadItems(
1648            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based)))
1649        self.assertEqual(
1650            len(documents),
1651            before_create_documents_count + 2,
1652            'upsert should increase the number of documents')
1653
1654        # delete documents
1655        self.client.DeleteItem(self.GetDocumentLink(created_db, created_collection, upserted_document, is_name_based))
1656        self.client.DeleteItem(self.GetDocumentLink(created_db, created_collection, new_document, is_name_based))
1657
1658        # read documents after delete and verify count is same as original
1659        documents = list(self.client.ReadItems(
1660            self.GetDocumentCollectionLink(created_db, created_collection, is_name_based)))
1661        self.assertEqual(
1662            len(documents),
1663            before_create_documents_count,
1664            'number of documents should remain same')
1665
1666
    def test_spatial_index_self_link(self):
        """Run the spatial index scenario using self-link routing."""
        self._test_spatial_index(False)
1669
    def test_spatial_index_name_based(self):
        """Run the spatial index scenario using name-based routing."""
        self._test_spatial_index(True)
1672
1673    def _test_spatial_index(self, is_name_based):
1674        db = self.databseForTest
1675        # partial policy specified
1676        collection = self.client.CreateContainer(
1677            self.GetDatabaseLink(db, is_name_based),
1678            {
1679                'id': 'collection with spatial index ' + str(uuid.uuid4()),
1680                'indexingPolicy': {
1681                    'includedPaths': [
1682                        {
1683                            'path': '/"Location"/?',
1684                            'indexes': [
1685                                {
1686                                    'kind': 'Spatial',
1687                                    'dataType': 'Point'
1688                                }
1689                            ]
1690                        },
1691                        {
1692                            'path': '/'
1693                        }
1694                    ]
1695                }
1696            })
1697        self.client.CreateItem(self.GetDocumentCollectionLink(db, collection, is_name_based), {
1698            'id': 'loc1',
1699            'Location': {
1700                'type': 'Point',
1701                'coordinates': [ 20.0, 20.0 ]
1702            }
1703        })
1704        self.client.CreateItem(self.GetDocumentCollectionLink(db, collection, is_name_based), {
1705            'id': 'loc2',
1706            'Location': {
1707                'type': 'Point',
1708                'coordinates': [ 100.0, 100.0 ]
1709            }
1710        })
1711        results = list(self.client.QueryItems(
1712            self.GetDocumentCollectionLink(db, collection, is_name_based),
1713            "SELECT * FROM root WHERE (ST_DISTANCE(root.Location, {type: 'Point', coordinates: [20.1, 20]}) < 20000) "))
1714        self.assertEqual(1, len(results))
1715        self.assertEqual('loc1', results[0]['id'])
1716        self.client.DeleteContainer(collection['_self'])
1717
    def test_attachment_crud_self_link(self):
        """Run the attachment CRUD scenario using self-link routing."""
        self._test_attachment_crud(False)
1720
    def test_attachment_crud_name_based(self):
        """Run the attachment CRUD scenario using name-based routing."""
        self._test_attachment_crud(True)
1723
    def _test_attachment_crud(self, is_name_based):
        """Full attachment lifecycle against a live document: streamed create
        (invalid and valid content types), create by media link, list,
        replace, media read/update in buffered and streamed modes, sharing
        one attachment between two documents, and delete.

        NOTE(review): steps are strictly order-dependent — the media content
        read back later depends on the earlier UpdateMedia call.
        """
        class ReadableStream(object):
            """Customized file-like stream.

            Feeds its chunks back one per read() call; used to simulate
            streamed attachment uploads.
            """

            # NOTE: the mutable default `chunks` is never mutated (it is
            # copied/encoded below), so sharing it across calls is harmless.
            def __init__(self, chunks = ['first chunk ', 'second chunk']):
                """Initialization.

                :Parameters:
                    - `chunks`: list

                """
                if six.PY2:
                    self._chunks = list(chunks)
                else:
                    # python3: convert to bytes
                    self._chunks = [chunk.encode() for chunk in chunks]

            def read(self, n=-1):
                """Simulates the read method in a file stream.

                :Parameters:
                    - `n`: int

                :Returns:
                    str or bytes

                """
                # `n` is intentionally ignored: each call drains one whole chunk.
                if self._chunks:
                    return self._chunks.pop(0)
                else:
                    return ''

            def __len__(self):
                """To make len(ReadableStream) work.
                """
                return sum([len(chunk) for chunk in self._chunks])


        # Should do attachment CRUD operations successfully
        # Buffered mode makes ReadMedia return the full content as a string.
        self.client.connection_policy.MediaReadMode = documents.MediaReadMode.Buffered

        # create database
        db = self.databseForTest
        # create collection
        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
        # create document
        document = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection, is_name_based),
                                         { 'id': 'sample document',
                                           'spam': 'eggs',
                                           'key': 'value' })
        # list all attachments
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        initial_count = len(attachments)
        valid_media_options = { 'slug': 'attachment name',
                                'contentType': 'application/text' }
        invalid_media_options = { 'slug': 'attachment name',
                                  'contentType': 'junt/test' }
        # create attachment with invalid content-type; the service must reject it
        content_stream = ReadableStream()
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.BAD_REQUEST,
            self.client.CreateAttachmentAndUploadMedia,
            self.GetDocumentLink(db, collection, document, is_name_based),
            content_stream,
            invalid_media_options)
        # a fresh stream is needed: the previous one has been drained
        content_stream = ReadableStream()
        # create streamed attachment with valid content-type
        valid_attachment = self.client.CreateAttachmentAndUploadMedia(
            self.GetDocumentLink(db, collection, document, is_name_based), content_stream, valid_media_options)
        self.assertEqual(valid_attachment['id'],
                         'attachment name',
                         'id of created attachment should be the'
                         ' same as the one in the request')
        content_stream = ReadableStream()
        # create colliding attachment; same slug must conflict
        self.__AssertHTTPFailureWithStatus(
            StatusCodes.CONFLICT,
            self.client.CreateAttachmentAndUploadMedia,
            self.GetDocumentLink(db, collection, document, is_name_based),
            content_stream,
            valid_media_options)

        content_stream = ReadableStream()
        # create attachment with media link (no upload, external media reference)
        dynamic_attachment = {
            'id': 'dynamic attachment',
            'media': 'http://xstore.',
            'MediaType': 'Book',
            'Author':'My Book Author',
            'Title':'My Book Title',
            'contentType':'application/text'
        }
        attachment = self.client.CreateAttachment(self.GetDocumentLink(db, collection, document, is_name_based),
                                             dynamic_attachment)
        self.assertEqual(attachment['MediaType'],
                         'Book',
                         'invalid media type')
        self.assertEqual(attachment['Author'],
                         'My Book Author',
                         'invalid property value')
        # list all attachments
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        self.assertEqual(len(attachments),
                         initial_count + 2,
                         'number of attachments should\'ve increased by 2')
        attachment['Author'] = 'new author'
        # replace the attachment
        self.client.ReplaceAttachment(self.GetAttachmentLink(db, collection, document, attachment, is_name_based), attachment)
        self.assertEqual(attachment['MediaType'],
                         'Book',
                         'invalid media type')
        self.assertEqual(attachment['Author'],
                         'new author',
                         'invalid property value')
        # read attachment media (buffered mode returns the concatenated chunks)
        media_response = self.client.ReadMedia(valid_attachment['media'])
        self.assertEqual(media_response,
                         'first chunk second chunk')
        content_stream = ReadableStream(['modified first chunk ',
                                         'modified second chunk'])
        # update attachment media
        self.client.UpdateMedia(valid_attachment['media'],
                           content_stream,
                           valid_media_options)
        # read attachment media after update
        # read media buffered
        media_response = self.client.ReadMedia(valid_attachment['media'])
        self.assertEqual(media_response,
                         'modified first chunk modified second chunk')
        # read media streamed: ReadMedia now returns a stream yielding bytes
        self.client.connection_policy.MediaReadMode = (
            documents.MediaReadMode.Streamed)
        media_response = self.client.ReadMedia(valid_attachment['media'])
        self.assertEqual(media_response.read(),
                         b'modified first chunk modified second chunk')
        # share attachment with a second document via its media link
        document = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection, is_name_based),
                                         {'id': 'document 2'})
        second_attachment = {
            'id': valid_attachment['id'],
            'contentType': valid_attachment['contentType'],
            'media': valid_attachment['media'] }
        attachment = self.client.CreateAttachment(self.GetDocumentLink(db, collection, document, is_name_based),
                                             second_attachment)
        self.assertEqual(valid_attachment['id'],
                         attachment['id'],
                         'id mismatch')
        self.assertEqual(valid_attachment['media'],
                         attachment['media'],
                         'media mismatch')
        self.assertEqual(valid_attachment['contentType'],
                         attachment['contentType'],
                         'contentType mismatch')
        # deleting attachment
        self.client.DeleteAttachment(self.GetAttachmentLink(db, collection, document, attachment, is_name_based))
        # read attachments after deletion (second document is now empty)
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        self.assertEqual(len(attachments), 0)
1883
1884    # Upsert test for Attachment resource - selflink version
    def test_attachment_upsert_self_link(self):
        """Run the attachment upsert scenario using self-link routing."""
        self._test_attachment_upsert(False)
1887
1888    # Upsert test for Attachment resource - name based routing version
    def test_attachment_upsert_name_based(self):
        """Run the attachment upsert scenario using name-based routing."""
        self._test_attachment_upsert(True)
1891
    def _test_attachment_upsert(self, is_name_based):
        """Exercise attachment Upsert APIs against a single document.

        Verifies that UpsertAttachmentAndUploadMedia / UpsertAttachment
        create a new attachment when the id is new, replace the existing
        one when the id matches, and that the attachment count changes
        accordingly.

        :Parameters:
            - `is_name_based`: bool, build name-based resource links
              instead of self-links.

        """
        class ReadableStream(object):
            """Customized file-like stream.
            """

            def __init__(self, chunks = ['first chunk ', 'second chunk']):
                """Initialization.

                :Parameters:
                    - `chunks`: list of str chunks to stream back

                """
                # NOTE(review): mutable default argument -- harmless here
                # because the default is only copied (PY2) or re-encoded
                # (PY3), never mutated in place.
                if six.PY2:
                    self._chunks = list(chunks)
                else:
                    # python3: convert to bytes
                    self._chunks = [chunk.encode() for chunk in chunks]

            def read(self, n=-1):
                """Simulates the read method in a file stream.

                :Parameters:
                    - `n`: int, ignored -- one whole chunk is returned per call

                :Returns:
                    str or bytes

                """
                if self._chunks:
                    return self._chunks.pop(0)
                else:
                    # NOTE(review): returns str '' even on python3 where the
                    # chunks are bytes; the HTTP layer appears to only test
                    # truthiness, but b'' would be more consistent -- confirm.
                    return ''

            def __len__(self):
                """To make len(ReadableStream) work.
                """
                return sum([len(chunk) for chunk in self._chunks])

        # create database (attribute name typo 'databseForTest' matches the
        # test base class; do not "fix" it here)
        db = self.databseForTest

        # create collection
        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)

        # create document to hang the attachments off
        document = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection, is_name_based),
                                         { 'id': 'sample document' + str(uuid.uuid4()),
                                           'spam': 'eggs',
                                           'key': 'value' })

        # list all attachments and remember the baseline count
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        initial_count = len(attachments)

        # 'slug' becomes the attachment id on the service side
        valid_media_options = { 'slug': 'attachment name',
                                'contentType': 'application/text' }
        content_stream = ReadableStream()

        # create streamed attachment with valid content-type using Upsert API
        valid_attachment = self.client.UpsertAttachmentAndUploadMedia(
            self.GetDocumentLink(db, collection, document, is_name_based), content_stream, valid_media_options)

        # verify id property
        self.assertEqual(valid_attachment['id'],
                         'attachment name',
                         'id of created attachment should be the same')

        valid_media_options = { 'slug': 'new attachment name',
                                'contentType': 'application/text' }
        content_stream = ReadableStream()

        # Upsert should create a new attachment since the id (slug) differs
        new_valid_attachment = self.client.UpsertAttachmentAndUploadMedia(
            self.GetDocumentLink(db, collection, document, is_name_based), content_stream, valid_media_options)

        # verify id property
        self.assertEqual(new_valid_attachment['id'],
                         'new attachment name',
                         'id of new attachment should be the same')

        # read all attachments and verify updated count
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        self.assertEqual(len(attachments),
                         initial_count + 2,
                         'number of attachments should have increased by 2')

        # create attachment with media link (no media upload, just metadata)
        attachment_definition = {
            'id': 'dynamic attachment',
            'media': 'http://xstore.',
            'MediaType': 'Book',
            'Author':'My Book Author',
            'Title':'My Book Title',
            'contentType':'application/text'
        }

        # create dynamic attachment using Upsert API
        dynamic_attachment = self.client.UpsertAttachment(self.GetDocumentLink(db, collection, document, is_name_based),
                                             attachment_definition)

        # verify id property
        self.assertEqual(dynamic_attachment['id'],
                         attachment_definition['id'],
                         'id of attachment should be the same')

        # read all attachments and verify updated count
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        self.assertEqual(len(attachments),
                         initial_count + 3,
                         'number of attachments should have increased by 3')

        dynamic_attachment['Author'] = 'new author'

        # replace the existing attachment using Upsert API (same id)
        upserted_attachment = self.client.UpsertAttachment(self.GetDocumentLink(db, collection, document, is_name_based), dynamic_attachment)

        # verify id property remains same
        self.assertEqual(dynamic_attachment['id'],
                         upserted_attachment['id'],
                         'id should stay the same')

        # verify author property gets updated
        self.assertEqual(upserted_attachment['Author'],
                         dynamic_attachment['Author'],
                         'invalid property value')

        # read all attachments and verify count doesn't increase again
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        self.assertEqual(len(attachments),
                         initial_count + 3,
                         'number of attachments should remain same')

        dynamic_attachment['id'] = 'new dynamic attachment'

        # Upsert should create new attachment since id is different
        new_attachment = self.client.UpsertAttachment(self.GetDocumentLink(db, collection, document, is_name_based), dynamic_attachment)

        # verify id property remains same
        self.assertEqual(dynamic_attachment['id'],
                         new_attachment['id'],
                         'id should be same')

        # read all attachments and verify count increases
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        self.assertEqual(len(attachments),
                         initial_count + 4,
                         'number of attachments should have increased')

        # deleting all four attachments created above
        self.client.DeleteAttachment(self.GetAttachmentLink(db, collection, document, valid_attachment, is_name_based))
        self.client.DeleteAttachment(self.GetAttachmentLink(db, collection, document, new_valid_attachment, is_name_based))
        self.client.DeleteAttachment(self.GetAttachmentLink(db, collection, document, upserted_attachment, is_name_based))
        self.client.DeleteAttachment(self.GetAttachmentLink(db, collection, document, new_attachment, is_name_based))

        # wait to ensure deletes are propagated for multimaster enabled accounts
        if self.configs.IS_MULTIMASTER_ENABLED:
            time.sleep(2)

        # read all attachments and verify count is back to the baseline
        attachments = list(self.client.ReadAttachments(self.GetDocumentLink(db, collection, document, is_name_based)))
        self.assertEqual(len(attachments),
                         initial_count,
                         'number of attachments should remain the same')
2055
2056
2057    def test_user_crud_self_link(self):
2058        self._test_user_crud(False)
2059
2060    def test_user_crud_name_based(self):
2061        self._test_user_crud(True)
2062
2063    def _test_user_crud(self, is_name_based):
2064        # Should do User CRUD operations successfully.
2065        # create database
2066        db = self.databseForTest
2067        # list users
2068        users = list(self.client.ReadUsers(self.GetDatabaseLink(db, is_name_based)))
2069        before_create_count = len(users)
2070        # create user
2071        user_id = 'new user' + str(uuid.uuid4())
2072        user = self.client.CreateUser(self.GetDatabaseLink(db, is_name_based), { 'id': user_id })
2073        self.assertEqual(user['id'], user_id, 'user id error')
2074        # list users after creation
2075        users = list(self.client.ReadUsers(self.GetDatabaseLink(db, is_name_based)))
2076        self.assertEqual(len(users), before_create_count + 1)
2077        # query users
2078        results = list(self.client.QueryUsers(
2079            self.GetDatabaseLink(db, is_name_based),
2080            {
2081                'query': 'SELECT * FROM root r WHERE r.id=@id',
2082                'parameters': [
2083                    { 'name':'@id', 'value':user_id }
2084                ]
2085            }))
2086        self.assertTrue(results)
2087
2088        # replace user
2089        change_user = user.copy()
2090        replaced_user_id = 'replaced user' + str(uuid.uuid4())
2091        user['id'] = replaced_user_id
2092        replaced_user = self.client.ReplaceUser(self.GetUserLink(db, change_user, is_name_based), user)
2093        self.assertEqual(replaced_user['id'],
2094                         replaced_user_id,
2095                         'user id should change')
2096        self.assertEqual(user['id'],
2097                         replaced_user['id'],
2098                         'user id should stay the same')
2099        # read user
2100        user = self.client.ReadUser(self.GetUserLink(db, replaced_user, is_name_based))
2101        self.assertEqual(replaced_user['id'], user['id'])
2102        # delete user
2103        self.client.DeleteUser(self.GetUserLink(db, user, is_name_based))
2104        # read user after deletion
2105        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
2106                                           self.client.ReadUser,
2107                                           self.GetUserLink(db, user, is_name_based))
2108
2109
2110    # Upsert test for User resource - selflink version
2111    def test_user_upsert_self_link(self):
2112        self._test_user_upsert(False)
2113
2114    # Upsert test for User resource - named based routing version
2115    def test_user_upsert_name_based(self):
2116        self._test_user_upsert(True)
2117
2118    def _test_user_upsert(self, is_name_based):
2119        # create database
2120        db = self.databseForTest
2121
2122        # read users and check count
2123        users = list(self.client.ReadUsers(self.GetDatabaseLink(db, is_name_based)))
2124        before_create_count = len(users)
2125
2126        # create user using Upsert API
2127        user_id = 'user' + str(uuid.uuid4())
2128        user = self.client.UpsertUser(self.GetDatabaseLink(db, is_name_based), { 'id': user_id })
2129
2130        # verify id property
2131        self.assertEqual(user['id'], user_id, 'user id error')
2132
2133        # read users after creation and verify updated count
2134        users = list(self.client.ReadUsers(self.GetDatabaseLink(db, is_name_based)))
2135        self.assertEqual(len(users), before_create_count + 1)
2136
2137        # Should replace the user since it already exists, there is no public property to change here
2138        upserted_user = self.client.UpsertUser(self.GetDatabaseLink(db, is_name_based), user)
2139
2140        # verify id property
2141        self.assertEqual(upserted_user['id'],
2142                         user['id'],
2143                         'user id should remain same')
2144
2145        # read users after upsert and verify count doesn't increases again
2146        users = list(self.client.ReadUsers(self.GetDatabaseLink(db, is_name_based)))
2147        self.assertEqual(len(users), before_create_count + 1)
2148
2149        user['id'] = 'new user' + str(uuid.uuid4())
2150
2151        # Upsert should create new user since id is different
2152        new_user = self.client.UpsertUser(self.GetDatabaseLink(db, is_name_based), user)
2153
2154        # verify id property
2155        self.assertEqual(new_user['id'], user['id'], 'user id error')
2156
2157        # read users after upsert and verify count increases
2158        users = list(self.client.ReadUsers(self.GetDatabaseLink(db, is_name_based)))
2159        self.assertEqual(len(users), before_create_count + 2)
2160
2161        # delete users
2162        self.client.DeleteUser(self.GetUserLink(db, upserted_user, is_name_based))
2163        self.client.DeleteUser(self.GetUserLink(db, new_user, is_name_based))
2164
2165        # read users after delete and verify count remains the same
2166        users = list(self.client.ReadUsers(self.GetDatabaseLink(db, is_name_based)))
2167        self.assertEqual(len(users), before_create_count)
2168
2169
2170    def test_permission_crud_self_link(self):
2171        self._test_permission_crud(False)
2172
2173    def test_permission_crud_name_based(self):
2174        self._test_permission_crud(True)
2175
2176    def _test_permission_crud(self, is_name_based):
2177        # Should do Permission CRUD operations successfully
2178        # create database
2179        db = self.databseForTest
2180        # create user
2181        user = self.client.CreateUser(self.GetDatabaseLink(db, is_name_based), { 'id': 'new user' + str(uuid.uuid4())})
2182        # list permissions
2183        permissions = list(self.client.ReadPermissions(self.GetUserLink(db, user, is_name_based)))
2184        before_create_count = len(permissions)
2185        permission = {
2186            'id': 'new permission',
2187            'permissionMode': documents.PermissionMode.Read,
2188            'resource': 'dbs/AQAAAA==/colls/AQAAAJ0fgTc='  # A random one.
2189        }
2190        # create permission
2191        permission = self.client.CreatePermission(self.GetUserLink(db, user, is_name_based), permission)
2192        self.assertEqual(permission['id'],
2193                         'new permission',
2194                         'permission id error')
2195        # list permissions after creation
2196        permissions = list(self.client.ReadPermissions(self.GetUserLink(db, user, is_name_based)))
2197        self.assertEqual(len(permissions), before_create_count + 1)
2198        # query permissions
2199        results = list(self.client.QueryPermissions(
2200            self.GetUserLink(db, user, is_name_based),
2201            {
2202                'query': 'SELECT * FROM root r WHERE r.id=@id',
2203                'parameters': [
2204                    { 'name':'@id', 'value':permission['id'] }
2205                ]
2206            }))
2207        self.assert_(results)
2208
2209        # replace permission
2210        change_permission = permission.copy()
2211        permission['id'] = 'replaced permission'
2212        replaced_permission = self.client.ReplacePermission(self.GetPermissionLink(db, user, change_permission, is_name_based),
2213                                                       permission)
2214        self.assertEqual(replaced_permission['id'],
2215                         'replaced permission',
2216                         'permission id should change')
2217        self.assertEqual(permission['id'],
2218                         replaced_permission['id'],
2219                         'permission id should stay the same')
2220        # read permission
2221        permission = self.client.ReadPermission(self.GetPermissionLink(db, user, replaced_permission, is_name_based))
2222        self.assertEqual(replaced_permission['id'], permission['id'])
2223        # delete permission
2224        self.client.DeletePermission(self.GetPermissionLink(db, user, replaced_permission, is_name_based))
2225        # read permission after deletion
2226        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
2227                                           self.client.ReadPermission,
2228                                           self.GetPermissionLink(db, user, permission, is_name_based))
2229
2230    # Upsert test for Permission resource - selflink version
2231    def test_permission_upsert_self_link(self):
2232        self._test_permission_upsert(False)
2233
2234    # Upsert test for Permission resource - name based routing version
2235    def test_permission_upsert_name_based(self):
2236        self._test_permission_upsert(True)
2237
2238    def _test_permission_upsert(self, is_name_based):
2239        # create database
2240        db = self.databseForTest
2241
2242        # create user
2243        user = self.client.CreateUser(self.GetDatabaseLink(db, is_name_based), { 'id': 'new user' + str(uuid.uuid4())})
2244
2245        # read permissions and check count
2246        permissions = list(self.client.ReadPermissions(self.GetUserLink(db, user, is_name_based)))
2247        before_create_count = len(permissions)
2248
2249        permission_definition = {
2250            'id': 'permission',
2251            'permissionMode': documents.PermissionMode.Read,
2252            'resource': 'dbs/AQAAAA==/colls/AQAAAJ0fgTc='  # A random one.
2253        }
2254
2255        # create permission using Upsert API
2256        created_permission = self.client.UpsertPermission(self.GetUserLink(db, user, is_name_based), permission_definition)
2257
2258        # verify id property
2259        self.assertEqual(created_permission['id'],
2260                         permission_definition['id'],
2261                         'permission id error')
2262
2263        # read permissions after creation and verify updated count
2264        permissions = list(self.client.ReadPermissions(self.GetUserLink(db, user, is_name_based)))
2265        self.assertEqual(len(permissions), before_create_count + 1)
2266
2267        # update permission mode
2268        permission_definition['permissionMode'] = documents.PermissionMode.All
2269
2270        # should repace the permission since it already exists
2271        upserted_permission = self.client.UpsertPermission(self.GetUserLink(db, user, is_name_based),
2272                                                       permission_definition)
2273        # verify id property
2274        self.assertEqual(upserted_permission['id'],
2275                         created_permission['id'],
2276                         'permission id should remain same')
2277
2278        # verify changed property
2279        self.assertEqual(upserted_permission['permissionMode'],
2280                         permission_definition['permissionMode'],
2281                         'permissionMode should change')
2282
2283        # read permissions and verify count doesn't increases again
2284        permissions = list(self.client.ReadPermissions(self.GetUserLink(db, user, is_name_based)))
2285        self.assertEqual(len(permissions), before_create_count + 1)
2286
2287        # update permission id
2288        created_permission['id'] = 'new permission'
2289        # resource needs to be changed along with the id in order to create a new permission
2290        created_permission['resource'] = 'dbs/N9EdAA==/colls/N9EdAIugXgA='
2291
2292        # should create new permission since id has changed
2293        new_permission = self.client.UpsertPermission(self.GetUserLink(db, user, is_name_based),
2294                                                       created_permission)
2295        # verify id and resource property
2296        self.assertEqual(new_permission['id'],
2297                         created_permission['id'],
2298                         'permission id should be same')
2299
2300        self.assertEqual(new_permission['resource'],
2301                         created_permission['resource'],
2302                         'permission resource should be same')
2303
2304        # read permissions and verify count increases
2305        permissions = list(self.client.ReadPermissions(self.GetUserLink(db, user, is_name_based)))
2306        self.assertEqual(len(permissions), before_create_count + 2)
2307
2308        # delete permissions
2309        self.client.DeletePermission(self.GetPermissionLink(db, user, upserted_permission, is_name_based))
2310        self.client.DeletePermission(self.GetPermissionLink(db, user, new_permission, is_name_based))
2311
2312        # read permissions and verify count remains the same
2313        permissions = list(self.client.ReadPermissions(self.GetUserLink(db, user, is_name_based)))
2314        self.assertEqual(len(permissions), before_create_count)
2315
2316    def test_authorization(self):
2317        def __SetupEntities(client):
2318            """
2319            Sets up entities for this test.
2320
2321            :Parameters:
2322                - `client`: cosmos_client.CosmosClient
2323
2324            :Returns:
2325                dict
2326
2327            """
2328            # create database
2329            db = self.databseForTest
2330            # create collection1
2331            collection1 = client.CreateContainer(
2332                db['_self'], { 'id': 'test_authorization ' + str(uuid.uuid4()) })
2333            # create document1
2334            document1 = client.CreateItem(collection1['_self'],
2335                                              { 'id': 'coll1doc1',
2336                                                'spam': 'eggs',
2337                                                'key': 'value' })
2338            # create document 2
2339            document2 = client.CreateItem(
2340                collection1['_self'],
2341                { 'id': 'coll1doc2', 'spam': 'eggs2', 'key': 'value2' })
2342            # create attachment
2343            dynamic_attachment = {
2344                'id': 'dynamic attachment',
2345                'media': 'http://xstore.',
2346                'MediaType': 'Book',
2347                'Author': 'My Book Author',
2348                'Title': 'My Book Title',
2349                'contentType': 'application/text'
2350            }
2351            attachment = client.CreateAttachment(document1['_self'],
2352                                                 dynamic_attachment)
2353            # create collection 2
2354            collection2 = client.CreateContainer(
2355                db['_self'],
2356                { 'id': 'test_authorization2 ' + str(uuid.uuid4()) })
2357            # create user1
2358            user1 = client.CreateUser(db['_self'], { 'id': 'user1' })
2359            permission = {
2360                'id': 'permission On Coll1',
2361                'permissionMode': documents.PermissionMode.Read,
2362                'resource': collection1['_self']
2363            }
2364            # create permission for collection1
2365            permission_on_coll1 = client.CreatePermission(user1['_self'],
2366                                                          permission)
2367            self.assertTrue(permission_on_coll1['_token'] != None,
2368                            'permission token is invalid')
2369            permission = {
2370                'id': 'permission On Doc1',
2371                'permissionMode': documents.PermissionMode.All,
2372                'resource': document2['_self']
2373            }
2374            # create permission for document 2
2375            permission_on_doc2 = client.CreatePermission(user1['_self'],
2376                                                         permission)
2377            self.assertTrue(permission_on_doc2['_token'] != None,
2378                            'permission token is invalid')
2379            # create user 2
2380            user2 = client.CreateUser(db['_self'], { 'id': 'user2' })
2381            permission = {
2382                'id': 'permission On coll2',
2383                'permissionMode': documents.PermissionMode.All,
2384                'resource': collection2['_self']
2385            }
2386            # create permission on collection 2
2387            permission_on_coll2 = client.CreatePermission(
2388                user2['_self'], permission)
2389            entities = {
2390                'db': db,
2391                'coll1': collection1,
2392                'coll2': collection2,
2393                'doc1': document1,
2394                'doc2': document2,
2395                'user1': user1,
2396                'user2': user2,
2397                'attachment': attachment,
2398                'permissionOnColl1': permission_on_coll1,
2399                'permissionOnDoc2': permission_on_doc2,
2400                'permissionOnColl2': permission_on_coll2
2401            }
2402            return entities
2403
2404        # Client without any authorization will fail.
2405        client = cosmos_client.CosmosClient(CRUDTests.host, {}, CRUDTests.connectionPolicy)
2406        self.__AssertHTTPFailureWithStatus(StatusCodes.UNAUTHORIZED,
2407                                           list,
2408                                           client.ReadDatabases())
2409        # Client with master key.
2410        client = cosmos_client.CosmosClient(CRUDTests.host,
2411                                                {'masterKey': CRUDTests.masterKey}, CRUDTests.connectionPolicy)
2412        # setup entities
2413        entities = __SetupEntities(client)
2414        resource_tokens = {}
2415        resource_tokens[entities['coll1']['_rid']] = (
2416            entities['permissionOnColl1']['_token'])
2417        resource_tokens[entities['doc1']['_rid']] = (
2418            entities['permissionOnColl1']['_token'])
2419        col1_client = cosmos_client.CosmosClient(
2420            CRUDTests.host, {'resourceTokens': resource_tokens}, CRUDTests.connectionPolicy)
2421        # 1. Success-- Use Col1 Permission to Read
2422        success_coll1 = col1_client.ReadContainer(
2423            entities['coll1']['_self'])
2424        # 2. Failure-- Use Col1 Permission to delete
2425        self.__AssertHTTPFailureWithStatus(StatusCodes.FORBIDDEN,
2426                                           col1_client.DeleteContainer,
2427                                           success_coll1['_self'])
2428        # 3. Success-- Use Col1 Permission to Read All Docs
2429        success_documents = list(col1_client.ReadItems(
2430            success_coll1['_self']))
2431        self.assertTrue(success_documents != None,
2432                        'error reading documents')
2433        self.assertEqual(len(success_documents),
2434                         2,
2435                         'Expected 2 Documents to be succesfully read')
2436        # 4. Success-- Use Col1 Permission to Read Col1Doc1
2437        success_doc = col1_client.ReadItem(entities['doc1']['_self'])
2438        self.assertTrue(success_doc != None, 'error reading document')
2439        self.assertEqual(
2440            success_doc['id'],
2441            entities['doc1']['id'],
2442            'Expected to read children using parent permissions')
2443        col2_client = cosmos_client.CosmosClient(
2444            CRUDTests.host,
2445            { 'permissionFeed': [ entities['permissionOnColl2'] ] }, CRUDTests.connectionPolicy)
2446        doc = {
2447            'CustomProperty1': 'BBBBBB',
2448            'customProperty2': 1000,
2449            'id': entities['doc2']['id']
2450        }
2451        success_doc = col2_client.CreateItem(
2452            entities['coll2']['_self'], doc)
2453        self.assertTrue(success_doc != None, 'error creating document')
2454        self.assertEqual(success_doc['CustomProperty1'],
2455                         doc['CustomProperty1'],
2456                         'document should have been created successfully')
2457
2458        self.client.DeleteContainer(entities['coll1']['_self'])
2459        self.client.DeleteContainer(entities['coll2']['_self'])
2460
2461    def test_trigger_crud_self_link(self):
2462        self._test_trigger_crud(False)
2463
2464    def test_trigger_crud_name_based(self):
2465        self._test_trigger_crud(True)
2466
2467    def _test_trigger_crud(self, is_name_based):
2468        # create database
2469        db = self.databseForTest
2470        # create collection
2471        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
2472        # read triggers
2473        triggers = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2474        # create a trigger
2475        before_create_triggers_count = len(triggers)
2476        trigger_definition = {
2477            'id': 'sample trigger',
2478            'serverScript': 'function() {var x = 10;}',
2479            'triggerType': documents.TriggerType.Pre,
2480            'triggerOperation': documents.TriggerOperation.All
2481        }
2482        trigger = self.client.CreateTrigger(self.GetDocumentCollectionLink(db, collection, is_name_based),
2483                                       trigger_definition)
2484        for property in trigger_definition:
2485            if property != "serverScript":
2486                self.assertEqual(
2487                    trigger[property],
2488                    trigger_definition[property],
2489                    'property {property} should match'.format(property=property))
2490            else:
2491                    self.assertEqual(trigger['body'],
2492                                     'function() {var x = 10;}')
2493
2494        # read triggers after creation
2495        triggers = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2496        self.assertEqual(len(triggers),
2497                         before_create_triggers_count + 1,
2498                         'create should increase the number of triggers')
2499        # query triggers
2500        triggers = list(self.client.QueryTriggers(
2501            self.GetDocumentCollectionLink(db, collection, is_name_based),
2502            {
2503                'query': 'SELECT * FROM root r WHERE r.id=@id',
2504                'parameters': [
2505                    { 'name': '@id', 'value': trigger_definition['id']}
2506                ]
2507            }))
2508        self.assert_(triggers)
2509
2510        # replace trigger
2511        change_trigger = trigger.copy()
2512        trigger['body'] = 'function() {var x = 20;}'
2513        replaced_trigger = self.client.ReplaceTrigger(self.GetTriggerLink(db, collection, change_trigger, is_name_based), trigger)
2514        for property in trigger_definition:
2515            if property != "serverScript":
2516                self.assertEqual(
2517                    replaced_trigger[property],
2518                    trigger[property],
2519                    'property {property} should match'.format(property=property))
2520            else:
2521                self.assertEqual(replaced_trigger['body'],
2522                                 'function() {var x = 20;}')
2523
2524        # read trigger
2525        trigger = self.client.ReadTrigger(self.GetTriggerLink(db, collection, replaced_trigger, is_name_based))
2526        self.assertEqual(replaced_trigger['id'], trigger['id'])
2527        # delete trigger
2528        self.client.DeleteTrigger(self.GetTriggerLink(db, collection, replaced_trigger, is_name_based))
2529        # read triggers after deletion
2530        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
2531                                           self.client.ReadTrigger,
2532                                           self.GetTriggerLink(db, collection, replaced_trigger, is_name_based))
2533
2534    # Upsert test for Trigger resource - selflink version
2535    def test_trigger_upsert_self_link(self):
2536        self._test_trigger_upsert(False)
2537
2538    # Upsert test for Trigger resource - name based routing version
2539    def test_trigger_upsert_name_based(self):
2540        self._test_trigger_upsert(True)
2541
2542    def _test_trigger_upsert(self, is_name_based):
2543        # create database
2544        db = self.databseForTest
2545
2546        # create collection
2547        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
2548
2549        # read triggers and check count
2550        triggers = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2551        before_create_triggers_count = len(triggers)
2552
2553        # create a trigger
2554        trigger_definition = {
2555            'id': 'sample trigger',
2556            'serverScript': 'function() {var x = 10;}',
2557            'triggerType': documents.TriggerType.Pre,
2558            'triggerOperation': documents.TriggerOperation.All
2559        }
2560
2561        # create trigger using Upsert API
2562        created_trigger = self.client.UpsertTrigger(self.GetDocumentCollectionLink(db, collection, is_name_based),
2563                                       trigger_definition)
2564
2565        # verify id property
2566        self.assertEqual(created_trigger['id'],
2567                         trigger_definition['id'])
2568
2569        # read triggers after creation and verify updated count
2570        triggers = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2571        self.assertEqual(len(triggers),
2572                         before_create_triggers_count + 1,
2573                         'create should increase the number of triggers')
2574
2575        # update trigger
2576        created_trigger['body'] = 'function() {var x = 20;}'
2577
2578        # should replace trigger since it already exists
2579        upserted_trigger = self.client.UpsertTrigger(self.GetDocumentCollectionLink(db, collection, is_name_based), created_trigger)
2580
2581        # verify id property
2582        self.assertEqual(created_trigger['id'],
2583                         upserted_trigger['id'])
2584
2585        # verify changed properties
2586        self.assertEqual(upserted_trigger['body'],
2587                                 created_trigger['body'])
2588
2589        # read triggers after upsert and verify count remains same
2590        triggers = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2591        self.assertEqual(len(triggers),
2592                         before_create_triggers_count + 1,
2593                         'upsert should keep the number of triggers same')
2594
2595        # update trigger
2596        created_trigger['id'] = 'new trigger'
2597
2598        # should create new trigger since id is changed
2599        new_trigger = self.client.UpsertTrigger(self.GetDocumentCollectionLink(db, collection, is_name_based), created_trigger)
2600
2601        # verify id property
2602        self.assertEqual(created_trigger['id'],
2603                         new_trigger['id'])
2604
2605        # read triggers after upsert and verify count increases
2606        triggers = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2607        self.assertEqual(len(triggers),
2608                         before_create_triggers_count + 2,
2609                         'upsert should increase the number of triggers')
2610
2611        # delete triggers
2612        self.client.DeleteTrigger(self.GetTriggerLink(db, collection, upserted_trigger, is_name_based))
2613        self.client.DeleteTrigger(self.GetTriggerLink(db, collection, new_trigger, is_name_based))
2614
2615        # read triggers after delete and verify count remains the same
2616        triggers = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2617        self.assertEqual(len(triggers),
2618                         before_create_triggers_count,
2619                         'delete should bring the number of triggers to original')
2620
2621
    def test_udf_crud_self_link(self):
        """Runs the UDF CRUD scenario using self-link routing."""
        self._test_udf_crud(False)
2624
    def test_udf_crud_name_based(self):
        """Runs the UDF CRUD scenario using name-based routing."""
        self._test_udf_crud(True)
2627
2628    def _test_udf_crud(self, is_name_based):
2629        # create database
2630        db = self.databseForTest
2631        # create collection
2632        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
2633        # read udfs
2634        udfs = list(self.client.ReadUserDefinedFunctions(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2635        # create a udf
2636        before_create_udfs_count = len(udfs)
2637        udf_definition = {
2638            'id': 'sample udf',
2639            'body': 'function() {var x = 10;}'
2640        }
2641        udf = self.client.CreateUserDefinedFunction(self.GetDocumentCollectionLink(db, collection, is_name_based),
2642                                               udf_definition)
2643        for property in udf_definition:
2644                self.assertEqual(
2645                    udf[property],
2646                    udf_definition[property],
2647                    'property {property} should match'.format(property=property))
2648
2649        # read udfs after creation
2650        udfs = list(self.client.ReadUserDefinedFunctions(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2651        self.assertEqual(len(udfs),
2652                         before_create_udfs_count + 1,
2653                         'create should increase the number of udfs')
2654        # query udfs
2655        results = list(self.client.QueryUserDefinedFunctions(
2656            self.GetDocumentCollectionLink(db, collection, is_name_based),
2657            {
2658                'query': 'SELECT * FROM root r WHERE r.id=@id',
2659                'parameters': [
2660                    {'name':'@id', 'value':udf_definition['id']}
2661                ]
2662            }))
2663        self.assert_(results)
2664        # replace udf
2665        change_udf = udf.copy()
2666        udf['body'] = 'function() {var x = 20;}'
2667        replaced_udf = self.client.ReplaceUserDefinedFunction(self.GetUserDefinedFunctionLink(db, collection, change_udf, is_name_based), udf)
2668        for property in udf_definition:
2669                self.assertEqual(
2670                    replaced_udf[property],
2671                    udf[property],
2672                    'property {property} should match'.format(property=property))
2673        # read udf
2674        udf = self.client.ReadUserDefinedFunction(self.GetUserDefinedFunctionLink(db, collection, replaced_udf, is_name_based))
2675        self.assertEqual(replaced_udf['id'], udf['id'])
2676        # delete udf
2677        self.client.DeleteUserDefinedFunction(self.GetUserDefinedFunctionLink(db, collection, replaced_udf, is_name_based))
2678        # read udfs after deletion
2679        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
2680                                           self.client.ReadUserDefinedFunction,
2681                                           self.GetUserDefinedFunctionLink(db, collection, replaced_udf, is_name_based))
2682
2683
2684    # Upsert test for User Defined Function resource - selflink version
    def test_udf_upsert_self_link(self):
        """Runs the UDF upsert scenario using self-link routing."""
        self._test_udf_upsert(False)
2687
2688    # Upsert test for User Defined Function resource - name based routing version
    def test_udf_upsert_name_based(self):
        """Runs the UDF upsert scenario using name-based routing."""
        self._test_udf_upsert(True)
2691
2692    def _test_udf_upsert(self, is_name_based):
2693        # create database
2694        db = self.databseForTest
2695
2696        # create collection
2697        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
2698
2699        # read udfs and check count
2700        udfs = list(self.client.ReadUserDefinedFunctions(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2701        before_create_udfs_count = len(udfs)
2702
2703        # create a udf definition
2704        udf_definition = {
2705            'id': 'sample udf',
2706            'body': 'function() {var x = 10;}'
2707        }
2708
2709        # create udf using Upsert API
2710        created_udf = self.client.UpsertUserDefinedFunction(self.GetDocumentCollectionLink(db, collection, is_name_based),
2711                                               udf_definition)
2712
2713        # verify id property
2714        self.assertEqual(created_udf['id'],
2715                         udf_definition['id'])
2716
2717        # read udfs after creation and verify updated count
2718        udfs = list(self.client.ReadUserDefinedFunctions(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2719        self.assertEqual(len(udfs),
2720                         before_create_udfs_count + 1,
2721                         'create should increase the number of udfs')
2722
2723        # update udf
2724        created_udf['body'] = 'function() {var x = 20;}'
2725
2726        # should replace udf since it already exists
2727        upserted_udf = self.client.UpsertUserDefinedFunction(self.GetDocumentCollectionLink(db, collection, is_name_based), created_udf)
2728
2729        # verify id property
2730        self.assertEqual(created_udf['id'],
2731                         upserted_udf['id'])
2732
2733        # verify changed property
2734        self.assertEqual(upserted_udf['body'],
2735                                 created_udf['body'])
2736
2737        # read udf and verify count doesn't increases again
2738        udfs = list(self.client.ReadUserDefinedFunctions(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2739        self.assertEqual(len(udfs),
2740                         before_create_udfs_count + 1,
2741                         'upsert should keep the number of udfs same')
2742
2743        created_udf['id'] = 'new udf'
2744
2745        # should create new udf since the id is different
2746        new_udf = self.client.UpsertUserDefinedFunction(self.GetDocumentCollectionLink(db, collection, is_name_based), created_udf)
2747
2748        # verify id property
2749        self.assertEqual(created_udf['id'],
2750                         new_udf['id'])
2751
2752        # read udf and verify count increases
2753        udfs = list(self.client.ReadUserDefinedFunctions(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2754        self.assertEqual(len(udfs),
2755                         before_create_udfs_count + 2,
2756                         'upsert should keep the number of udfs same')
2757
2758        # delete udfs
2759        self.client.DeleteUserDefinedFunction(self.GetUserDefinedFunctionLink(db, collection, upserted_udf, is_name_based))
2760        self.client.DeleteUserDefinedFunction(self.GetUserDefinedFunctionLink(db, collection, new_udf, is_name_based))
2761
2762        # read udf and verify count remains the same
2763        udfs = list(self.client.ReadUserDefinedFunctions(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2764        self.assertEqual(len(udfs),
2765                         before_create_udfs_count,
2766                         'delete should keep the number of udfs same')
2767
2768
    def test_sproc_crud_self_link(self):
        """Runs the stored procedure CRUD scenario using self-link routing."""
        self._test_sproc_crud(False)
2771
    def test_sproc_crud_name_based(self):
        """Runs the stored procedure CRUD scenario using name-based routing."""
        self._test_sproc_crud(True)
2774
2775    def _test_sproc_crud(self, is_name_based):
2776        # create database
2777        db = self.databseForTest
2778        # create collection
2779        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
2780        # read sprocs
2781        sprocs = list(self.client.ReadStoredProcedures(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2782        # create a sproc
2783        before_create_sprocs_count = len(sprocs)
2784        sproc_definition = {
2785            'id': 'sample sproc',
2786            'serverScript': 'function() {var x = 10;}'
2787        }
2788        sproc = self.client.CreateStoredProcedure(self.GetDocumentCollectionLink(db, collection, is_name_based),
2789                                             sproc_definition)
2790        for property in sproc_definition:
2791            if property != "serverScript":
2792                self.assertEqual(
2793                    sproc[property],
2794                    sproc_definition[property],
2795                    'property {property} should match'.format(property=property))
2796            else:
2797                self.assertEqual(sproc['body'], 'function() {var x = 10;}')
2798
2799        # read sprocs after creation
2800        sprocs = list(self.client.ReadStoredProcedures(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2801        self.assertEqual(len(sprocs),
2802                         before_create_sprocs_count + 1,
2803                         'create should increase the number of sprocs')
2804        # query sprocs
2805        sprocs = list(self.client.QueryStoredProcedures(
2806            self.GetDocumentCollectionLink(db, collection, is_name_based),
2807            {
2808                'query': 'SELECT * FROM root r WHERE r.id=@id',
2809                'parameters':[
2810                    { 'name':'@id', 'value':sproc_definition['id'] }
2811                ]
2812            }))
2813        self.assert_(sprocs)
2814        # replace sproc
2815        change_sproc = sproc.copy()
2816        sproc['body'] = 'function() {var x = 20;}'
2817        replaced_sproc = self.client.ReplaceStoredProcedure(self.GetStoredProcedureLink(db, collection, change_sproc, is_name_based),
2818                                                       sproc)
2819        for property in sproc_definition:
2820            if property != 'serverScript':
2821                self.assertEqual(
2822                    replaced_sproc[property],
2823                    sproc[property],
2824                    'property {property} should match'.format(property=property))
2825            else:
2826                self.assertEqual(replaced_sproc['body'],
2827                                 "function() {var x = 20;}")
2828        # read sproc
2829        sproc = self.client.ReadStoredProcedure(self.GetStoredProcedureLink(db, collection, replaced_sproc, is_name_based))
2830        self.assertEqual(replaced_sproc['id'], sproc['id'])
2831        # delete sproc
2832        self.client.DeleteStoredProcedure(self.GetStoredProcedureLink(db, collection, replaced_sproc, is_name_based))
2833        # read sprocs after deletion
2834        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND,
2835                                           self.client.ReadStoredProcedure,
2836                                           self.GetStoredProcedureLink(db, collection, replaced_sproc, is_name_based))
2837
2838    # Upsert test for sproc resource - selflink version
    def test_sproc_upsert_self_link(self):
        """Runs the stored procedure upsert scenario using self-link routing."""
        self._test_sproc_upsert(False)
2841
2842    # Upsert test for sproc resource - name based routing version
    def test_sproc_upsert_name_based(self):
        """Runs the stored procedure upsert scenario using name-based routing."""
        self._test_sproc_upsert(True)
2845
2846    def _test_sproc_upsert(self, is_name_based):
2847        # create database
2848        db = self.databseForTest
2849
2850        # create collection
2851        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
2852
2853        # read sprocs and check count
2854        sprocs = list(self.client.ReadStoredProcedures(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2855        before_create_sprocs_count = len(sprocs)
2856
2857        # create a sproc definition
2858        sproc_definition = {
2859            'id': 'sample sproc',
2860            'serverScript': 'function() {var x = 10;}'
2861        }
2862
2863        # create sproc using Upsert API
2864        created_sproc = self.client.UpsertStoredProcedure(self.GetDocumentCollectionLink(db, collection, is_name_based),
2865                                             sproc_definition)
2866
2867        # verify id property
2868        self.assertEqual(created_sproc['id'],
2869                         sproc_definition['id'])
2870
2871        # read sprocs after creation and verify updated count
2872        sprocs = list(self.client.ReadStoredProcedures(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2873        self.assertEqual(len(sprocs),
2874                         before_create_sprocs_count + 1,
2875                         'create should increase the number of sprocs')
2876
2877        # update sproc
2878        created_sproc['body'] = 'function() {var x = 20;}'
2879
2880        # should replace sproc since it already exists
2881        upserted_sproc = self.client.UpsertStoredProcedure(self.GetDocumentCollectionLink(db, collection, is_name_based),
2882                                                       created_sproc)
2883
2884        # verify id property
2885        self.assertEqual(created_sproc['id'],
2886                         upserted_sproc['id'])
2887
2888        # verify changed property
2889        self.assertEqual(upserted_sproc['body'],
2890                                 created_sproc['body'])
2891
2892        # read sprocs after upsert and verify count remains the same
2893        sprocs = list(self.client.ReadStoredProcedures(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2894        self.assertEqual(len(sprocs),
2895                         before_create_sprocs_count + 1,
2896                         'upsert should keep the number of sprocs same')
2897
2898        # update sproc
2899        created_sproc['id'] = 'new sproc'
2900
2901        # should create new sproc since id is different
2902        new_sproc = self.client.UpsertStoredProcedure(self.GetDocumentCollectionLink(db, collection, is_name_based),
2903                                                       created_sproc)
2904
2905        # verify id property
2906        self.assertEqual(created_sproc['id'],
2907                         new_sproc['id'])
2908
2909        # read sprocs after upsert and verify count increases
2910        sprocs = list(self.client.ReadStoredProcedures(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2911        self.assertEqual(len(sprocs),
2912                         before_create_sprocs_count + 2,
2913                         'upsert should keep the number of sprocs same')
2914
2915        # delete sprocs
2916        self.client.DeleteStoredProcedure(self.GetStoredProcedureLink(db, collection, upserted_sproc, is_name_based))
2917        self.client.DeleteStoredProcedure(self.GetStoredProcedureLink(db, collection, new_sproc, is_name_based))
2918
2919        # read sprocs after delete and verify count remains same
2920        sprocs = list(self.client.ReadStoredProcedures(self.GetDocumentCollectionLink(db, collection, is_name_based)))
2921        self.assertEqual(len(sprocs),
2922                         before_create_sprocs_count,
2923                         'delete should keep the number of sprocs same')
2924
2925    def test_scipt_logging_execute_stored_procedure(self):
2926        created_db = self.databseForTest
2927
2928        created_collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
2929
2930        sproc = {
2931            'id': 'storedProcedure' + str(uuid.uuid4()),
2932            'body': (
2933                'function () {' +
2934                '   var mytext = \'x\';' +
2935                '   var myval = 1;' +
2936                '   try {' +
2937                '       console.log(\'The value of %s is %s.\', mytext, myval);' +
2938                '       getContext().getResponse().setBody(\'Success!\');' +
2939                '   }' +
2940                '   catch (err) {' +
2941                '       getContext().getResponse().setBody(\'inline err: [\' + err.number + \'] \' + err);' +
2942                '   }'
2943                '}')
2944            }
2945
2946        created_sproc = self.client.CreateStoredProcedure(self.GetDocumentCollectionLink(created_db, created_collection), sproc)
2947
2948        result = self.client.ExecuteStoredProcedure(self.GetStoredProcedureLink(created_db, created_collection, created_sproc), None)
2949
2950        self.assertEqual(result, 'Success!')
2951        self.assertFalse(HttpHeaders.ScriptLogResults in self.client.last_response_headers)
2952
2953        options = { 'enableScriptLogging': True }
2954        result = self.client.ExecuteStoredProcedure(self.GetStoredProcedureLink(created_db, created_collection, created_sproc), None, options)
2955
2956        self.assertEqual(result, 'Success!')
2957        self.assertEqual(urllib.quote('The value of x is 1.'), self.client.last_response_headers.get(HttpHeaders.ScriptLogResults))
2958
2959        options = { 'enableScriptLogging': False }
2960        result = self.client.ExecuteStoredProcedure(self.GetStoredProcedureLink(created_db, created_collection, created_sproc), None, options)
2961
2962        self.assertEqual(result, 'Success!')
2963        self.assertFalse(HttpHeaders.ScriptLogResults in self.client.last_response_headers)
2964
    def test_collection_indexing_policy_self_link(self):
        """Runs the indexing policy scenario using self-link routing."""
        self._test_collection_indexing_policy(False)
2967
    def test_collection_indexing_policy_name_based(self):
        """Runs the indexing policy scenario using name-based routing."""
        self._test_collection_indexing_policy(True)
2970
2971    def _test_collection_indexing_policy(self, is_name_based):
2972        # create database
2973        db = self.databseForTest
2974        # create collection
2975        collection = self.client.CreateContainer(
2976            self.GetDatabaseLink(db, is_name_based),
2977            { 'id': 'test_collection_indexing_policy default policy' + str(uuid.uuid4()) })
2978        self.assertEqual(collection['indexingPolicy']['indexingMode'],
2979                         documents.IndexingMode.Consistent,
2980                         'default indexing mode should be consistent')
2981        lazy_collection_definition = {
2982            'id': 'test_collection_indexing_policy lazy collection ' + str(uuid.uuid4()),
2983            'indexingPolicy': {
2984                'indexingMode': documents.IndexingMode.Lazy
2985            }
2986        }
2987        self.client.DeleteContainer(self.GetDocumentCollectionLink(db, collection, is_name_based))
2988        lazy_collection = self.client.CreateContainer(
2989            self.GetDatabaseLink(db, is_name_based),
2990            lazy_collection_definition)
2991        self.assertEqual(lazy_collection['indexingPolicy']['indexingMode'],
2992                         documents.IndexingMode.Lazy,
2993                         'indexing mode should be lazy')
2994
2995        consistent_collection_definition = {
2996            'id': 'test_collection_indexing_policy consistent collection ' + str(uuid.uuid4()),
2997            'indexingPolicy': {
2998                'indexingMode': documents.IndexingMode.Consistent
2999            }
3000        }
3001        self.client.DeleteContainer(self.GetDocumentCollectionLink(db, lazy_collection, is_name_based))
3002        consistent_collection = self.client.CreateContainer(
3003            self.GetDatabaseLink(db, is_name_based), consistent_collection_definition)
3004        self.assertEqual(collection['indexingPolicy']['indexingMode'],
3005                         documents.IndexingMode.Consistent,
3006                         'indexing mode should be consistent')
3007        collection_definition = {
3008            'id': 'CollectionWithIndexingPolicy',
3009            'indexingPolicy': {
3010                'automatic': True,
3011                'indexingMode': documents.IndexingMode.Lazy,
3012                'includedPaths': [
3013                    {
3014                        'path': '/',
3015                        'indexes': [
3016                            {
3017                                'kind': documents.IndexKind.Hash,
3018                                'dataType': documents.DataType.Number,
3019                                'precision': 2
3020                            }
3021                        ]
3022                    }
3023                ],
3024                'excludedPaths': [
3025                    {
3026                        'path': '/"systemMetadata"/*'
3027                    }
3028                ]
3029            }
3030        }
3031        self.client.DeleteContainer(self.GetDocumentCollectionLink(db, consistent_collection, is_name_based))
3032        collection_with_indexing_policy = self.client.CreateContainer(self.GetDatabaseLink(db, is_name_based), collection_definition)
3033        self.assertEqual(1,
3034                         len(collection_with_indexing_policy['indexingPolicy']['includedPaths']),
3035                         'Unexpected includedPaths length')
3036        self.assertEqual(2,
3037                         len(collection_with_indexing_policy['indexingPolicy']['excludedPaths']),
3038                         'Unexpected excluded path count')
3039        self.client.DeleteContainer(self.GetDocumentCollectionLink(db, collection_with_indexing_policy, is_name_based))
3040
    def test_create_default_indexing_policy_self_link(self):
        """Runs the default indexing policy scenario using self-link routing."""
        self._test_create_default_indexing_policy(False)
3043
    def test_create_default_indexing_policy_name_based(self):
        """Runs the default indexing policy scenario using name-based routing."""
        self._test_create_default_indexing_policy(True)
3046
3047    def _test_create_default_indexing_policy(self, is_name_based):
3048        # create database
3049        db = self.databseForTest
3050
3051        # no indexing policy specified
3052        collection = self.client.CreateContainer(self.GetDatabaseLink(db, is_name_based),
3053                                                 {'id': 'test_create_default_indexing_policy TestCreateDefaultPolicy01' + str(uuid.uuid4())})
3054        self._check_default_indexing_policy_paths(collection['indexingPolicy'])
3055        self.client.DeleteContainer(collection['_self'])
3056
3057        # partial policy specified
3058        collection = self.client.CreateContainer(
3059            self.GetDatabaseLink(db, is_name_based),
3060            {
3061                'id': 'test_create_default_indexing_policy TestCreateDefaultPolicy02' + str(uuid.uuid4()),
3062                'indexingPolicy': {
3063                    'indexingMode': documents.IndexingMode.Lazy, 'automatic': True
3064                }
3065            })
3066        self._check_default_indexing_policy_paths(collection['indexingPolicy'])
3067        self.client.DeleteContainer(collection['_self'])
3068
3069        # default policy
3070        collection = self.client.CreateContainer(
3071            self.GetDatabaseLink(db, is_name_based),
3072            {
3073                'id': 'test_create_default_indexing_policy TestCreateDefaultPolicy03' + str(uuid.uuid4()),
3074                'indexingPolicy': { }
3075            })
3076        self._check_default_indexing_policy_paths(collection['indexingPolicy'])
3077        self.client.DeleteContainer(collection['_self'])
3078
3079        # missing indexes
3080        collection = self.client.CreateContainer(
3081            self.GetDatabaseLink(db, is_name_based),
3082            {
3083                'id': 'test_create_default_indexing_policy TestCreateDefaultPolicy04' + str(uuid.uuid4()),
3084                'indexingPolicy': {
3085                    'includedPaths': [
3086                        {
3087                            'path': '/*'
3088                        }
3089                    ]
3090                }
3091            })
3092        self._check_default_indexing_policy_paths(collection['indexingPolicy'])
3093        self.client.DeleteContainer(collection['_self'])
3094
3095        # missing precision
3096        collection = self.client.CreateContainer(
3097            self.GetDatabaseLink(db, is_name_based),
3098            {
3099                'id': 'test_create_default_indexing_policy TestCreateDefaultPolicy05' + str(uuid.uuid4()),
3100                'indexingPolicy': {
3101                    'includedPaths': [
3102                        {
3103                            'path': '/*',
3104                            'indexes': [
3105                                {
3106                                    'kind': documents.IndexKind.Hash,
3107                                    'dataType': documents.DataType.String
3108                                },
3109                                {
3110                                    'kind': documents.IndexKind.Range,
3111                                    'dataType': documents.DataType.Number
3112                                }
3113                            ]
3114                        }
3115                    ]
3116                }
3117            })
3118        self._check_default_indexing_policy_paths(collection['indexingPolicy'])
3119        self.client.DeleteContainer(collection['_self'])
3120
    def test_create_indexing_policy_with_composite_and_spatial_indexes_self_link(self):
        """Runs the composite/spatial index scenario using self-link routing."""
        self._test_create_indexing_policy_with_composite_and_spatial_indexes(False)
3123
    def test_create_indexing_policy_with_composite_and_spatial_indexes_name_based(self):
        """Runs the composite/spatial index scenario using name-based routing."""
        self._test_create_indexing_policy_with_composite_and_spatial_indexes(True)
3126
3127    def _test_create_indexing_policy_with_composite_and_spatial_indexes(self, is_name_based):
3128        # create database
3129        db = self.databseForTest
3130
3131        indexing_policy = {
3132            "spatialIndexes": [
3133                {
3134                    "path": "/path0/*",
3135                    "types": [
3136                        "Point",
3137                        "LineString",
3138                        "Polygon"
3139                    ]
3140                },
3141                {
3142                    "path": "/path1/*",
3143                    "types": [
3144                        "LineString",
3145                        "Polygon",
3146                        "MultiPolygon"
3147                    ]
3148                }
3149            ],
3150            "compositeIndexes": [
3151                [
3152                    {
3153                        "path": "/path1",
3154                        "order": "ascending"
3155                    },
3156                    {
3157                        "path": "/path2",
3158                        "order": "descending"
3159                    },
3160                    {
3161                        "path": "/path3",
3162                        "order": "ascending"
3163                    }
3164                ],
3165                [
3166                    {
3167                        "path": "/path4",
3168                        "order": "ascending"
3169                    },
3170                    {
3171                        "path": "/path5",
3172                        "order": "descending"
3173                    },
3174                    {
3175                        "path": "/path6",
3176                        "order": "ascending"
3177                    }
3178                ]
3179            ]
3180        }
3181
3182        container_id = 'composite_index_spatial_index' + str(uuid.uuid4())
3183        container_definition = {'id': container_id, 'indexingPolicy': indexing_policy}
3184        created_container = self.client.CreateContainer(self.GetDatabaseLink(db, is_name_based), container_definition)
3185        read_indexing_policy = created_container['indexingPolicy']
3186
3187        # All types are returned for spatial Indexes
3188        indexing_policy['spatialIndexes'][0]['types'].append('MultiPolygon')
3189        indexing_policy['spatialIndexes'][1]['types'].insert(0, 'Point')
3190
3191        self.assertListEqual(indexing_policy['spatialIndexes'], read_indexing_policy['spatialIndexes'])
3192        self.assertListEqual(indexing_policy['compositeIndexes'], read_indexing_policy['compositeIndexes'])
3193        self.client.DeleteContainer(created_container['_self'])
3194
3195    def _check_default_indexing_policy_paths(self, indexing_policy):
3196        def __get_first(array):
3197            if array:
3198                return array[0]
3199            else:
3200                return None
3201
3202        # '/_etag' is present in excluded paths by default
3203        self.assertEqual(1, len(indexing_policy['excludedPaths']))
3204        # included paths should be 1: '/'.
3205        self.assertEqual(1, len(indexing_policy['includedPaths']))
3206
3207        root_included_path = __get_first([included_path for included_path in indexing_policy['includedPaths']
3208                              if included_path['path'] == '/*'])
3209        self.assertFalse('indexes' in root_included_path)
3210
3211    def test_client_request_timeout(self):
3212        connection_policy = documents.ConnectionPolicy()
3213        # making timeout 0 ms to make sure it will throw
3214        connection_policy.RequestTimeout = 0
3215        with self.assertRaises(Exception):
3216            # client does a getDatabaseAccount on initialization, which will time out
3217            cosmos_client.CosmosClient(CRUDTests.host,
3218                                                {'masterKey': CRUDTests.masterKey},
3219                                                connection_policy)
3220
3221    def test_client_request_timeout_when_connection_retry_configuration_specified(self):
3222        connection_policy = documents.ConnectionPolicy()
3223        # making timeout 0 ms to make sure it will throw
3224        connection_policy.RequestTimeout = 0
3225        connection_policy.ConnectionRetryConfiguration = Retry(
3226                                                            total=3,
3227                                                            read=3,
3228                                                            connect=3,
3229                                                            backoff_factor=0.3,
3230                                                            status_forcelist=(500, 502, 504)
3231                                                        )
3232        with self.assertRaises(Exception):
3233            # client does a getDatabaseAccount on initialization, which will time out
3234            cosmos_client.CosmosClient(CRUDTests.host,
3235                                                {'masterKey': CRUDTests.masterKey},
3236                                                connection_policy)
3237
3238    def test_client_connection_retry_configuration(self):
3239        total_time_for_two_retries = self.initialize_client_with_connection_retry_config(2)
3240        total_time_for_three_retries = self.initialize_client_with_connection_retry_config(3)
3241        self.assertGreater(total_time_for_three_retries, total_time_for_two_retries)
3242
3243    def initialize_client_with_connection_retry_config(self, retries):
3244        connection_policy = documents.ConnectionPolicy()
3245        connection_policy.ConnectionRetryConfiguration = Retry(
3246                                                            total=retries,
3247                                                            read=retries,
3248                                                            connect=retries,
3249                                                            backoff_factor=0.3,
3250                                                            status_forcelist=(500, 502, 504)
3251                                                        )
3252        start_time = time.time()
3253        try:
3254            cosmos_client.CosmosClient("https://localhost:9999",
3255                                                {'masterKey': CRUDTests.masterKey},
3256                                                connection_policy)
3257            self.fail()
3258        except ConnectionError as e:
3259            end_time = time.time()
3260            return end_time - start_time
3261
3262    def test_query_iterable_functionality(self):
3263        def __CreateResources(client):
3264            """Creates resources for this test.
3265
3266            :Parameters:
3267                - `client`: cosmos_client.CosmosClient
3268
3269            :Returns:
3270                dict
3271
3272            """
3273            db = self.databseForTest
3274            collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
3275            doc1 = client.CreateItem(
3276                collection['_self'],
3277                { 'id': 'doc1', 'prop1': 'value1'})
3278            doc2 = client.CreateItem(
3279                collection['_self'],
3280                { 'id': 'doc2', 'prop1': 'value2'})
3281            doc3 = client.CreateItem(
3282                collection['_self'],
3283                { 'id': 'doc3', 'prop1': 'value3'})
3284            resources = {
3285                'coll': collection,
3286                'doc1': doc1,
3287                'doc2': doc2,
3288                'doc3': doc3
3289            }
3290            return resources
3291
3292        # Validate QueryIterable by converting it to a list.
3293        resources = __CreateResources(self.client)
3294        results = self.client.ReadItems(resources['coll']['_self'],
3295                                       {'maxItemCount':2})
3296        docs = list(iter(results))
3297        self.assertEqual(3,
3298                         len(docs),
3299                         'QueryIterable should return all documents' +
3300                         ' using continuation')
3301        self.assertEqual(resources['doc1']['id'], docs[0]['id'])
3302        self.assertEqual(resources['doc2']['id'], docs[1]['id'])
3303        self.assertEqual(resources['doc3']['id'], docs[2]['id'])
3304
3305        # Validate QueryIterable iterator with 'for'.
3306        counter = 0
3307        # test QueryIterable with 'for'.
3308        for doc in iter(results):
3309            counter += 1
3310            if counter == 1:
3311                self.assertEqual(resources['doc1']['id'],
3312                                 doc['id'],
3313                                 'first document should be doc1')
3314            elif counter == 2:
3315                self.assertEqual(resources['doc2']['id'],
3316                                 doc['id'],
3317                                 'second document should be doc2')
3318            elif counter == 3:
3319                self.assertEqual(resources['doc3']['id'],
3320                                 doc['id'],
3321                                 'third document should be doc3')
3322        self.assertEqual(counter, 3)
3323
3324        # Get query results page by page.
3325        results = self.client.ReadItems(resources['coll']['_self'],
3326                                       {'maxItemCount':2})
3327        first_block = results.fetch_next_block()
3328        self.assertEqual(2,
3329                         len(first_block),
3330                         'First block should have 2 entries.')
3331        self.assertEqual(resources['doc1']['id'], first_block[0]['id'])
3332        self.assertEqual(resources['doc2']['id'], first_block[1]['id'])
3333        self.assertEqual(1,
3334                         len(results.fetch_next_block()),
3335                         'Second block should have 1 entry.')
3336        self.assertEqual(0,
3337                         len(results.fetch_next_block()),
3338                         'Then its empty.')
3339
3340    def test_trigger_functionality_self_link(self):
3341        self._test_trigger_functionality(False)
3342
3343    def test_trigger_functionality_name_based(self):
3344        self._test_trigger_functionality(True)
3345
3346    def _test_trigger_functionality(self, is_name_based):
3347        triggers_in_collection1 = [
3348        {
3349            'id': 't1',
3350            'body': (
3351                'function() {' +
3352                '    var item = getContext().getRequest().getBody();' +
3353                '    item.id = item.id.toUpperCase() + \'t1\';' +
3354                '    getContext().getRequest().setBody(item);' +
3355                '}'),
3356            'triggerType': documents.TriggerType.Pre,
3357            'triggerOperation': documents.TriggerOperation.All
3358        },
3359        {
3360            'id': 'response1',
3361            'body': (
3362                'function() {' +
3363                '    var prebody = getContext().getRequest().getBody();' +
3364                '    if (prebody.id != \'TESTING POST TRIGGERt1\')'
3365                '        throw \'id mismatch\';' +
3366                '    var postbody = getContext().getResponse().getBody();' +
3367                '    if (postbody.id != \'TESTING POST TRIGGERt1\')'
3368                '        throw \'id mismatch\';'
3369                '}'),
3370            'triggerType': documents.TriggerType.Post,
3371            'triggerOperation': documents.TriggerOperation.All
3372        },
3373        {
3374            'id': 'response2',
3375            # can't be used because setValue is currently disabled
3376            'body': (
3377                'function() {' +
3378                '    var predoc = getContext().getRequest().getBody();' +
3379                '    var postdoc = getContext().getResponse().getBody();' +
3380                '    getContext().getResponse().setValue(' +
3381                '        \'predocname\', predoc.id + \'response2\');' +
3382                '    getContext().getResponse().setValue(' +
3383                '        \'postdocname\', postdoc.id + \'response2\');' +
3384                '}'),
3385                'triggerType': documents.TriggerType.Post,
3386                'triggerOperation': documents.TriggerOperation.All,
3387        }]
3388        triggers_in_collection2 = [
3389        {
3390            'id': "t2",
3391            'body': "function() { }", # trigger already stringified
3392            'triggerType': documents.TriggerType.Pre,
3393            'triggerOperation': documents.TriggerOperation.All
3394        },
3395        {
3396            'id': "t3",
3397            'body': (
3398                'function() {' +
3399                '    var item = getContext().getRequest().getBody();' +
3400                '    item.id = item.id.toLowerCase() + \'t3\';' +
3401                '    getContext().getRequest().setBody(item);' +
3402                '}'),
3403            'triggerType': documents.TriggerType.Pre,
3404            'triggerOperation': documents.TriggerOperation.All
3405        }]
3406        triggers_in_collection3 = [
3407        {
3408            'id': 'triggerOpType',
3409            'body': 'function() { }',
3410            'triggerType': documents.TriggerType.Post,
3411            'triggerOperation': documents.TriggerOperation.Delete,
3412        }]
3413
3414        def __CreateTriggers(client, database, collection, triggers, is_name_based):
3415            """Creates triggers.
3416
3417            :Parameters:
3418                - `client`: cosmos_client.CosmosClient
3419                - `collection`: dict
3420
3421            """
3422            for trigger_i in triggers:
3423                trigger = client.CreateTrigger(self.GetDocumentCollectionLink(database, collection, is_name_based),
3424                                               trigger_i)
3425                for property in trigger_i:
3426                    self.assertEqual(
3427                        trigger[property],
3428                        trigger_i[property],
3429                        'property {property} should match'.format(property=property))
3430
3431        # create database
3432        db = self.databseForTest
3433        # create collections
3434        collection1 = self.client.CreateContainer(
3435            self.GetDatabaseLink(db, is_name_based), { 'id': 'test_trigger_functionality 1 ' + str(uuid.uuid4()) })
3436        collection2 = self.client.CreateContainer(
3437            self.GetDatabaseLink(db, is_name_based), { 'id': 'test_trigger_functionality 2 ' + str(uuid.uuid4()) })
3438        collection3 = self.client.CreateContainer(
3439            self.GetDatabaseLink(db, is_name_based), { 'id': 'test_trigger_functionality 3 ' + str(uuid.uuid4()) })
3440        # create triggers
3441        __CreateTriggers(self.client, db, collection1, triggers_in_collection1, is_name_based)
3442        __CreateTriggers(self.client, db, collection2, triggers_in_collection2, is_name_based)
3443        __CreateTriggers(self.client, db, collection3, triggers_in_collection3, is_name_based)
3444        # create document
3445        triggers_1 = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection1, is_name_based)))
3446        self.assertEqual(len(triggers_1), 3)
3447        document_1_1 = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection1, is_name_based),
3448                                             { 'id': 'doc1',
3449                                               'key': 'value' },
3450                                             { 'preTriggerInclude': 't1' })
3451        self.assertEqual(document_1_1['id'],
3452                         'DOC1t1',
3453                         'id should be capitalized')
3454        document_1_2 = self.client.CreateItem(
3455            self.GetDocumentCollectionLink(db, collection1, is_name_based),
3456            { 'id': 'testing post trigger' },
3457            { 'postTriggerInclude': 'response1',
3458              'preTriggerInclude': 't1' })
3459        self.assertEqual(document_1_2['id'], 'TESTING POST TRIGGERt1')
3460        document_1_3 = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection1, is_name_based),
3461                                             { 'id': 'responseheaders' },
3462                                             { 'preTriggerInclude': 't1' })
3463        self.assertEqual(document_1_3['id'], "RESPONSEHEADERSt1")
3464
3465        triggers_2 = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection2, is_name_based)))
3466        self.assertEqual(len(triggers_2), 2)
3467        document_2_1 = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection2, is_name_based),
3468                                             { 'id': 'doc2',
3469                                               'key2': 'value2' },
3470                                             { 'preTriggerInclude': 't2' })
3471        self.assertEqual(document_2_1['id'],
3472                         'doc2',
3473                         'id shouldn\'t change')
3474        document_2_2 = self.client.CreateItem(self.GetDocumentCollectionLink(db, collection2, is_name_based),
3475                                             { 'id': 'Doc3',
3476                                               'prop': 'empty' },
3477                                             { 'preTriggerInclude': 't3' })
3478        self.assertEqual(document_2_2['id'], 'doc3t3')
3479
3480        triggers_3 = list(self.client.ReadTriggers(self.GetDocumentCollectionLink(db, collection3, is_name_based)))
3481        self.assertEqual(len(triggers_3), 1)
3482        with self.assertRaises(Exception):
3483            self.client.CreateItem(self.GetDocumentCollectionLink(db, collection3, is_name_based),
3484                                  { 'id': 'Docoptype' },
3485                                  { 'postTriggerInclude': 'triggerOpType' })
3486
3487        self.client.DeleteContainer(collection1['_self'])
3488        self.client.DeleteContainer(collection2['_self'])
3489        self.client.DeleteContainer(collection3['_self'])
3490
3491    def test_stored_procedure_functionality_self_link(self):
3492        self._test_stored_procedure_functionality(False)
3493
3494    def test_stored_procedure_functionality_name_based(self):
3495        self._test_stored_procedure_functionality(True)
3496
    def _test_stored_procedure_functionality(self, is_name_based):
        """Create and execute three stored procedures and check their outputs.

        :param bool is_name_based: address resources by name instead of self link.
        """
        # create database
        db = self.databseForTest
        # create collection
        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)

        # sproc1 loops 1000 times checking the response body each iteration;
        # the final body is the last loop index, 999.
        sproc1 = {
            'id': 'storedProcedure1' + str(uuid.uuid4()),
            'body': (
                'function () {' +
                '  for (var i = 0; i < 1000; i++) {' +
                '    var item = getContext().getResponse().getBody();' +
                '    if (i > 0 && item != i - 1) throw \'body mismatch\';' +
                '    getContext().getResponse().setBody(i);' +
                '  }' +
                '}')
        }

        retrieved_sproc = self.client.CreateStoredProcedure(self.GetDocumentCollectionLink(db, collection, is_name_based),
                                                       sproc1)
        result = self.client.ExecuteStoredProcedure(self.GetStoredProcedureLink(db, collection, retrieved_sproc, is_name_based),
                                               None)
        self.assertEqual(result, 999)
        # sproc2 appends the digits 0-9 to the 'Body' value, producing
        # '0123456789' -> 123456789 as an int.
        sproc2 = {
            'id': 'storedProcedure2' + str(uuid.uuid4()),
            'body': (
                'function () {' +
                '  for (var i = 0; i < 10; i++) {' +
                '    getContext().getResponse().appendValue(\'Body\', i);' +
                '  }' +
                '}')
        }
        retrieved_sproc2 = self.client.CreateStoredProcedure(self.GetDocumentCollectionLink(db, collection, is_name_based),
                                                        sproc2)
        result = self.client.ExecuteStoredProcedure(self.GetStoredProcedureLink(db, collection, retrieved_sproc2, is_name_based),
                                               None)
        self.assertEqual(int(result), 123456789)
        # sproc3 reads its input parameter and echoes 'a' + input.temp.
        sproc3 = {
            'id': 'storedProcedure3' + str(uuid.uuid4()),
            'body': (
                'function (input) {' +
                    '  getContext().getResponse().setBody(' +
                    '      \'a\' + input.temp);' +
                '}')
        }
        retrieved_sproc3 = self.client.CreateStoredProcedure(self.GetDocumentCollectionLink(db, collection, is_name_based),
                                                        sproc3)
        result = self.client.ExecuteStoredProcedure(self.GetStoredProcedureLink(db, collection, retrieved_sproc3, is_name_based),
                                               {'temp': 'so'})
        self.assertEqual(result, 'aso')
3547
3548    def __ValidateOfferResponseBody(self, offer, expected_coll_link, expected_offer_type):
3549        self.assert_(offer.get('id'), 'Id cannot be null.')
3550        self.assert_(offer.get('_rid'), 'Resource Id (Rid) cannot be null.')
3551        self.assert_(offer.get('_self'), 'Self Link cannot be null.')
3552        self.assert_(offer.get('resource'), 'Resource Link cannot be null.')
3553        self.assertTrue(offer['_self'].find(offer['id']) != -1,
3554                        'Offer id not contained in offer self link.')
3555        self.assertEqual(expected_coll_link.strip('/'), offer['resource'].strip('/'))
3556        if (expected_offer_type):
3557            self.assertEqual(expected_offer_type, offer.get('offerType'))
3558
    def test_offer_read_and_query(self):
        """Read and query offers and verify they track collection lifetime.

        Creating a collection adds exactly one offer; reading and querying
        that offer return identical resources; deleting the collection
        removes the offer again.
        """
        # Create database.
        db = self.databseForTest

        offers = list(self.client.ReadOffers())
        initial_count = len(offers)

        # Create collection.
        collection = self.client.CreateContainer(db['_self'], { 'id': 'test_offer_read_and_query ' + str(uuid.uuid4()) })
        offers = list(self.client.ReadOffers())
        self.assertEqual(initial_count+1, len(offers))

        offers = self.GetCollectionOffers(self.client, collection['_rid'])

        self.assertEqual(1, len(offers))
        expected_offer = offers[0]
        self.__ValidateOfferResponseBody(expected_offer, collection.get('_self'), None)
        # Read the offer.
        read_offer = self.client.ReadOffer(expected_offer.get('_self'))
        self.__ValidateOfferResponseBody(read_offer, collection.get('_self'), expected_offer.get('offerType'))
        # Check if the read resource is what we expected.
        self.assertEqual(expected_offer.get('id'), read_offer.get('id'))
        self.assertEqual(expected_offer.get('_rid'), read_offer.get('_rid'))
        self.assertEqual(expected_offer.get('_self'), read_offer.get('_self'))
        self.assertEqual(expected_offer.get('resource'), read_offer.get('resource'))
        # Query for the offer.

        offers = list(self.client.QueryOffers(
            {
                'query': 'SELECT * FROM root r WHERE r.id=@id',
                'parameters': [
                    { 'name': '@id', 'value': expected_offer['id']}
                ]
            }))

        self.assertEqual(1, len(offers))
        query_one_offer = offers[0]
        self.__ValidateOfferResponseBody(query_one_offer, collection.get('_self'), expected_offer.get('offerType'))
        # Check if the query result is what we expected.
        self.assertEqual(expected_offer.get('id'), query_one_offer.get('id'))
        self.assertEqual(expected_offer.get('_rid'), query_one_offer.get('_rid'))
        self.assertEqual(expected_offer.get('_self'), query_one_offer.get('_self'))
        self.assertEqual(expected_offer.get('resource'), query_one_offer.get('resource'))
        # Expects an exception when reading offer with bad offer link.
        self.__AssertHTTPFailureWithStatus(StatusCodes.BAD_REQUEST, self.client.ReadOffer, expected_offer.get('_self')[:-1] + 'x')
        # Now delete the collection.
        self.client.DeleteContainer(collection.get('_self'))
        # Reading fails.
        self.__AssertHTTPFailureWithStatus(StatusCodes.NOT_FOUND, self.client.ReadOffer, expected_offer.get('_self'))
        # Read feed now returns 0 results.
        offers = list(self.client.ReadOffers())
        self.assertEqual(initial_count, len(offers))
3611
3612    def test_offer_replace(self):
3613        # Create database.
3614        db = self.databseForTest
3615        # Create collection.
3616        collection = self.configs.create_single_partition_collection_if_not_exist(self.client)
3617        offers = self.GetCollectionOffers(self.client, collection['_rid'])
3618        self.assertEqual(1, len(offers))
3619        expected_offer = offers[0]
3620        self.__ValidateOfferResponseBody(expected_offer, collection.get('_self'), None)
3621        # Replace the offer.
3622        offer_to_replace = dict(expected_offer)
3623        offer_to_replace['content']['offerThroughput'] += 100
3624        replaced_offer = self.client.ReplaceOffer(offer_to_replace['_self'], offer_to_replace)
3625        self.__ValidateOfferResponseBody(replaced_offer, collection.get('_self'), None)
3626        # Check if the replaced offer is what we expect.
3627        self.assertEqual(offer_to_replace.get('id'), replaced_offer.get('id'))
3628        self.assertEqual(offer_to_replace.get('_rid'), replaced_offer.get('_rid'))
3629        self.assertEqual(offer_to_replace.get('_self'), replaced_offer.get('_self'))
3630        self.assertEqual(offer_to_replace.get('resource'), replaced_offer.get('resource'))
3631        self.assertEqual(offer_to_replace.get('content').get('offerThroughput'), replaced_offer.get('content').get('offerThroughput'))
3632        # Expects an exception when replacing an offer with bad id.
3633        offer_to_replace_bad_id = dict(offer_to_replace)
3634        offer_to_replace_bad_id['_rid'] = 'NotAllowed'
3635        self.__AssertHTTPFailureWithStatus(
3636            StatusCodes.BAD_REQUEST, self.client.ReplaceOffer, offer_to_replace_bad_id['_self'], offer_to_replace_bad_id)
3637        # Expects an exception when replacing an offer with bad rid.
3638        offer_to_replace_bad_rid = dict(offer_to_replace)
3639        offer_to_replace_bad_rid['_rid'] = 'InvalidRid'
3640        self.__AssertHTTPFailureWithStatus(
3641            StatusCodes.BAD_REQUEST, self.client.ReplaceOffer, offer_to_replace_bad_rid['_self'], offer_to_replace_bad_rid)
3642        # Expects an exception when replaceing an offer with null id and rid.
3643        offer_to_replace_null_ids = dict(offer_to_replace)
3644        offer_to_replace_null_ids['id'] = None
3645        offer_to_replace_null_ids['_rid'] = None
3646        self.__AssertHTTPFailureWithStatus(
3647            StatusCodes.BAD_REQUEST, self.client.ReplaceOffer, offer_to_replace_null_ids['_self'], offer_to_replace_null_ids)
3648
3649    def test_collection_with_offer_type(self):
3650        # create database
3651        created_db = self.databseForTest
3652
3653        # create a collection
3654        offers = list(self.client.ReadOffers())
3655        before_offers_count = len(offers)
3656
3657        collection_definition = { 'id': 'test_collection_with_offer_type ' + str(uuid.uuid4()) }
3658        collection = self.client.CreateContainer(created_db['_self'],
3659                                             collection_definition,
3660                                             {
3661                                                 'offerType': 'S2'
3662                                             })
3663        offers = list(self.client.ReadOffers())
3664        self.assertEqual(before_offers_count + 1, len(offers))
3665
3666        offers = self.GetCollectionOffers(self.client, collection['_rid'])
3667
3668        self.assertEqual(1, len(offers))
3669        expected_offer = offers[0]
3670
3671        # We should have an offer of type S2.
3672        self.__ValidateOfferResponseBody(expected_offer, collection.get('_self'), 'S2')
3673        self.client.DeleteContainer(collection['_self'])
3674
3675    def test_database_account_functionality(self):
3676        # Validate database account functionality.
3677        database_account = self.client.GetDatabaseAccount()
3678        self.assertEqual(database_account.DatabasesLink, '/dbs/')
3679        self.assertEqual(database_account.MediaLink, '/media/')
3680        if (HttpHeaders.MaxMediaStorageUsageInMB in
3681            self.client.last_response_headers):
3682            self.assertEqual(
3683                database_account.MaxMediaStorageUsageInMB,
3684                self.client.last_response_headers[
3685                    HttpHeaders.MaxMediaStorageUsageInMB])
3686        if (HttpHeaders.CurrentMediaStorageUsageInMB in
3687            self.client.last_response_headers):
3688            self.assertEqual(
3689                database_account.CurrentMediaStorageUsageInMB,
3690                self.client.last_response_headers[
3691                    HttpHeaders.
3692                    CurrentMediaStorageUsageInMB])
3693        self.assertTrue(
3694            database_account.ConsistencyPolicy['defaultConsistencyLevel']
3695            != None)
3696
3697    def test_index_progress_headers_self_link(self):
3698        self._test_index_progress_headers(False)
3699
3700    def test_index_progress_headers_name_based(self):
3701        self._test_index_progress_headers(True)
3702
    def _test_index_progress_headers(self, is_name_based):
        """Verify which index-progress headers each indexing mode returns.

        Consistent and none-indexed collections expose only the index
        transformation progress header; a lazily indexed collection exposes
        the lazy indexing progress header as well.

        :param bool is_name_based: address resources by name instead of self link.
        """
        created_db = self.databseForTest
        # Default (consistent) indexing: no lazy progress header expected.
        consistent_coll = self.client.CreateContainer(self.GetDatabaseLink(created_db, is_name_based), { 'id': 'test_index_progress_headers consistent_coll ' + str(uuid.uuid4()) })
        self.client.ReadContainer(self.GetDocumentCollectionLink(created_db, consistent_coll, is_name_based))
        self.assertFalse(HttpHeaders.LazyIndexingProgress in self.client.last_response_headers)
        self.assertTrue(HttpHeaders.IndexTransformationProgress in self.client.last_response_headers)
        # Lazy indexing: both progress headers expected.
        lazy_coll = self.client.CreateContainer(self.GetDatabaseLink(created_db, is_name_based),
            {
                'id': 'test_index_progress_headers lazy_coll ' + str(uuid.uuid4()),
                'indexingPolicy': { 'indexingMode' : documents.IndexingMode.Lazy }
            })
        self.client.ReadContainer(self.GetDocumentCollectionLink(created_db, lazy_coll, is_name_based))
        self.assertTrue(HttpHeaders.LazyIndexingProgress in self.client.last_response_headers)
        self.assertTrue(HttpHeaders.IndexTransformationProgress in self.client.last_response_headers)
        # No indexing: lazy progress header absent again.
        none_coll = self.client.CreateContainer(self.GetDatabaseLink(created_db, is_name_based),
            {
                'id': 'test_index_progress_headers none_coll ' + str(uuid.uuid4()),
                'indexingPolicy': { 'indexingMode': documents.IndexingMode.NoIndex, 'automatic': False }
            })
        self.client.ReadContainer(self.GetDocumentCollectionLink(created_db, none_coll, is_name_based))
        self.assertFalse(HttpHeaders.LazyIndexingProgress in self.client.last_response_headers)
        self.assertTrue(HttpHeaders.IndexTransformationProgress in self.client.last_response_headers)

        self.client.DeleteContainer(consistent_coll['_self'])
        self.client.DeleteContainer(lazy_coll['_self'])
        self.client.DeleteContainer(none_coll['_self'])
3729
3730    # To run this test, please provide your own CA certs file or download one from
3731    #     http://curl.haxx.se/docs/caextract.html
3732    #
3733    # def test_ssl_connection(self):
3734    #     connection_policy = documents.ConnectionPolicy()
3735    #     connection_policy.SSLConfiguration = documents.SSLConfiguration()
3736    #     connection_policy.SSLConfiguration.SSLCaCerts = './cacert.pem'
3737    #     client = cosmos_client.CosmosClient(CRUDTests.host, {'masterKey': CRUDTests.masterKey}, connection_policy)
3738    #     # Read databases after creation.
3739    #     databases = list(client.ReadDatabases())
3740
3741    def test_id_validation(self):
3742        # Id shouldn't end with space.
3743        database_definition = { 'id': 'id_with_space ' }
3744        try:
3745            self.client.CreateDatabase(database_definition)
3746            self.assertFalse(True)
3747        except ValueError as e:
3748            self.assertEqual('Id ends with a space.', e.args[0])
3749        # Id shouldn't contain '/'.
3750        database_definition = { 'id': 'id_with_illegal/_char' }
3751        try:
3752            self.client.CreateDatabase(database_definition)
3753            self.assertFalse(True)
3754        except ValueError as e:
3755            self.assertEqual('Id contains illegal chars.', e.args[0])
3756        # Id shouldn't contain '\\'.
3757        database_definition = { 'id': 'id_with_illegal\\_char' }
3758        try:
3759            self.client.CreateDatabase(database_definition)
3760            self.assertFalse(True)
3761        except ValueError as e:
3762            self.assertEqual('Id contains illegal chars.', e.args[0])
3763        # Id shouldn't contain '?'.
3764        database_definition = { 'id': 'id_with_illegal?_char' }
3765        try:
3766            self.client.CreateDatabase(database_definition)
3767            self.assertFalse(True)
3768        except ValueError as e:
3769            self.assertEqual('Id contains illegal chars.', e.args[0])
3770        # Id shouldn't contain '#'.
3771        database_definition = { 'id': 'id_with_illegal#_char' }
3772        try:
3773            self.client.CreateDatabase(database_definition)
3774            self.assertFalse(True)
3775        except ValueError as e:
3776            self.assertEqual('Id contains illegal chars.', e.args[0])
3777
3778        # Id can begin with space
3779        database_definition = { 'id': ' id_begin_space' }
3780        db = self.client.CreateDatabase(database_definition)
3781        self.assertTrue(True)
3782
3783        self.client.DeleteDatabase(db['_self'])
3784
3785    def test_id_case_validation(self):
3786        # create database
3787        created_db = self.databseForTest
3788
3789        uuid_string = str(uuid.uuid4())
3790        # pascalCase
3791        collection_definition1 = { 'id': 'sampleCollection ' + uuid_string }
3792
3793        # CamelCase
3794        collection_definition2 = { 'id': 'SampleCollection ' + uuid_string }
3795
3796        # Verify that no collections exist
3797        collections = list(self.client.ReadContainers(self.GetDatabaseLink(created_db, True)))
3798        number_of_existing_collections = len(collections)
3799
3800        # create 2 collections with different casing of IDs
3801        created_collection1 = self.client.CreateContainer(self.GetDatabaseLink(created_db, True),
3802                                                     collection_definition1)
3803
3804        created_collection2 = self.client.CreateContainer(self.GetDatabaseLink(created_db, True),
3805                                                     collection_definition2)
3806
3807        collections = list(self.client.ReadContainers(self.GetDatabaseLink(created_db, True)))
3808
3809        # verify if a total of 2 collections got created
3810        self.assertEqual(len(collections), number_of_existing_collections + 2)
3811
3812        # verify that collections are created with specified IDs
3813        self.assertEqual(collection_definition1['id'], created_collection1['id'])
3814        self.assertEqual(collection_definition2['id'], created_collection2['id'])
3815
3816        self.client.DeleteContainer(created_collection1['_self'])
3817        self.client.DeleteContainer(created_collection2['_self'])
3818
3819    def test_id_unicode_validation(self):
3820        # create database
3821        created_db = self.databseForTest
3822
3823        # unicode chars in Hindi for Id which translates to: "Hindi is the national language of India"
3824        collection_definition1 = { 'id': u'हिन्दी भारत की राष्ट्रीय भाषा है' }
3825
3826        # Special chars for Id
3827        collection_definition2 = { 'id': "!@$%^&*()-~`'_[]{}|;:,.<>" }
3828
3829        # verify that collections are created with specified IDs
3830        created_collection1 = self.client.CreateContainer(self.GetDatabaseLink(created_db, True),
3831                                                     collection_definition1)
3832
3833        created_collection2 = self.client.CreateContainer(self.GetDatabaseLink(created_db, True),
3834                                                     collection_definition2)
3835
3836        self.assertEqual(collection_definition1['id'], created_collection1['id'])
3837        self.assertEqual(collection_definition2['id'], created_collection2['id'])
3838
3839        self.client.DeleteContainer(created_collection1['_self'])
3840        self.client.DeleteContainer(created_collection2['_self'])
3841
3842    def GetDatabaseLink(self, database, is_name_based=True):
3843        if is_name_based:
3844            return 'dbs/' + database['id']
3845        else:
3846            return database['_self']
3847
3848    def GetUserLink(self, database, user, is_name_based=True):
3849        if is_name_based:
3850            return self.GetDatabaseLink(database) + '/users/' + user['id']
3851        else:
3852            return user['_self']
3853
3854    def GetPermissionLink(self, database, user, permission, is_name_based=True):
3855        if is_name_based:
3856            return self.GetUserLink(database, user) + '/permissions/' + permission['id']
3857        else:
3858            return permission['_self']
3859
3860    def GetDocumentCollectionLink(self, database, document_collection, is_name_based=True):
3861        if is_name_based:
3862            return self.GetDatabaseLink(database) + '/colls/' + document_collection['id']
3863        else:
3864            return document_collection['_self']
3865
3866    def GetDocumentLink(self, database, document_collection, document, is_name_based=True):
3867        if is_name_based:
3868            return self.GetDocumentCollectionLink(database, document_collection) + '/docs/' + document['id']
3869        else:
3870            return document['_self']
3871
3872    def GetAttachmentLink(self, database, document_collection, document, attachment, is_name_based=True):
3873        if is_name_based:
3874            return self.GetDocumentLink(database, document_collection, document) + '/attachments/' + attachment['id']
3875        else:
3876            return attachment['_self']
3877
3878    def GetTriggerLink(self, database, document_collection, trigger, is_name_based=True):
3879        if is_name_based:
3880            return self.GetDocumentCollectionLink(database, document_collection) + '/triggers/' + trigger['id']
3881        else:
3882            return trigger['_self']
3883
3884    def GetUserDefinedFunctionLink(self, database, document_collection, user_defined_function, is_name_based=True):
3885        if is_name_based:
3886            return self.GetDocumentCollectionLink(database, document_collection) + '/udfs/' + user_defined_function['id']
3887        else:
3888            return user_defined_function['_self']
3889
3890    def GetStoredProcedureLink(self, database, document_collection, stored_procedure, is_name_based=True):
3891        if is_name_based:
3892            return self.GetDocumentCollectionLink(database, document_collection) + '/sprocs/' + stored_procedure['id']
3893        else:
3894            return stored_procedure['_self']
3895
3896    def GetConflictLink(self, database, document_collection, conflict, is_name_based=True):
3897        if is_name_based:
3898            return self.GetDocumentCollectionLink(database, document_collection) + '/conflicts/' + conflict['id']
3899        else:
3900            return conflict['_self']
3901
3902    def GetCollectionOffers(self, client, collection_rid):
3903        return list(client.QueryOffers(
3904            {
3905                'query': 'SELECT * FROM root r WHERE r.offerResourceId=@offerResourceId',
3906                'parameters': [
3907                    { 'name': '@offerResourceId', 'value': collection_rid}
3908                ]
3909            }))
3910
if __name__ == '__main__':
    try:
        unittest.main()
    except SystemExit as exc:
        # unittest.main() always exits via sys.exit(); swallow the clean exit
        # but re-raise when the payload is the boolean True (failed tests).
        if exc.args[0] is True:
            raise
3917