from cbas_base import *
from couchbase import FMT_BYTES
import threading
import random


class CBASSecondaryIndexes(CBASBaseTest):
    def setUp(self):
        self.input = TestInputSingleton.input
        if "default_bucket" not in self.input.test_params:
            self.input.test_params.update({"default_bucket": False})
        super(CBASSecondaryIndexes, self).setUp()

        self.load_sample_buckets(servers=[self.master],
                                 bucketName=self.cb_bucket_name,
                                 total_items=self.beer_sample_docs_count)

        if "add_all_cbas_nodes" in self.input.test_params and \
                self.input.test_params["add_all_cbas_nodes"] and \
                len(self.cbas_servers) > 1:
            self.add_all_cbas_node_then_rebalance()

        # Create bucket on CBAS
        self.create_bucket_on_cbas(cbas_bucket_name=self.cbas_bucket_name,
                                   cb_bucket_name=self.cb_bucket_name,
                                   cb_server_ip=self.cb_server_ip)

        # Create dataset on the CBAS bucket
        self.create_dataset_on_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cbas_dataset_name=self.cbas_dataset_name)

    def tearDown(self):
        super(CBASSecondaryIndexes, self).tearDown()

    def verify_index_used(self, statement, index_used=False, index_name=None):
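        # Run EXPLAIN on the given statement and inspect the plan: an
        # "index-search" operator means the secondary index serves the query,
        # while a "data-scan" operator means the dataset is scanned without it.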
        statement = 'EXPLAIN %s' % statement
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(status, "success")
        if status == 'success':
            self.assertEquals(errors, None)
            if index_used:
                self.assertTrue("index-search" in str(results))
                self.assertFalse("data-scan" in str(results))
                self.log.info("INDEX-SEARCH found in EXPLAIN output, so the index is used to serve %s" % statement)
                if index_name:
                    self.assertTrue(index_name in str(results))
            else:
                self.assertTrue("data-scan" in str(results))
                self.assertFalse("index-search" in str(results))
                self.log.info("DATA-SCAN found in EXPLAIN output, so the index is not used to serve %s" % statement)

    def test_create_index(self):
        '''
        Steps :
        1. Create bucket in CBAS, create dataset
        2. Create index on various fields as passed in the parameters
        3. Validate if the index is created and the index definition has the expected fields

        Author : Mihir Kamdar
        Created date : 8/1/2017
        '''
        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
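        # index_fields is the comma-joined field:type list from the test params,
        # so the generated DDL looks like, e.g.:
        #   create index <index_name> on <dataset>(city:string,abv:bigint);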
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

    def test_create_index_without_if_not_exists(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Create index
            3. Create another index with the same name, without using the IF NOT EXISTS clause
            4. Validate that the error msg is as expected

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

        # Create another index with same name
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)
        self.assertTrue(self.validate_error_in_response(status, errors),
                        "Error msg not matching expected error msg")

    def test_create_index_with_if_not_exists(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Create index
            3. Create another index with the same name, using the IF NOT EXISTS clause
            4. Validate that there is no error

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

        # Create another index with same name
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)
        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

    def test_create_index_with_if_not_exists_different_fields(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Create index
            3. Create another index with the same name but with different fields, using the IF NOT EXISTS clause
            4. Validate there is no error
            5. The index definition should not change.

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        index_field1 = "city:string"
        index_field2 = "abv:bigint"

        # Create Index
        create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_field1)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, [index_field1],
                                      self.cbas_dataset_name)[0])

        # Create another index with same name
        create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_field2)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)
        self.assertTrue(status == "success", "Create Index query failed")

        # The index definition should be based on the older field, it should not change
        self.assertTrue(
            self.verify_index_created(self.index_name, [index_field1],
                                      self.cbas_dataset_name)[0])

    def test_multiple_composite_index_with_overlapping_fields(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Create a composite index
            3. Create another composite index with some overlapping fields
            4. Both indexes should get created successfully

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        index_fields1 = ["city:string", "abv:bigint"]
        index_fields2 = ["abv:bigint", "geo.lat:double"]

        # Create Index
        index_fields = ""
        for index_field in index_fields1:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
            self.index_name + "1", self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name + "1", index_fields1,
                                      self.cbas_dataset_name)[0])

        # Create another composite index with overlapping fields
        index_fields = ""
        for index_field in index_fields2:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} IF NOT EXISTS on {1}({2});".format(
            self.index_name + "2", self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)
        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name + "2", index_fields2,
                                      self.cbas_dataset_name)[0])

    def test_create_index_non_empty_dataset(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset, connect to the bucket, disconnect from bucket
            2. Create index
            3. Validate the index is created correctly

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Connect to Bucket
        result = self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                                        cb_bucket_password=self.cb_bucket_password)

        # Allow ingestion to complete
        self.sleep(30)

        # Disconnect from bucket
        result = self.disconnect_from_bucket(cbas_bucket_name=self.cbas_bucket_name)

        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

    def test_create_index_with_bucket_connected(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset, connect to the bucket
            2. Create index
            3. Create index should fail.
            4. Validate that the error msg is as expected

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Connect to Bucket
        self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                               cb_bucket_password=self.cb_bucket_password)

        # Allow ingestion to complete
        self.sleep(30)

        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(self.validate_error_in_response(status, errors))

    def test_drop_index(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Create index
            3. Validate the index is created correctly
            4. Drop index
            5. Validate that the index is dropped

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

        drop_idx_statement = "drop index {0}.{1};".format(
            self.cbas_dataset_name, self.index_name)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            drop_idx_statement)

        self.assertTrue(status == "success", "Drop Index query failed")

        self.assertFalse(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

    def test_drop_non_existing_index(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Drop a non-existing index without using the IF EXISTS clause
            3. Validate that the error msg is as expected
            4. Drop a non-existing index using the IF EXISTS clause
            5. Validate there is no error

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Drop non-existing index without IF EXISTS
        drop_idx_statement = "drop index {0}.{1};".format(
            self.cbas_dataset_name, self.index_name)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            drop_idx_statement)

        self.assertTrue(self.validate_error_in_response(status, errors))

        # Drop non-existing index with IF EXISTS
        drop_idx_statement = "drop index {0}.{1} IF EXISTS;".format(
            self.cbas_dataset_name, self.index_name)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            drop_idx_statement)

        self.assertEqual(status, "success",
                         "Drop non existent index with IF EXISTS fails")

    def test_drop_dataset_drops_index(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Create index
            3. Validate the index is created correctly
            4. Drop dataset
            5. Validate that the index is also dropped

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

        # Drop dataset
        self.drop_dataset(self.cbas_dataset_name)

        # Check that the index no longer exists
        self.assertFalse(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

    def test_drop_non_empty_index(self):
        '''
            Steps :
            1. Create bucket in CBAS, create dataset
            2. Create index
            3. Validate the index is created correctly
            4. Connect dataset, disconnect dataset
            5. Drop index
            6. Validate that the index is dropped

            Author : Mihir Kamdar
            Created date : 8/1/2017
        '''
        # Create Index
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

        # Connect to Bucket
        self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                               cb_bucket_password=self.cb_bucket_password)

        # Allow ingestion to complete
        self.sleep(30)

        # Disconnect from bucket
        self.disconnect_from_bucket(cbas_bucket_name=self.cbas_bucket_name)

        drop_idx_statement = "drop index {0}.{1};".format(
            self.cbas_dataset_name, self.index_name)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            drop_idx_statement)

        self.assertTrue(status == "success", "Drop Index query failed")

        self.assertFalse(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

    def _direct_client(self, server, bucket, timeout=30):
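        # Returns a Python SDK client for the KV bucket when use_sdk_client is
        # set; otherwise falls back to a memcached binary client for the server.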
        # CREATE SDK CLIENT
        if self.use_sdk_client:
            try:
                from sdk_client import SDKClient
                scheme = "couchbase"
                host = self.master.ip
                if self.master.ip == "127.0.0.1":
                    scheme = "http"
                    host = "{0}:{1}".format(self.master.ip, self.master.port)
                return SDKClient(scheme=scheme, hosts=[host], bucket=bucket)
            except Exception, ex:
                self.log.error("cannot load sdk client due to error {0}".format(str(ex)))
        # USE MC BIN CLIENT WHEN NOT USING SDK CLIENT
        return self.direct_mc_bin_client(server, bucket, timeout=timeout)

    def test_index_population(self):
        '''
        Steps :
        1. Create an index on the dataset
        2. Upsert a doc whose indexed-field value does not fit the declared index type
        3. Connect the bucket, then validate query results and index usage via EXPLAIN
        '''
        # Create Index
        search_by = self.input.param("search_by", '')
        exp_number = self.input.param("exp_number", 0)
        not_fit_value = self.input.param("not_fit_value", '')
        expected_status = self.input.param("status", 'success')
        binary = self.input.param("binary", False)
        index_used = self.input.param("index_used", False)
        if ";" in str(not_fit_value):
            not_fit_value = not_fit_value.split(';')
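        # not_fit_value is a value whose type intentionally does not match the
        # indexed field's declared type (e.g. a string for a bigint index).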

        testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
        rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
        self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
        self.use_sdk_client = True
        self.client = self._direct_client(self.master, self.cb_bucket_name).cb
        k = 'test_index_population'

        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]

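        # The binary case upserts a non-JSON (UTF-16 encoded) blob, so the doc
        # cannot contribute a typed value to the index; otherwise the doc is
        # upserted with the (possibly nested) indexed field set to not_fit_value.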
        if binary:
            self.client.upsert('utf16_doc', not_fit_value.encode('utf16'), format=FMT_BYTES)
        else:
            field = index_fields.split(":")[0]
            if "." in field:
                # nested field path, e.g. 'geo.lat'
                parts = field.split(".")
                self.client.upsert(k, {parts[0]: {parts[1]: not_fit_value}})
            else:
                self.client.upsert(k, {field: not_fit_value})

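        # The index is only expected to serve the query when the upserted value's
        # Python type matches the declared index type; otherwise a data-scan is
        # expected (checked in verify_index_used below).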
        if (index_fields.split(":")[1] == "string" and isinstance(not_fit_value, str)) or \
                (index_fields.split(":")[1] == "double" and isinstance(not_fit_value, (float, int))) or \
                (index_fields.split(":")[1] == "bigint" and isinstance(not_fit_value, (float, int))):
            index_used = True
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])
        self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                               cb_bucket_password=self.cb_bucket_password)
        self.sleep(20)

        if isinstance(search_by, basestring):
            statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name,
                                                                            index_fields.split(":")[0], search_by)
        else:
            statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
                                                                          index_fields.split(":")[0], search_by)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(status, "success")
        self.assertEquals(errors, None)
        self.assertEquals(results, [{'$1': exp_number}])
        if isinstance(not_fit_value, str):
            statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name,
                                                                            index_fields.split(":")[0], not_fit_value)
        else:
            statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
                                                                          index_fields.split(":")[0], not_fit_value)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(status, expected_status)
        if status == 'success':
            self.assertEquals(errors, None)
            self.assertEquals(results, [{'$1': 1}])

        self.log.info("Verify whether statement %s used index or not. Indexed: %s" % (statement, index_fields))
        self.verify_index_used(statement, index_used, self.index_name)
    # https://issues.couchbase.com/browse/MB-25646
    # https://issues.couchbase.com/browse/MB-25657

    def test_index_population_thread(self):
        to_verify = 0
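        # Background mutator: repeatedly overwrites the doc (key 'k' defined
        # below) so the indexed field either holds a value of the wrong type
        # for the index or is missing, while the queries below keep running.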
        def update_data(client, index_fields):
            for _ in xrange(100):
                if index_fields.split(":")[-1] == 'double':
                    not_fit_value = random.choice([False, "sdfs", 11111])
                elif index_fields.split(":")[-1] == 'string':
                    not_fit_value = random.choice([False, 11111, 36.6])
                elif index_fields.split(":")[-1] == 'bigint':
                    not_fit_value = random.choice([False, "sdfs", 36.6])
                perc = random.randrange(0, 100)
                if perc > 75:
                    # ~25% of the time: keep the indexed field but give it a
                    # value whose type does not fit the index
                    client.upsert(k, {index_fields.split(":")[0]: not_fit_value})
                else:
                    # ~75% of the time: rename the field so the indexed field
                    # is missing from the document
                    client.upsert(k, {index_fields.split(":")[0] + "_NEW_FIELD": not_fit_value})

        # Create Index
        search_by = self.input.param("search_by", '')
        exp_number = self.input.param("exp_number", 0)
        not_fit_value = self.input.param("not_fit_value", '')
        expected_status = self.input.param("status", 'success')
        index_used = self.input.param("index_used", False)

        if ";" in str(not_fit_value):
            not_fit_value = not_fit_value.split(';')

        testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
        rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
        self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
        self.use_sdk_client = True
        self.client = self._direct_client(self.master, self.cb_bucket_name).cb
        k = 'test_index_population_thread'

        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]

        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])
        self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                               cb_bucket_password=self.cb_bucket_password)
        self.sleep(10)

        d = threading.Thread(name='daemon', target=update_data, args=(self.client, index_fields,))
        d.setDaemon(True)
        d.start()

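        # Re-run the two queries several times while the background thread keeps
        # mutating the doc: the count for search_by should stay at exp_number and
        # the mutated doc should never be counted for not_fit_value.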
        for i in xrange(10):
            if isinstance(search_by, basestring):
                statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name,
                                                                                index_fields.split(":")[0], search_by)
            else:
                statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
                                                                              index_fields.split(":")[0], search_by)
            status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
                statement)
            self.assertEquals(status, "success")
            self.assertEquals(errors, None)
            self.assertEquals(results, [{'$1': exp_number}])
            if isinstance(not_fit_value, str):
                statement = 'SELECT count(*) FROM `{0}` where {1}="{2}"'.format(self.cbas_dataset_name,
                                                                                index_fields.split(":")[0], not_fit_value)
            else:
                statement = 'SELECT count(*) FROM `{0}` where {1}={2}'.format(self.cbas_dataset_name,
                                                                              index_fields.split(":")[0], not_fit_value)
            status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
                statement)
            self.assertEquals(status, expected_status)
            if status == 'success':
                self.assertEquals(errors, None)
                self.assertEquals(results, [{'$1': 0}])

            self.log.info("Verify whether statement %s used index or not. Indexed: %s" % (statement, index_fields))
            self.verify_index_used(statement, index_used, self.index_name)

    def test_index_population_where_statements(self):
        exp_number = self.input.param("exp_number", 0)
        where_statement = self.input.param("where_statement", '').replace('_EQ_', '=')
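        # '_EQ_' is a placeholder for '=' in the where_statement test parameter
        # (presumably because '=' is a delimiter in the test input) and is
        # replaced here before the statement is built.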
        index_used = self.input.param("index_used", False)

        testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
        rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
        self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
        self.use_sdk_client = True
        self.client = self._direct_client(self.master, self.cb_bucket_name).cb

        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]

        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])
        self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                               cb_bucket_password=self.cb_bucket_password)
        self.sleep(20)

        statement = 'SELECT count(*) FROM `{0}` where {1};'.format(self.cbas_dataset_name, where_statement)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(status, "success")
        self.assertEquals(errors, None)
        self.assertEquals(results, [{'$1': exp_number}])

        self.log.info("Verify whether statement %s used index or not. Indexed: %s" % (statement, index_fields))
        self.verify_index_used(statement, index_used, self.index_name)

    def test_index_population_joins(self):
        exp_number = self.input.param("exp_number", 0)
        self.index_name2 = self.input.param('index_name2', None)
        self.index_fields2 = self.input.param('index_fields2', None)
        if self.index_fields2:
            self.index_fields2 = self.index_fields2.split("-")
        statement = self.input.param("statement", '').replace('_EQ_', '=').replace('_COMMA_', ',')

        testuser = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'password': 'password'}]
        rolelist = [{'id': self.cb_bucket_name, 'name': self.cb_bucket_name, 'roles': 'admin'}]
        self.add_built_in_server_user(testuser=testuser, rolelist=rolelist)
        self.use_sdk_client = True
        self.client = self._direct_client(self.master, self.cb_bucket_name).cb

        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]

        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name, self.index_fields,
                                      self.cbas_dataset_name)[0])

        index_fields2 = ""
        for index_field in self.index_fields2:
            index_fields2 += index_field + ","
        index_fields2 = index_fields2[:-1]

        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name2, self.cbas_dataset_name, index_fields2)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.assertTrue(
            self.verify_index_created(self.index_name2, self.index_fields2,
                                      self.cbas_dataset_name)[0])

        self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                               cb_bucket_password=self.cb_bucket_password)
        self.sleep(20)

        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(status, "success")
        self.assertEquals(errors, None)
        self.assertEquals(len(results), exp_number)

    # https://issues.couchbase.com/browse/MB-25695

    def test_index_metadata(self):
        self.buckets = [Bucket(name="beer-sample")]
        self.perform_doc_ops_in_all_cb_buckets(100000, "create", start_key=0, end_key=100000)
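        # beer-sample ships with 7,303 documents; together with the 100,000 docs
        # created above, that matches the 107,303 expected below.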
        index_fields = ""
        for index_field in self.index_fields:
            index_fields += index_field + ","
        index_fields = index_fields[:-1]
        create_idx_statement = "create index {0} on {1}({2});".format(
            self.index_name, self.cbas_dataset_name, index_fields)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            create_idx_statement)

        self.assertTrue(status == "success", "Create Index query failed")

        self.connect_to_bucket(cbas_bucket_name=self.cbas_bucket_name,
                               cb_bucket_password=self.cb_bucket_password)
        self.wait_for_ingestion_complete([self.cbas_dataset_name], 107303)
        statement = 'SELECT count(*) FROM `{0}`'.format(self.cbas_dataset_name)
        _, result = self.verify_index_created(self.index_name, self.index_fields,
                                              self.cbas_dataset_name)

        self.assertEquals(result[0]['Index']['DatasetName'], self.cbas_dataset_name)
        self.assertEquals(result[0]['Index']['DataverseName'], 'Default')
        self.assertEquals(result[0]['Index']['IndexName'], self.index_name)
        self.assertEquals(result[0]['Index']['IndexStructure'], 'BTREE')
        self.assertEquals(result[0]['Index']['IsPrimary'], False)
        self.assertEquals(result[0]['Index']['PendingOp'], 0)
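        # Note: the two assertions below reuse index_field (the last value of the
        # loop above), so they assume a single-field index such as "city:string".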
        self.assertEquals(result[0]['Index']['SearchKey'], [index_field.split(":")[:-1]])
        self.assertEquals(result[0]['Index']['SearchKeyType'], index_field.split(":")[1:])

        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(status, "success")
        self.assertEquals(errors, None)
        self.assertEquals(results, [{'$1': 107303}])

        self.disconnect_from_bucket(cbas_bucket_name=self.cbas_bucket_name)

        drop_idx_statement = "drop index {0}.{1};".format(self.cbas_dataset_name, self.index_name)
        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            drop_idx_statement)

        _, result = self.verify_index_created(self.index_name, self.index_fields,
                                              self.cbas_dataset_name)

        self.assertEquals(result, [])

        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(status, "success")
        self.assertEquals(errors, None)
        self.assertEquals(results, [{'$1': 107303}])
        self.drop_dataset(self.cbas_dataset_name)

        status, metrics, errors, results, _ = self.execute_statement_on_cbas_via_rest(
            statement)
        self.assertEquals(errors, [
            {u'msg': u'Cannot find dataset beer_ds in dataverse Default nor an alias with name beer_ds!', u'code': 1}])