from logredaction.log_redaction_base import LogRedactionBase
from couchbase_helper.documentgenerator import BlobGenerator
from couchbase_helper.query_definitions import SQLDefinitionGenerator
from couchbase_helper.tuq_generators import JsonGenerator
from couchbase_helper.tuq_helper import N1QLHelper
from lib.membase.api.rest_client import RestConnection, RestHelper
from remote.remote_util import RemoteMachineShellConnection, RemoteUtilHelper
import json
import logging

log = logging.getLogger()

class LogRedactionTests(LogRedactionBase):
    def setUp(self):
        super(LogRedactionTests, self).setUp()
        self.doc_per_day = self.input.param("doc-per-day", 100)
        self.n1ql_port = self.input.param("n1ql_port", 8093)
        self.interrupt_replication = self.input.param("interrupt-replication", False)

    def tearDown(self):
        super(LogRedactionTests, self).tearDown()

    def test_enabling_redaction(self):
        self.set_redaction_level()

    def test_cbcollect_with_redaction_enabled(self):
        self.set_redaction_level()
        self.start_logs_collection()
        result = self.monitor_logs_collection()
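        # cbcollect keys its per-node results by otp node name ("ns_1@<ip>"); a
        # cluster initialized against localhost reports "ns_1@127.0.0.1" instead,
        # hence the fallback below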
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
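        # with redaction enabled, cbcollect produces "<name>-redacted.zip" alongside
        # the plain "<name>.zip"; strip the suffix to derive the unredacted file name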
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)

    def test_cbcollect_with_redaction_disabled(self):
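        # set_redaction_level() applies the redaction level from the test params;
        # this test is expected to run with redaction disabled ("none"), so only
        # the unredacted archive is verified below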
        self.set_redaction_level()
        self.start_logs_collection()
        result = self.monitor_logs_collection()
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        nonredactFileName = logs_path.split('/')[-1]
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        self.verify_log_files_exist(remotepath=remotepath,
                                    nonredactFileName=nonredactFileName)

    def test_ns_server_with_redaction_enabled(self):
        # load bucket and do some ops
        gen_create = BlobGenerator('logredac', 'logredac-', self.value_size, end=self.num_items)
        self._load_all_buckets(self.master, gen_create, "create", 0)

        gen_delete = BlobGenerator('logredac', 'logredac-', self.value_size, start=self.num_items // 2,
                                   end=self.num_items)
        gen_update = BlobGenerator('logredac', 'logredac-', self.value_size, start=self.num_items + 1,
                                   end=self.num_items * 3 // 2)

        self._load_all_buckets(self.master, gen_delete, "create", 0)
        self._load_all_buckets(self.master, gen_update, "create", 0)
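        # note: gen_delete and gen_update are also loaded with "create"; the point
        # here appears to be seeding more distinct keys into the logs rather than
        # exercising deletes and updates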

        # set log redaction level, collect logs, verify log files exist and verify them for redaction
        self.set_redaction_level()
        self.start_logs_collection()
        result = self.monitor_logs_collection()
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.debug.log")

    def test_ns_server_with_rebalance_failover_with_redaction_enabled(self):
        kv_node = self.get_nodes_from_services_map(service_type="kv", get_all_nodes=False)
        rest = RestConnection(self.master)
        # load bucket and do some ops
        gen_create = BlobGenerator('logredac', 'logredac-', self.value_size, end=self.num_items)
        self._load_all_buckets(self.master, gen_create, "create", 0)
        gen_delete = BlobGenerator('logredac', 'logredac-', self.value_size, start=self.num_items // 2,
                                   end=self.num_items)
        gen_update = BlobGenerator('logredac', 'logredac-', self.value_size, start=self.num_items + 1,
                                   end=self.num_items * 3 // 2)
        self._load_all_buckets(self.master, gen_delete, "create", 0)
        self._load_all_buckets(self.master, gen_update, "create", 0)
        # set log redaction level, collect logs, verify log files exist and verify them for redaction
        self.set_redaction_level()
        self.start_logs_collection()
        services_in = ["kv"]
        to_add_nodes = [self.servers[self.nodes_init]]
        rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], to_add_nodes, [],
                                                 services=services_in)
        reached = RestHelper(rest).rebalance_reached()
        self.assertTrue(reached, "rebalance failed, stuck or did not complete")
        rebalance.result()
        # gracefully fail over the newly added node, then rebalance it out
        server_failed_over = self.servers[self.nodes_init]
        fail_over_task = self.cluster.async_failover([self.master], failover_nodes=[server_failed_over], graceful=True)
        fail_over_task.result()
        rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [], [server_failed_over])
        reached = RestHelper(rest).rebalance_reached()
        self.assertTrue(reached, "rebalance failed, stuck or did not complete")
        rebalance.result()
        result = self.monitor_logs_collection()
        log.info(result)
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.debug.log")

##############################################################################################
#
#   GSI
##############################################################################################

    def set_indexer_logLevel(self, loglevel="info"):
        """Set the indexer service log level.

        :param loglevel: one of silent, fatal, error, warn, info,
            verbose, timing, debug, trace
        """
        self.log.info("Setting indexer log level to {0}".format(loglevel))
        server = self.get_nodes_from_services_map(service_type="index")
        rest = RestConnection(server)
        rest.set_indexer_params("logLevel", loglevel)

    def set_projector_logLevel(self, loglevel="info"):
        """Set the projector log level.

        :param loglevel: one of silent, fatal, error, warn, info,
            verbose, timing, debug, trace
        """
        self.log.info("Setting projector log level to {0}".format(loglevel))
        server = self.get_nodes_from_services_map(service_type="index")
        rest = RestConnection(server)
        proj_settings = {"projector.settings.log_level": loglevel}
        rest.set_index_settings(proj_settings)

    def test_gsi_with_crud_with_redaction_enabled(self):
        # load bucket and do some ops
        self.set_indexer_logLevel("trace")
        self.set_projector_logLevel("trace")
        json_generator = JsonGenerator()
        gen_docs = json_generator.generate_all_type_documents_for_gsi(docs_per_day=self.doc_per_day, start=0)
        full_docs_list = self.generate_full_docs_list(gen_docs)
        n1ql_helper = N1QLHelper(use_rest=True, buckets=self.buckets, full_docs_list=full_docs_list,
                                 log=log, input=self.input, master=self.master)
        self.load(gen_docs)
        n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
        query_definition_generator = SQLDefinitionGenerator()
        n1ql_helper.create_primary_index(using_gsi=True, server=n1ql_node)
        query_definitions = query_definition_generator.generate_airlines_data_query_definitions()
        query_definitions = query_definition_generator.filter_by_group(["simple"], query_definitions)
        # set log redaction level, collect logs, verify log files exist and verify them for redaction
        self.set_redaction_level()
        self.start_logs_collection()
        # create the indexes defined by the "simple" query group
        for query_definition in query_definitions:
            for bucket in self.buckets:
                create_query = query_definition.generate_index_create_query(bucket.name)
                n1ql_helper.run_cbq_query(query=create_query, server=n1ql_node)

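        # scan each index so query/indexer traffic lands in the logs being collected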
        for query_definition in query_definitions:
            for bucket in self.buckets:
                scan_query = query_definition.generate_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)

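        # drop the indexes, then wait for the log collection to finish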
        for query_definition in query_definitions:
            for bucket in self.buckets:
                drop_query = query_definition.generate_index_drop_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=drop_query, server=n1ql_node)
        result = self.monitor_logs_collection()
        log.info(result)
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        log_file = self.input.param("log_file_name", "indexer.log")
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.{0}".format(log_file))

    def test_gsi_with_flush_bucket_redaction_enabled(self):
        # load bucket and do some ops
        self.set_indexer_logLevel("trace")
        self.set_projector_logLevel("trace")
        json_generator = JsonGenerator()
        gen_docs = json_generator.generate_all_type_documents_for_gsi(docs_per_day=self.doc_per_day, start=0)
        full_docs_list = self.generate_full_docs_list(gen_docs)
        n1ql_helper = N1QLHelper(use_rest=True, buckets=self.buckets, full_docs_list=full_docs_list,
                                 log=log, input=self.input, master=self.master)
        self.load(gen_docs)
        n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
        n1ql_helper.create_primary_index(using_gsi=True, server=n1ql_node)
        query_definition_generator = SQLDefinitionGenerator()
        query_definitions = query_definition_generator.generate_airlines_data_query_definitions()
        query_definitions = query_definition_generator.filter_by_group(["simple"], query_definitions)
        # set log redaction level, collect logs, verify log files exist and verify them for redaction
        self.set_redaction_level()
        self.start_logs_collection()
        # create the indexes defined by the "simple" query group
        for query_definition in query_definitions:
            for bucket in self.buckets:
                create_query = query_definition.generate_index_create_query(bucket.name)
                n1ql_helper.run_cbq_query(query=create_query, server=n1ql_node)

        for query_definition in query_definitions:
            for bucket in self.buckets:
                scan_query = query_definition.generate_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)

        rest = RestConnection(self.master)
        rest.flush_bucket(self.buckets[0].name)

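        # give the flush time to settle, then reload the bucket so the scans below
        # return data again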
        self.sleep(100)
        self.load(gen_docs, buckets=[self.buckets[0]])

        for query_definition in query_definitions:
            for bucket in self.buckets:
                scan_query = query_definition.generate_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)

        for query_definition in query_definitions:
            for bucket in self.buckets:
                drop_query = query_definition.generate_index_drop_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=drop_query, server=n1ql_node)
        result = self.monitor_logs_collection()
        log.info(result)
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        log_file = self.input.param("log_file_name", "indexer.log")
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.{0}".format(log_file))

    def test_gsi_with_index_restart_redaction_enabled(self):
        # load bucket and do some ops
        self.set_indexer_logLevel("trace")
        self.set_projector_logLevel("trace")
        json_generator = JsonGenerator()
        gen_docs = json_generator.generate_all_type_documents_for_gsi(docs_per_day=self.doc_per_day, start=0)
        full_docs_list = self.generate_full_docs_list(gen_docs)
        n1ql_helper = N1QLHelper(use_rest=True, buckets=self.buckets, full_docs_list=full_docs_list,
                                 log=log, input=self.input, master=self.master)
        self.load(gen_docs)
        n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
        n1ql_helper.create_primary_index(using_gsi=True, server=n1ql_node)
        query_definition_generator = SQLDefinitionGenerator()
        query_definitions = query_definition_generator.generate_airlines_data_query_definitions()
        query_definitions = query_definition_generator.filter_by_group(["simple"], query_definitions)
        # set log redaction level, collect logs, verify log files exist and verify them for redaction
        self.set_redaction_level()
        self.start_logs_collection()
        # create the indexes defined by the "simple" query group
        for query_definition in query_definitions:
            for bucket in self.buckets:
                create_query = query_definition.generate_index_create_query(bucket.name)
                n1ql_helper.run_cbq_query(query=create_query, server=n1ql_node)

        for query_definition in query_definitions:
            for bucket in self.buckets:
                scan_query = query_definition.generate_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)

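        # restart the index node and allow time for warmup before re-running the scans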
        index_node = self.get_nodes_from_services_map(service_type="index")
        remote = RemoteMachineShellConnection(index_node)
        remote.stop_server()
        self.sleep(30)
        remote.start_server()
        self.sleep(30)
        for query_definition in query_definitions:
            for bucket in self.buckets:
                scan_query = query_definition.generate_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)

        for query_definition in query_definitions:
            for bucket in self.buckets:
                drop_query = query_definition.generate_index_drop_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=drop_query, server=n1ql_node)
        result = self.monitor_logs_collection()
        log.info(result)
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        log_file = self.input.param("log_file_name", "indexer.log")
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.{0}".format(log_file))

    def test_gsi_with_index_rebalance_redaction_enabled(self):
        # load bucket and do some ops
        self.set_indexer_logLevel("trace")
        self.set_projector_logLevel("trace")
        json_generator = JsonGenerator()
        gen_docs = json_generator.generate_all_type_documents_for_gsi(docs_per_day=self.doc_per_day, start=0)
        full_docs_list = self.generate_full_docs_list(gen_docs)
        n1ql_helper = N1QLHelper(use_rest=True, buckets=self.buckets, full_docs_list=full_docs_list,
                                 log=log, input=self.input, master=self.master)
        self.load(gen_docs)
        self.find_nodes_in_list()
        n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
        n1ql_helper.create_primary_index(using_gsi=True, server=n1ql_node)
        query_definition_generator = SQLDefinitionGenerator()
        query_definitions = query_definition_generator.generate_airlines_data_query_definitions()
        query_definitions = query_definition_generator.filter_by_group(["simple"], query_definitions)
        # set log redaction level, collect logs, verify log files exist and verify them for redaction
        self.set_redaction_level()
        self.start_logs_collection()
        # create the indexes defined by the "simple" query group
        for query_definition in query_definitions:
            for bucket in self.buckets:
                create_query = query_definition.generate_index_create_query(bucket.name)
                n1ql_helper.run_cbq_query(query=create_query, server=n1ql_node)

        for query_definition in query_definitions:
            for bucket in self.buckets:
                scan_query = query_definition.generate_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)
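        # rebalance in the nodes discovered by find_nodes_in_list(), then re-run the scans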
        rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], self.nodes_in_list,
                                                 [], services=self.services_in)
        rebalance.result()
        self.sleep(30)

        for query_definition in query_definitions:
            for bucket in self.buckets:
                scan_query = query_definition.generate_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)

        for query_definition in query_definitions:
            for bucket in self.buckets:
                drop_query = query_definition.generate_index_drop_query(bucket=bucket.name)
                n1ql_helper.run_cbq_query(query=drop_query, server=n1ql_node)
        result = self.monitor_logs_collection()
        log.info(result)
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        log_file = self.input.param("log_file_name", "indexer.log")
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.{0}".format(log_file))

    def test_cbcollect_with_redaction_enabled_with_views(self):
        self.set_redaction_level()
        self._create_views()
        """ start collect logs """
        self.start_logs_collection()
        result = self.monitor_logs_collection()
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)

    def test_cbcollect_with_redaction_enabled_with_xdcr(self):
        rest_src = RestConnection(self.master)
        rest_dest = RestConnection(self.servers[1])
        rest_dest_helper = RestHelper(rest_dest)

        try:
            rest_src.remove_all_replications()
            rest_src.remove_all_remote_clusters()
            self.set_redaction_level()
            rest_src.add_remote_cluster(self.servers[1].ip, self.servers[1].port,
                                        self.servers[1].rest_username,
                                        self.servers[1].rest_password, "C2")

            """ at dest cluster """
            self.add_built_in_server_user(node=self.servers[1])
            rest_dest.create_bucket(bucket='default', ramQuotaMB=512)
            bucket_ready = rest_dest_helper.vbucket_map_ready('default')
            if not bucket_ready:
                self.fail("Bucket default at dest not created after 120 seconds.")
            repl_id = rest_src.start_replication('continuous', 'default', "C2")
            if repl_id is not None:
                self.log.info("Replication created successfully")
            gen = BlobGenerator("ent-backup", "ent-backup-", self.value_size, end=self.num_items)
            tasks = self._async_load_all_buckets(self.master, gen, "create", 0)
            for task in tasks:
                task.result()
            self.sleep(10)

            """ enable firewall """
            if self.interrupt_replication:
                RemoteUtilHelper.enable_firewall(self.master, xdcr=True)

            """ start collect logs """
            self.start_logs_collection()
            result = self.monitor_logs_collection()
            """ verify logs """
            try:
                logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
            except KeyError:
                logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
            redactFileName = logs_path.split('/')[-1]
            nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
            remotepath = logs_path[0:logs_path.rfind('/') + 1]
            self.verify_log_files_exist(remotepath=remotepath,
                                        redactFileName=redactFileName,
                                        nonredactFileName=nonredactFileName)
            self.log.info("Verify on log ns_server.goxdcr.log")
            self.verify_log_redaction(remotepath=remotepath,
                                      redactFileName=redactFileName,
                                      nonredactFileName=nonredactFileName,
                                      logFileName="ns_server.goxdcr.log")
        finally:
            """ clean up xdcr """
            rest_dest.delete_bucket()
            rest_src.remove_all_replications()
            rest_src.remove_all_remote_clusters()
            if self.interrupt_replication:
                shell = RemoteMachineShellConnection(self.master)
                shell.disable_firewall()
                shell.disconnect()

##############################################################################################
#
#   N1QL
##############################################################################################

    def test_n1ql_through_rest_with_redaction_enabled(self):
        gen_create = BlobGenerator('logredac', 'logredac-', self.value_size, end=self.num_items)
        self._load_all_buckets(self.master, gen_create, "create", 0)
        shell = RemoteMachineShellConnection(self.master)
        os_type = shell.extract_remote_info().distribution_type
        curl_path = "curl"
        if os_type.lower() == 'windows':
            curl_path = "%scurl" % self.path

        shell.execute_command("%s -u Administrator:password http://%s:%s/query/service -d 'statement=create primary index on default'"
                              % (curl_path, self.master.ip, self.n1ql_port))

        shell.execute_command("%s -u Administrator:password http://%s:%s/query/service -d 'statement=create index idx on default(fake)'"
                              % (curl_path, self.master.ip, self.n1ql_port))

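        # run one query with intentionally bad credentials and one with credentials
        # embedded in the URL, so user names reach the query log and can be checked
        # for redaction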
        shell.execute_command("%s -u Administr:pasword http://%s:%s/query/service -d 'statement=select * from default'"
                              % (curl_path, self.master.ip, self.n1ql_port))

        shell.execute_command("%s http://Administrator:password@%s:%s/query/service -d 'statement=select * from default'"
                              % (curl_path, self.master.ip, self.n1ql_port))

        shell.execute_command("%s -u Administrator:password http://%s:%s/query/service -d 'statement=select * from default'"
                              % (curl_path, self.master.ip, self.n1ql_port))

        # trigger a CAS mismatch by inserting the same document twice; the second
        # insert throws the desired error
        shell.execute_command("%s -u Administrator:password http://%s:%s/query/service -d 'statement=insert into default (KEY,VALUE) VALUES(\"test\",{\"field1\":\"test\"})'"
                              % (curl_path, self.master.ip, self.n1ql_port))

        shell.execute_command("%s -u Administrator:password http://%s:%s/query/service -d 'statement=insert into default (KEY,VALUE) VALUES(\"test\",{\"field1\":\"test\"})'"
                              % (curl_path, self.master.ip, self.n1ql_port))

        # delete a document that does not exist
        shell.execute_command("%s -u Administrator:password http://%s:%s/query/service -d 'statement=DELETE FROM default USE KEYS \"fakekey\"'"
                              % (curl_path, self.master.ip, self.n1ql_port))

        # set log redaction level, collect logs, verify log files exist and verify them for redaction
        self.set_redaction_level()
        self.start_logs_collection()
        result = self.monitor_logs_collection()
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.query.log")
        shell.disconnect()

    def convert_list_to_json(self, output_of_curl):
        """Convert output of remote_util.execute_command to JSON,
        stripping all whitespace to match execute_command_inside output."""
        new_list = [line.replace(" ", "") for line in output_of_curl]
        concat_string = ''.join(new_list)
        json_output = json.loads(concat_string)
        return json_output
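    # Illustrative example: convert_list_to_json(['{"status":', '"success"}'])
    # returns {'status': 'success'}. Note the whitespace stripping also removes
    # spaces inside string values, so this only suits compact JSON output.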

    def test_fts_log_redaction(self):
        gen_create = BlobGenerator('logredac', 'logredac-', self.value_size, end=self.num_items)
        self._load_all_buckets(self.master, gen_create, "create", 0)
        index_definition = {
            "type": "fulltext-index",
            "name": "index1",
            "sourceType": "couchbase",
            "sourceName": "default"
        }
        rest = RestConnection(self.master)
        status = rest.create_fts_index("index1", index_definition)
        if status:
            log.info("Index 'index1' created")
        else:
            log.error("Error creating index, status = {0}".format(status))
        self.sleep(60, "waiting for docs to get indexed")
        query_json = {"query": {"field": "type", "match": "emp"}}
        hits, _, _, _ = rest.run_fts_query(index_name="index1",
                                           query_json=query_json)
        log.info("Hits from query {0}: {1}".format(query_json, hits))
        self.set_redaction_level()
        self.start_logs_collection()
        result = self.monitor_logs_collection()
        try:
            logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
        except KeyError:
            logs_path = result["perNode"]["ns_1@127.0.0.1"]["path"]
        redactFileName = logs_path.split('/')[-1]
        nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
        remotepath = logs_path[0:logs_path.rfind('/') + 1]
        self.verify_log_files_exist(remotepath=remotepath,
                                    redactFileName=redactFileName,
                                    nonredactFileName=nonredactFileName)
        self.verify_log_redaction(remotepath=remotepath,
                                  redactFileName=redactFileName,
                                  nonredactFileName=nonredactFileName,
                                  logFileName="ns_server.fts.log")