import logger
import unittest

from membase.helper.rebalance_helper import RebalanceHelper
from couchbase_helper.cluster import Cluster
from basetestcase import BaseTestCase
from remote.remote_util import RemoteMachineShellConnection

from membase.helper.subdoc_helper import SubdocHelper
from random import randint

class SubdocSanityTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SubdocHelper(self, "default")
        self.helper.setup_cluster()
        self.cluster = Cluster()
        self.servers = self.helper.servers

    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_simple_dataset_sanity(self):
        self.test_simple_dataset_get()
        self.test_deep_nested_dataset_get_dict()
        self.test_deep_nested_dataset_get_array()
        self.test_simple_dataset_dict_upsert()
        self.test_simple_dataset_dict_add()
        self.test_simple_dataset_remove()
        self.test_simple_dataset_exists()
        self.test_simple_dataset_replace()
        self.test_simple_dataset_array_push_last()
        self.test_simple_dataset_array_push_first()
        self.test_simple_dataset_counter()
        self.test_simple_dataset_array_add_unique()
        self.test_simple_dataset_counter()

    def test_simple_dataset_get(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple get sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        data_set.get_all_docs(inserted_keys, path='isDict')
        data_set.get_all_docs(inserted_keys, path='geometry.coordinates[0]')
        data_set.get_all_docs(inserted_keys, path='dict_value.name')
        data_set.get_all_docs(inserted_keys, path='array[0]')
        data_set.get_all_docs(inserted_keys, path='array[-1]')

        ''' This should go into ErrorTesting '''
        #self.assertFalse(data_set.get_all_docs(inserted_keys, path='array[-5]'))
        #self.assertFalse(data_set.get_all_docs(inserted_keys, path=' '))
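
    # Note on sub-document paths (the sample document below is illustrative
    # only, not necessarily what SubdocHelper.insert_docs produces): dotted
    # segments descend into dictionaries and bracketed indices select array
    # elements, with negative indices counting from the end. For example, given
    #   {"geometry": {"coordinates": [10, 20]}, "array": [1, 2, 3]}
    # the path 'geometry.coordinates[0]' addresses 10 and 'array[-1]' addresses 3.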

    def test_deep_nested_dataset_get_dict(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue get sub doc on deep nested single path on dictionaries "
                      "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()

        '''Top level element'''
        #data_set.get_all_docs(inserted_keys, path='number', check_data=levels)
        data_set.get_all_docs(inserted_keys, path='array')
        data_set.get_all_docs(inserted_keys, path='array[0]')

        '''Last element Dictionary'''
        self.log.info('Testing last element dictionary')
        data_set.get_all_docs(inserted_keys, path=self._get_path('child', levels-1))

        '''Last element Dict.Array'''
        self.log.info('Testing Dict.Array')
        data_set.get_all_docs(inserted_keys, path=self._get_path('child', levels-2) + '.array[0]')

        '''Intermediate element Dict.Array'''
        self.log.info('Testing Intermediate element Dict.Array')
        data_set.get_all_docs(inserted_keys, path=self._get_path('child', levels/2) + '.array[0]')

    def test_deep_nested_dataset_get_array(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue get sub doc on deep nested single path on arrays "
                      "dataset with {0} docs".format(num_docs))

        data_set = DeeplyNestedDataSet(self.helper, num_docs)
        inserted_keys, levels = data_set.load()

        '''Top level element'''
        data_set.get_all_docs(inserted_keys, path='number')
        data_set.get_all_docs(inserted_keys, path='array')
        data_set.get_all_docs(inserted_keys, path='array[0]')

        '''Last element Array'''
        last_path = 'child'
        for i in xrange(levels-1):
            last_path += '.child'
        data_set.get_all_docs(inserted_keys, path=last_path)

        '''Last element Array of Array'''
        last_path = 'child'
        for i in xrange(levels-3):
            last_path += '.child'
        last_path += '.array[-1][-1][-1]'
        data_set.get_all_docs(inserted_keys, path=last_path)

        '''Intermediate element Array'''
        last_path = 'child'
        for i in xrange(levels/2):
            last_path += '.child'
        last_path += '.array[0][-1]'
        data_set.get_all_docs(inserted_keys, path=last_path)

    def test_simple_dataset_dict_upsert(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple upsert dict sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 1000-character string to replace existing path values '''
        replace_string = self.generate_string(1000)

        data_set.upsert_all_docs(inserted_keys, replace_string, path='isDict')
        data_set.upsert_all_docs(inserted_keys, replace_string, path='geometry.coordinates[0]')
        data_set.upsert_all_docs(inserted_keys, replace_string, path='dict_value.name')
        data_set.upsert_all_docs(inserted_keys, "999", path='height')
        data_set.upsert_all_docs(inserted_keys, replace_string, path='array[-1]')

    def test_simple_dataset_dict_add(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple add dict sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 1000-character string to add at new paths '''
        replace_string = self.generate_string(1000)

        #data_set.add_all_docs(inserted_keys, replace_string, path='isDict')
        #data_set.add_all_docs(inserted_keys, replace_string, path='geometry.coordinates[0]')
        data_set.add_all_docs(inserted_keys, replace_string, path='dict_value')
        #data_set.add_all_docs(inserted_keys, "999", path='height')
        #data_set.add_all_docs(inserted_keys, replace_string, path='array[-1]')

    def test_simple_dataset_remove(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple remove sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        data_set.remove_all_docs(inserted_keys, path='isDict')
        data_set.remove_all_docs(inserted_keys, path='geometry.coordinates[0]')
        data_set.remove_all_docs(inserted_keys, path='dict_value.name')
        data_set.remove_all_docs(inserted_keys, path='array[0]')
        data_set.remove_all_docs(inserted_keys, path='array[-1]')

    def test_simple_dataset_exists(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple exists sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' add test code to accept Bool values and not error out '''
        data_set.exists_all_docs(inserted_keys, path='isDict')
        data_set.exists_all_docs(inserted_keys, path='geometry.coordinates[0]')
        data_set.exists_all_docs(inserted_keys, path='dict_value.name')
        data_set.exists_all_docs(inserted_keys, path='array[0]')
        data_set.exists_all_docs(inserted_keys, path='array[-1]')

    def test_simple_dataset_replace(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple replace sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 10-character string to replace existing path values '''
        replace_string = self.generate_string(10)

        data_set.replace_all_docs(inserted_keys, replace_string, path='isDict')
        data_set.replace_all_docs(inserted_keys, replace_string, path='geometry.coordinates[0]')
        data_set.replace_all_docs(inserted_keys, replace_string, path='dict_value.name')
        data_set.replace_all_docs(inserted_keys, "999", path='height')
        data_set.replace_all_docs(inserted_keys, replace_string, path='array[-1]')

    def test_simple_dataset_array_push_last(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple array_push_last sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 10-character string to append to existing arrays '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_push_last(inserted_keys, replace_string, path='isDict')
        data_set.array_push_last(inserted_keys, replace_string, path='geometry.coordinates')
        #data_set.array_push_last(inserted_keys, replace_string, path='dict_value.name')
        #data_set.array_push_last(inserted_keys, "999", path='height')
        data_set.array_push_last(inserted_keys, replace_string, path='array')

    def test_simple_dataset_array_push_first(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple array_push_first sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 10-character string to prepend to existing arrays '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_push_first(inserted_keys, replace_string, path='isDict')
        data_set.array_push_first(inserted_keys, replace_string, path='geometry.coordinates')
        #data_set.array_push_first(inserted_keys, replace_string, path='dict_value.name')
        #data_set.array_push_first(inserted_keys, "999", path='height')
        data_set.array_push_first(inserted_keys, replace_string, path='array')

    def test_simple_dataset_counter(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple counter sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 10-character string (not used by the counter op) '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.counter_all_paths(inserted_keys, path='isDict')
        data_set.counter_all_paths(inserted_keys, path='geometry.coordinates[0]')
        #data_set.counter_all_paths(inserted_keys, path='dict_value.name')
        data_set.counter_all_paths(inserted_keys, path='height')
        #data_set.counter_all_paths(inserted_keys, path='array')

    def test_simple_dataset_array_add_unique(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple add array unique sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 10-character string to add to existing arrays '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.array_add_unique(inserted_keys, replace_string, path='isDict')
        data_set.array_add_unique(inserted_keys, replace_string, path='geometry.coordinates')
        #data_set.array_add_unique(inserted_keys, replace_string, path='dict_value.name')
        #data_set.counter_all_paths(inserted_keys, 1, path='height')
        #data_set.counter_all_paths(inserted_keys, replace_string, path='array')

    def test_simple_dataset_multi_lookup(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Issue simple multi lookup sub doc single path "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        inserted_keys = data_set.load()

        ''' Randomly generate a 10-character string (not used by the multi lookup) '''
        replace_string = self.generate_string(10)

        #Should be a negative testcase below.
        #data_set.multi_lookup_all_paths(inserted_keys, path='isDict')
        data_set.multi_lookup_all_paths(inserted_keys, path='geometry.coordinates')
        #data_set.multi_lookup_all_paths(inserted_keys, path='dict_value.name')
        #data_set.counter_all_paths(inserted_keys, 1, path='height')
        #data_set.counter_all_paths(inserted_keys, replace_string, path='array')

    def test_simple_dataset_multi_lookup2(self):
        pass

    def generate_string(self, range_val=100):
        # Build a random lowercase string and wrap it in double quotes so it can
        # be passed as a JSON string literal to the sub-document mutation calls.
        long_string = ''.join(chr(97 + randint(0, 25)) for i in range(range_val))
        return '"' + long_string + '"'

    def _get_path(self, subdoc_elt=None, levels=None):
        # Build a dotted path of the same element repeated `levels` times,
        # e.g. _get_path('child', 3) returns 'child.child.child'.
        subdoc_path = subdoc_elt
        for i in xrange(levels-1):
            subdoc_path += '.' + subdoc_elt
        return subdoc_path

class SimpleDataSet(SubdocSanityTests):
    def __init__(self, helper, num_docs):
        self.helper = helper
        self.num_docs = num_docs
        self.name = "simple_dataset"
        self.log = logger.Logger.get_logger()

    def load(self):
        inserted_keys = self.helper.insert_docs(self.num_docs, self.name)
        return inserted_keys

    def get_all_docs(self, inserted_keys, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.get_sd(in_key, path)
            except Exception as e:
                self.helper.testcase.fail(
                    "Unable to get key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def upsert_all_docs(self, inserted_keys, long_string, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.dict_upsert_sd(in_key, path, long_string)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to upsert key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def add_all_docs(self, inserted_keys, long_string, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.dict_add_sd(in_key, path, long_string)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to add key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def remove_all_docs(self, inserted_keys, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.delete_sd(in_key, path)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to remove value for key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def exists_all_docs(self, inserted_keys, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.exists_sd(in_key, path)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to validate value for key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def replace_all_docs(self, inserted_keys, long_string, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.replace_sd(in_key, path, long_string)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to replace for key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def array_push_last(self, inserted_keys, long_string, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.array_push_last_sd(in_key, path, long_string)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to array push last for key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def array_push_first(self, inserted_keys, long_string, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.array_push_first_sd(in_key, path, long_string)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to array push first for key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def counter_all_paths(self, inserted_keys, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.counter_sd(in_key, path, 10000)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to counter incr/decr for key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def array_add_unique(self, inserted_keys, long_string, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.array_add_unique_sd(in_key, path, long_string)
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to add array_unique key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def multi_lookup_all_paths(self, inserted_keys, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.multi_lookup_sd(in_key, path)
                print data
            except Exception as e:
                print '[ERROR] {}'.format(e)
                self.helper.testcase.fail(
                    "Unable to multi lookup key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

class DeeplyNestedDataSet(SubdocSanityTests):
    def __init__(self, helper, num_docs):
        self.helper = helper
        self.num_docs = num_docs
        self.name = "deeplynested_dataset"
        self.levels = 30
        self.log = logger.Logger.get_logger()

    def load(self, long_path=False):
        inserted_keys = self.helper.insert_nested_docs(self.num_docs, self.name, self.levels, long_path)
        return inserted_keys, self.levels

    def get_all_docs(self, inserted_keys, path):
        for in_key in inserted_keys:
            num_tries = 1
            try:
                opaque, cas, data = self.helper.client.get_sd(in_key, path)
                #self.log.info(data)
                #assert data == check_data
            except Exception as e:
                self.log.info(e)
                self.helper.testcase.fail(
                    "Unable to get key {0} for path {1} after {2} tries"
                    .format(in_key, path, num_tries))

    def upsert_all_docs(self):
        pass
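
# Usage sketch (assumptions noted): these tests are meant to be driven by the
# Couchbase testrunner harness, which backs self.helper.input and supplies
# parameters such as "num-docs". Assuming this file is importable as
# subdoc.subdoc_sanity (the module path, .ini file name, and parameter value
# below are illustrative only), an invocation might look like:
#
#   ./testrunner -i cluster.ini \
#       -t subdoc.subdoc_sanity.SubdocSanityTests.test_simple_dataset_sanity,num-docs=100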