Not a member of Pastebin yet? Sign Up — it unlocks many cool features!
- Total time: 0.012725 s
- File: /home/amanda-simbiose/Documents/API_s1search/slicing_dice-api_python3/api/tests/insertion_performance/wrk_scripts/insert_for_profile.py
- Function: process_entities_in_request_for_test at line 305
- Line # Hits Time Per Hit % Time Line Contents
- ==============================================================
- 305 async def process_entities_in_request_for_test(entities, client_id, project_id,
- 306 dimension,
- 307 test, permission_fields,
- 308 permitted_fields,
- 309 auto_create, permission,
- 310 force_new_entity=False):
- 311
- 312 1 4.0 4.0 0.0 entity_ids, remaining_names = await util.get_entity_ids(
- 313 1 6.0 6.0 0.0 entities.keys(), str(client_id), str(project_id), test, dimension,
- 314 1 8633.0 8633.0 67.8 force_new_entity=force_new_entity)
- 315 1 3.0 3.0 0.0 if test:
- 316 metrics_name = "new-entities-test"
- 317 else:
- 318 1 2.0 2.0 0.0 metrics_name = "new-entities"
- 319
- 320 1 5.0 5.0 0.0 if project_id != METRICS.project_id and len(remaining_names) > 1:
- 321 data_to_metric = await make_metric_command(
- 322 metrics_name, client_id, project_id, len(remaining_names))
- 323 METRICS.add_to_bulk(data_to_metric)
- 324
- 325 1 3.0 3.0 0.0 fields_as_search, list_of_field_values = get_fields(project_id,
- 326 1 2.0 2.0 0.0 dimension,
- 327 1 2.0 2.0 0.0 entities,
- 328 1 2.0 2.0 0.0 (permission_fields,
- 329 1 2.0 2.0 0.0 permitted_fields,
- 330 1 2.0 2.0 0.0 permission),
- 331 1 642.0 642.0 5.0 test)
- 332
- 333 2 7.0 3.5 0.1 for field_ in fields_as_search:
- 334 1 2.0 2.0 0.0 if field_['api_name'] == "":
- 335 raise exceptions.FieldInvalidApiNameError(
- 336 endpoint="Index", apiname=field_['api_name'])
- 337
- 338 1 5.0 5.0 0.0 s1search_field_name = util.get_s1search_name(
- 339 1 14.0 14.0 0.1 project_id, dimension, 'entity-id', test)
- 340 1 23.0 23.0 0.2 if not util.check_field_cache(project_id, dimension, s1search_field_name):
- 341 dict_entity_id = {
- 342 1 3.0 3.0 0.0 "api_name": "entity-id",
- 343 1 3.0 3.0 0.0 "table": dimension
- 344 }
- 345 1 2.0 2.0 0.0 fields_as_search.append(dict_entity_id)
- 346 1 2.0 2.0 0.0 list_of_field_values.append(dict_entity_id)
- 347
- 348 1 3.0 3.0 0.0 util.add_field_cache(project_id, dimension, s1search_field_name,
- 349 1 30.0 30.0 0.2 dict_entity_id)
- 350 1 13.0 13.0 0.1 field_uniq = [_f for _f in fields_as_search if
- 351 _f['api_name'] == 'entity-id']
- 352 1 2.0 2.0 0.0 if not field_uniq:
- 353 entity_id = util.check_field_cache(project_id, dimension,
- 354 s1search_field_name)
- 355 fields_as_search.append(entity_id)
- 356 list_of_field_values.append(entity_id)
- 357 1 2.0 2.0 0.0 if fields_as_search:
- 358 1 3.0 3.0 0.0 fields_as = util.search_fields_as(project_id, dimension,
- 359 1 250.0 250.0 2.0 fields_as_search, test)
- 360 else:
- 361 fields_as = []
- 362 1 9.0 9.0 0.1 new_fields = set([x for x in fields_as if isinstance(x, str)])
- 363 1 5.0 5.0 0.0 new_fields = list(new_fields)
- 364 1 3.0 3.0 0.0 if len(new_fields) > 0:
- 365 if 'column' in auto_create:
- 366 fields_as = await index.auto_create_fields_s1(
- 367 client_id, project_id, dimension, test, new_fields,
- 368 list_of_field_values, permission, auto_create)
- 369 else:
- 370 if new_fields == ["entity-id"]:
- 371 fields_as = await index.auto_create_fields_s1(
- 372 client_id, project_id, dimension, test, new_fields,
- 373 list_of_field_values, permission, auto_create)
- 374 else:
- 375 raise exceptions.IndexFieldNotActiveError(
- 376 parameter=new_fields[0])
- 377
- 378 51 121.0 2.4 1.0 for entity_name, fields in entities.items():
- 379 50 126.0 2.5 1.0 if len(entity_name) > ENTITY_NAME_MAX_LENGTH:
- 380 LOGGER.info('Entity name is too big %s', entity_name)
- 381 raise exceptions.IndexEntityNameTooBigError(more_info={
- 382 'entity-id': entity_name,
- 383 'max-length': ENTITY_NAME_MAX_LENGTH
- 384 })
- 385
- 386 50 130.0 2.6 1.0 if len(entity_name) == 0:
- 387 raise exceptions.IndexEmptyEntityIdError()
- 388
- 389 50 144.0 2.9 1.1 if len(fields) == 0:
- 390 raise exceptions.IndexInvalidEntityError()
- 391
- 392 100 281.0 2.8 2.2 for field_name, field_value in fields.items():
- 393 50 116.0 2.3 0.9 if field_name == 'entity-id':
- 394 raise exceptions.InvalidIndexationOnEntityIdFieldError(
- 395 more_info={
- 396 "entity": entity_name,
- 397 "msg": "The field \"entity-id\" is an internal field and "
- 398 "its value is based on indexed entity ID. "
- 399 "You are not allowed to index directly on it."
- 400 })
- 401
- 402 50 193.0 3.9 1.5 field = [_f for _f in fields_as if _f['api_name'] == field_name]
- 403
- 404 50 132.0 2.6 1.0 s1search_field_name = util.get_s1search_name(project_id, dimension,
- 405 50 190.0 3.8 1.5 field_name, test)
- 406 50 122.0 2.4 1.0 if not field:
- 407 field = util.check_field_cache(
- 408 project_id, dimension, s1search_field_name)
- 409 if not field:
- 410 LOGGER.info('Index field not active %s', field_name)
- 411 raise exceptions.IndexFieldNotActiveError(
- 412 more_info={'field-name': field_name})
- 413 else:
- 414 50 127.0 2.5 1.0 if not util.check_field_cache(
- 415 50 569.0 11.4 4.5 project_id, dimension, s1search_field_name):
- 416 1 4.0 4.0 0.0 util.add_field_cache(
- 417 1 31.0 31.0 0.2 project_id, dimension, s1search_field_name, field)
- 418 50 154.0 3.1 1.2 if isinstance(field_value, (list, tuple)):
- 419 for item in field_value:
- 420 if isinstance(field, list):
- 421 field_to_index = field[0]
- 422 else:
- 423 field_to_index = field
- 424 index.validate_field_param(field_to_index,
- 425 field_name, item)
- 426 else:
- 427 50 125.0 2.5 1.0 if isinstance(field, list):
- 428 50 128.0 2.6 1.0 field_to_index = field[0]
- 429 else:
- 430 field_to_index = field
- 431 index.validate_field_param(field_to_index,
- 432 field_name,
- 433 field_value)
- 434 50 225.0 4.5 1.8 field_uniq = [_f for _f in fields_as if _f['api_name'] == 'entity-id']
- 435
- 436 50 116.0 2.3 0.9 if entity_name in remaining_names:
- 437 index.validate_field_param(field_uniq[0], 'entity-id', entity_name)
- 438 1 2.0 2.0 0.0 return entity_ids
- Process finished with exit code 0
Add Comment
Please, Sign In to add comment