storing.py

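"""Exports processed COVID-19 papers to BigQuery and their extracted medical
entities to Datastore.

The script runs in two stages: (1) the raw, translated, and curated text files
stored in GCS are exported to a BigQuery table; (2) the curated English text is
run through a scispaCy NER model and the resulting entities are uploaded to
Datastore, grouped by UMLS semantic category.
"""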
from google.cloud import storage, datastore
from google.oauth2 import service_account
from covid19_ISMIR.utils.bq_fcn import bqCreateDataset, bqCreateTable, exportItems2BQ
from covid19_ISMIR.utils.ner_fcn import loadModel, addTask, extractMedEntities
import en_core_sci_lg
import logging
import time
import os
import pandas as pd
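
# NOTE (added): the root logger defaults to WARNING, so the logging.info calls
# below would otherwise be silent.
logging.basicConfig(level=logging.INFO)

# Pipeline configuration is supplied through environment variables.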
project_id = os.getenv('PROJECT_ID')
bucket_name = os.getenv('BUCKET_NAME')
location = os.getenv('LOCATION')
key_path = os.getenv('SA_KEY_PATH')
dataset_name = os.getenv('BQ_DATASET_NAME')
table_name = os.getenv('BQ_TABLE_NAME')
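
# Authenticate explicitly with a service-account key file instead of relying
# on Application Default Credentials.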
credentials = service_account.Credentials.from_service_account_file(key_path)
storage_client = storage.Client(credentials=credentials,
                                project=project_id)
datastore_client = datastore.Client(credentials=credentials,
                                    project=project_id)
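
# Stage 1: export the raw, translated, and curated text of each document to BigQuery.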
gcs_source_prefix = 'raw_txt'
lst_blobs = storage_client.list_blobs(bucket_or_name=bucket_name,
                                      prefix=gcs_source_prefix)
start_time = time.time()
try:
    dataset_id = bqCreateDataset(dataset_name)
    logging.info("Dataset {} was successfully created/retrieved.".format(dataset_name))
except Exception as e:
    logging.error("An error occurred while creating the dataset: %s", e)

try:
    table_id = bqCreateTable(dataset_id, table_name)
    logging.info("Table {} was successfully created/retrieved.".format(table_name))
except Exception as e:
    logging.error("An error occurred while creating the table: %s", e)
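
# Iterate over every raw text file and export its three versions (raw,
# English translation, curated English) to BigQuery.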
for blob in lst_blobs:
    doc_title = blob.name.split('/')[-1].split('.txt')[0]

    # Fetch the raw (original language) text blob
    it_raw_blob = storage_client.get_bucket(bucket_name).get_blob('raw_txt/{}.txt'.format(doc_title))

    # Set the GCS path of the English translation and fetch its blob
    path_blob_eng_raw = 'eng_txt/{}/{}_raw_txt_{}_en_translations.txt'.format(doc_title,
                                                                              bucket_name,
                                                                              doc_title)
    eng_raw_blob = storage_client.get_bucket(bucket_name).get_blob(path_blob_eng_raw)

    # Fetch the curated English text blob
    curated_eng_blob = storage_client.get_bucket(bucket_name) \
        .get_blob('curated_eng_txt/{}.txt'.format(doc_title))

    # Populate the BigQuery table with all three versions of the document
    exportItems2BQ(dataset_id, table_id, doc_title, it_raw_blob, eng_raw_blob, curated_eng_blob)
total_time = time.time() - start_time
logging.info('The export to BigQuery was completed successfully and took {} minutes.'.format(round(total_time / 60, 1)))
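
# Stage 2: extract medical entities from the curated English text and upload
# them to Datastore.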
curated_gcs_source_prefix = 'curated_eng_txt'
lst_curated_blobs = storage_client.list_blobs(bucket_or_name=bucket_name,
                                              prefix=curated_gcs_source_prefix)
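
# Load the scispaCy large biomedical model used for named-entity extraction.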
nlp = loadModel(model=en_core_sci_lg)
# Load the UMLS TUI reference table once rather than on every loop iteration.
df_reference_TUIs = pd.read_csv('~/data/UMLS_tuis.csv')
start_time = time.time()
for blob in lst_curated_blobs:
    doc_title = blob.name.split('/')[-1].split('.txt')[0]

    # Download the curated text as a string
    eng_string = blob.download_as_string().decode('utf-8')

    # Run the NLP pipeline over the document
    doc = nlp(eng_string)

    # Extract medical entities and their UMLS Type Unique Identifiers (TUIs)
    UMLS_tuis_entity = extractMedEntities(doc)

    # Map each entity's TUI to its semantic category via the reference table
    entities = list(UMLS_tuis_entity.keys())
    TUIs = list(UMLS_tuis_entity.values())
    df_entities = pd.DataFrame(data={'entity': entities, 'TUIs': TUIs})
    df_annotated_text_entities = pd.merge(df_entities, df_reference_TUIs, how='inner', on=['TUIs'])
    # Group entities by semantic category before uploading to Datastore
    entities_dict = {}
    for idx in range(df_annotated_text_entities.shape[0]):
        category = df_annotated_text_entities.iloc[idx].values[2]
        med_entity = df_annotated_text_entities.iloc[idx].values[0]
        # Create the category's list on first occurrence, then append
        entities_dict.setdefault(category, []).append(med_entity)

    # API call
    key = addTask(datastore_client, doc_title, entities_dict)
    logging.info('The upload of {} entities is done.'.format(doc_title))
total_time = time.time() - start_time
logging.info(
    "The export to Datastore was completed successfully and took {} minutes.".format(round(total_time / 60, 1)))