#!/usr/bin/env python
import base64
import datetime
import json
import logging
import os
import pathlib
import shutil
import time
from typing import Optional
import urllib.parse

import click
import minio
import requests

from progress import Progress

logging.basicConfig(level=logging.INFO)

BACKFEED_DELIM = "\n"

# TODO: Add rsync support
# TODO: Add rsync+ssh support
# TODO: Add webdav support.
# TODO: Fix the "ctrl-c handling" logic so it actually cleans up in the s3 bucket.
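
# Call fn(*args, **kwargs) until it succeeds, logging each failure and sleeping
# with exponential backoff capped at 64 seconds between attempts.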
def retry_failures(fn, msg, *args, **kwargs):
    tries = 0
    while True:
        try:
            return fn(*args, **kwargs)
        except Exception:
            logging.exception(msg)
            delay = min(2 ** tries, 64)
            tries = tries + 1
            logging.info(f"Sleeping {delay} seconds...")
            time.sleep(delay)

@click.group()
def sender():
    pass
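
# One pass over the upload queue: claim the first item directory by renaming it
# into the work directory, upload it via single_impl, and return True. Returns
# False when the queue is empty.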
def watch_pass(input_directory: pathlib.Path, work_directory: pathlib.Path, ia_collection: str, ia_item_title: str,
               ia_item_prefix: str, ia_item_date: str, project: str, dispatcher: str, delete: bool, backfeed_key: str):
    logging.info("Checking for new items...")
    for original_directory in input_directory.iterdir():
        if original_directory.is_dir():
            original_name = original_directory.name
            new_directory = work_directory.joinpath(original_name)
            try:
                original_directory.rename(new_directory)
            except FileNotFoundError:
                logging.warning(f"Unable to move item {original_directory}")
                continue
            single_impl(new_directory, ia_collection, ia_item_title, ia_item_prefix, ia_item_date, project,
                        dispatcher, delete, backfeed_key)
            return True
    return False
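
# `watch` polls the upload queue forever, processing one item per pass and
# sleeping 10 seconds whenever the queue is empty.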
@sender.command()
@click.option('--input-directory', envvar='UPLOAD_QUEUE_DIR', default="/data/upload-queue",
              type=click.Path(exists=True))
@click.option('--work-directory', envvar='UPLOADER_WORKING_DIR', default="/data/uploader-work",
              type=click.Path(exists=True))
@click.option('--ia-collection', envvar='IA_COLLECTION', required=True)
@click.option('--ia-item-title', envvar='IA_ITEM_TITLE', required=True)
@click.option('--ia-item-prefix', envvar='IA_ITEM_PREFIX', required=True)
@click.option('--ia-item-date', envvar='IA_ITEM_DATE', required=False)
@click.option('--project', envvar='PROJECT', required=True)
@click.option('--dispatcher', envvar='DISPATCHER', required=True)
@click.option('--delete/--no-delete', envvar='DELETE', default=False)
@click.option('--backfeed-key', envvar='BACKFEED_KEY', required=True)
def watch(input_directory: pathlib.Path, work_directory: pathlib.Path, ia_collection: str, ia_item_title: str,
          ia_item_prefix: str, ia_item_date: str, project: str, dispatcher: str, delete: bool, backfeed_key: str):
    if not isinstance(input_directory, pathlib.Path):
        input_directory = pathlib.Path(input_directory)
    if not isinstance(work_directory, pathlib.Path):
        work_directory = pathlib.Path(work_directory)
    while True:
        if not watch_pass(input_directory, work_directory, ia_collection, ia_item_title, ia_item_prefix, ia_item_date,
                          project, dispatcher, delete, backfeed_key):
            logging.info("No item found, sleeping...")
            time.sleep(10)
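
# `single` uploads one already-prepared item directory and exits.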
@sender.command()
@click.option('--item-directory', type=click.Path(exists=True), required=True)
@click.option('--ia-collection', envvar='IA_COLLECTION', required=True)
@click.option('--ia-item-title', envvar='IA_ITEM_TITLE', required=True)
@click.option('--ia-item-prefix', envvar='IA_ITEM_PREFIX', required=True)
@click.option('--ia-item-date', envvar='IA_ITEM_DATE', required=False)
@click.option('--project', envvar='PROJECT', required=True)
@click.option('--dispatcher', envvar='DISPATCHER', required=True)
@click.option('--delete/--no-delete', envvar='DELETE', default=False)
@click.option('--backfeed-key', envvar='BACKFEED_KEY', required=True)
def single(item_directory: pathlib.Path, ia_collection: str, ia_item_title: str, ia_item_prefix: str,
           ia_item_date: Optional[str], project: str, dispatcher: str, delete: bool, backfeed_key: str):
    single_impl(item_directory, ia_collection, ia_item_title, ia_item_prefix, ia_item_date, project, dispatcher, delete,
                backfeed_key)
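
# Core upload routine: derive IA_ITEM_DATE from the directory name if it was not
# given, write __upload_meta.json, ask the dispatcher for an offload target,
# upload every file to the assigned target, then submit a backfeed item.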
def single_impl(item_directory: pathlib.Path, ia_collection: str, ia_item_title: str, ia_item_prefix: str,
                ia_item_date: Optional[str], project: str, dispatcher: str, delete: bool, backfeed_key: str):
    if not isinstance(item_directory, pathlib.Path):
        item_directory = pathlib.Path(item_directory)
    logging.info(f"Processing item {item_directory}...")

    if ia_item_date is None:
        s = item_directory.name.split("_")
        if len(s) > 0:
            ds = s[0]
            try:
                d = datetime.datetime.strptime(ds, "%Y%m%d%H%M%S")
                ia_item_date = d.strftime("%Y-%m")
            except ValueError:
                pass

    meta_json_loc = item_directory.joinpath('__upload_meta.json')
    if meta_json_loc.exists():
        raise Exception("META JSON EXISTS WTF")

    meta_json = {
        "IA_COLLECTION": ia_collection,
        "IA_ITEM_TITLE": f"{ia_item_title} {item_directory.name}",
        "IA_ITEM_DATE": ia_item_date,
        "IA_ITEM_NAME": f"{ia_item_prefix}{item_directory.name}",
        "PROJECT": project,
    }

    with open(meta_json_loc, 'w') as f:
        f.write(json.dumps(meta_json))
    logging.info("Wrote metadata json.")

    total_size = 0
    files = list(item_directory.glob("**/*"))
    for item in files:
        total_size = total_size + os.path.getsize(item)
    logging.info(f"Item size is {total_size} bytes across {len(files)} files.")
    meta_json["SIZE_HINT"] = str(total_size)
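
    # Ask the dispatcher where this item should be offloaded; the item metadata
    # (including the size hint) is passed as query parameters.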
    def assign_target():
        logging.info("Attempting to assign target...")
        r = requests.get(f"{dispatcher}/offload_target", params=meta_json, timeout=60)
        if r.status_code == 200:
            data = r.json()
            return data["url"]
        else:
            raise Exception(f"Invalid status code {r.status_code}: {r.text}")

    url = retry_failures(assign_target, "Failed to fetch target")
    logging.info(f"Assigned target {url}")
    parsed_url = urllib.parse.urlparse(url)
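
    # Targets are given as minio+http(s) URLs with the access key and secret
    # embedded as the URL's username and password; other schemes are rejected.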
    bf_item = None
    if parsed_url.scheme == "minio+http" or parsed_url.scheme == "minio+https":
        secure = (parsed_url.scheme == "minio+https")
        ep = parsed_url.hostname
        if parsed_url.port is not None:
            ep = f"{ep}:{parsed_url.port}"

        client = None

        def create_client():
            logging.info("Connecting to minio...")
            return minio.Minio(endpoint=ep, access_key=parsed_url.username, secret_key=parsed_url.password,
                               secure=secure)

        client = retry_failures(create_client, "Failed to connect to minio")

        bucket_name = item_directory.name.replace("_", "-")

        def make_bucket():
            logging.info("Attempting to make bucket...")
            if client.bucket_exists(bucket_name=bucket_name):
                raise Exception("Bucket already exists!")
            client.make_bucket(bucket_name=bucket_name)

        retry_failures(make_bucket, "Failed to make bucket")

        logging.info("Starting uploads...")
        for file in files:
            rel_file = file.relative_to(item_directory)

            def upload_file():
                logging.info(f"Uploading file {rel_file}...")
                client.fput_object(bucket_name=bucket_name, object_name=str(rel_file), file_path=file,
                                   progress=Progress())

            retry_failures(upload_file, f"Failed to upload {rel_file}")

        item_data = {"url": url, "item_name": item_directory.name, "bucket_name": bucket_name}
        bf_item_part = base64.urlsafe_b64encode(str(json.dumps(item_data)).encode("UTF-8")).decode("UTF-8")
        bf_item = f"{project}:{parsed_url.hostname}:{bf_item_part}"
    else:
        raise Exception(f"Unable to upload, don't understand url: {url}")

    if bf_item is None:
        raise Exception("Unable to create backfeed item")

    if backfeed_key == "SKIPBF":
        logging.warning(f"Skipping backfeed! Would have submitted: {bf_item}")
    else:
        def submit_item():
            u = f"https://legacy-api.arpa.li/backfeed/legacy/{backfeed_key}"
            logging.info(f"Attempting to submit bf item {bf_item} to {u}...")
            resp = requests.post(u, params={"skipbloom": "1", "delimiter": BACKFEED_DELIM},
                                 data=f"{bf_item}{BACKFEED_DELIM}".encode("UTF-8"), timeout=60)
            if resp.status_code != 200:
                raise Exception(f"Failed to submit to backfeed {resp.status_code}: {resp.text}")

        retry_failures(submit_item, "Failed to submit to backfeed")
        logging.info("Backfeed submit complete!")

    if delete:
        logging.info("Removing item...")
        shutil.rmtree(item_directory)

    logging.info("Upload complete!")

if __name__ == '__main__':
    sender()