A framework for quick web archiving

import errno
import fcntl
import gzip
import io
import itertools
import json
import logging
import os
import qwarc.utils
import tempfile
import time
import warcio


class WARC:
	def __init__(self, prefix, maxFileSize, dedupe, command, specFile, specDependencies, logFilename):
		'''
		Initialise the WARC writer

		prefix: str, path prefix for WARCs; a dash, a five-digit number, and ".warc.gz" will be appended.
		maxFileSize: int, maximum size of an individual WARC. Use 0 to disable splitting.
		dedupe: bool, whether to enable record deduplication
		command: list, the command line call for qwarc
		specFile: str, path to the spec file
		specDependencies: qwarc.utils.SpecDependencies
		logFilename: str, name of the log file written by this process
		'''

		self._prefix = prefix
		self._counter = 0
		self._maxFileSize = maxFileSize
		self._closed = True
		self._file = None
		self._journalFile = None
		self._journalClean = None
		self._warcWriter = None
		self._dedupe = dedupe
		self._dedupeMap = {}
		self._command = command
		self._specFile = specFile
		self._specDependencies = specDependencies
		self._logFilename = logFilename
		self._metaWarcinfoRecordID = None

		self._write_meta_warc(self._write_initial_meta_records)

	def _ensure_opened(self):
		'''Open the next file that doesn't exist yet if there is currently no file opened'''

		if not self._closed:
			return
		while True:
			filename = f'{self._prefix}-{self._counter:05d}.warc.gz'
			try:
				# Try to open the file for writing, requiring that it does not exist yet, and attempt to get an exclusive, non-blocking lock on it
				self._file = open(filename, 'xb')
				fcntl.flock(self._file.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
			except FileExistsError:
				logging.info(f'{filename} already exists, skipping')
				self._counter += 1
			else:
				break
		logging.info(f'Opened {filename}')
		self._open_journal(filename)
		self._warcWriter = warcio.warcwriter.WARCWriter(self._file, gzip = True, warc_version = '1.1')
		self._closed = False
		self._counter += 1

	def _open_journal(self, filename):
		try:
			self._journalFile = open(f'{filename}.qwarcjournal', 'xb')
			fcntl.flock(self._journalFile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
		except FileExistsError:
			logging.error(f'{filename}.qwarcjournal already exists!')
			raise RuntimeError(f'Unable to create journal file for {filename}: {filename}.qwarcjournal already exists')
		except OSError as e:
			if e.errno == errno.EWOULDBLOCK:
				logging.error(f'{filename}.qwarcjournal is already locked!')
				raise RuntimeError(f'Unable to lock journal file {filename}.qwarcjournal')
			raise # Propagate any other OSError instead of silently continuing without a locked journal
		self._journalClean = True

	def _write_record(self, record):
		# Write the current offset to the journal file
		# Since the size can only grow, it is not necessary to explicitly delete the previous contents.
		self._journalFile.seek(0)
		previousSize = self._file.tell()
		self._journalFile.write(f'qwarc journal version: 1\noffset: {previousSize}\nwrite ok: no \n'.encode('ascii'))
		self._journalFile.flush()
		self._journalClean = False
		try:
			self._warcWriter.write_record(record)
		except (OSError, IOError):
			self._file.truncate(previousSize)
			raise
		else:
			# Mark the write as ok
			self._journalFile.seek(-4, os.SEEK_END) # len(b'no \n')
			self._journalFile.write(b'yes\n')
			self._journalFile.flush()
			self._journalClean = True
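
	# Illustrative sketch (not part of qwarc): given the journal format written above
	# ("qwarc journal version: 1\noffset: N\nwrite ok: no /yes"), a recovery step run
	# after a crash could truncate the WARC back to the last known-good offset.
	# warcPath is a hypothetical variable naming the interrupted WARC file.
	#
	#   with open(warcPath + '.qwarcjournal', 'rb') as journal:
	#       versionLine, offsetLine, okLine = journal.read().decode('ascii').splitlines()
	#   if okLine.split(':', 1)[1].strip() != 'yes':
	#       with open(warcPath, 'r+b') as warcFile:
	#           warcFile.truncate(int(offsetLine.split(':', 1)[1]))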

	def _write_warcinfo_record(self):
		data = {
			'software': qwarc.utils.get_software_info(self._specDependencies.packages),
			'command': self._command,
			'files': {
				'spec': self._specFile,
				'spec-dependencies': self._specDependencies.files
			},
			'extra': self._specDependencies.extra,
		}
		payload = io.BytesIO(json.dumps(data, indent = 2).encode('utf-8'))
		# Workaround for https://github.com/webrecorder/warcio/issues/87
		digester = warcio.utils.Digester('sha1')
		digester.update(payload.getvalue())
		record = self._warcWriter.create_warc_record(
			None,
			'warcinfo',
			payload = payload,
			warc_headers_dict = {'Content-Type': 'application/json; charset=utf-8', 'WARC-Block-Digest': str(digester)},
			length = len(payload.getvalue()),
		)
		self._write_record(record)
		return record.rec_headers.get_header('WARC-Record-ID')
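
	# For orientation only: the JSON payload written by _write_warcinfo_record has the
	# shape shown below. The concrete values are illustrative, not actual qwarc output.
	#
	#   {
	#     "software": "<output of qwarc.utils.get_software_info(...)>",
	#     "command": ["qwarc", "example-spec.py"],
	#     "files": {
	#       "spec": "example-spec.py",
	#       "spec-dependencies": ["helpers.py"]
	#     },
	#     "extra": {}
	#   }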

	def write_client_response(self, response):
		'''
		Write the requests and responses stored in a ClientResponse instance to the currently opened WARC.
		A new WARC will be started automatically if the size of the current file exceeds the limit after writing all requests and responses from this `response` to the current WARC.
		'''

		self._ensure_opened()
		for r in response.iter_all():
			usec = f'{(r.rawRequestTimestamp - int(r.rawRequestTimestamp)):.6f}'[2:]
			requestDate = time.strftime(f'%Y-%m-%dT%H:%M:%S.{usec}Z', time.gmtime(r.rawRequestTimestamp))

			r.rawRequestData.seek(0, io.SEEK_END)
			length = r.rawRequestData.tell()
			r.rawRequestData.seek(0)
			requestRecord = self._warcWriter.create_warc_record(
				str(r.url),
				'request',
				payload = r.rawRequestData,
				length = length,
				warc_headers_dict = {
					'WARC-Date': requestDate,
					'WARC-IP-Address': r.remoteAddress[0],
					'WARC-Warcinfo-ID': self._metaWarcinfoRecordID,
				}
			)
			requestRecordID = requestRecord.rec_headers.get_header('WARC-Record-ID')

			r.rawResponseData.seek(0, io.SEEK_END)
			length = r.rawResponseData.tell()
			r.rawResponseData.seek(0)
			responseRecord = self._warcWriter.create_warc_record(
				str(r.url),
				'response',
				payload = r.rawResponseData,
				length = length,
				warc_headers_dict = {
					'WARC-Date': requestDate,
					'WARC-IP-Address': r.remoteAddress[0],
					'WARC-Concurrent-To': requestRecordID,
					'WARC-Warcinfo-ID': self._metaWarcinfoRecordID,
				}
			)
			payloadDigest = responseRecord.rec_headers.get_header('WARC-Payload-Digest')
			assert payloadDigest is not None
			if self._dedupe and responseRecord.payload_length > 100: # Don't deduplicate small responses; the additional headers are typically larger than the payload dedupe savings...
				if payloadDigest in self._dedupeMap:
					refersToRecordId, refersToUri, refersToDate = self._dedupeMap[payloadDigest]
					responseHttpHeaders = responseRecord.http_headers
					responseRecord = self._warcWriter.create_revisit_record(
						str(r.url),
						digest = payloadDigest,
						refers_to_uri = refersToUri,
						refers_to_date = refersToDate,
						http_headers = responseHttpHeaders,
						warc_headers_dict = {
							'WARC-Date': requestDate,
							'WARC-IP-Address': r.remoteAddress[0],
							'WARC-Concurrent-To': requestRecordID,
							'WARC-Refers-To': refersToRecordId,
							'WARC-Truncated': 'length',
							'WARC-Warcinfo-ID': self._metaWarcinfoRecordID,
						}
					)
					# Workaround for https://github.com/webrecorder/warcio/issues/94
					responseRecord.rec_headers.replace_header('WARC-Profile', 'http://netpreserve.org/warc/1.1/revisit/identical-payload-digest')
				else:
					self._dedupeMap[payloadDigest] = (responseRecord.rec_headers.get_header('WARC-Record-ID'), str(r.url), requestDate)
			self._write_record(requestRecord)
			self._write_record(responseRecord)

		if self._maxFileSize and self._file.tell() > self._maxFileSize:
			self._close_file()

	def _write_resource_records(self):
		'''Write spec file and dependencies'''
		assert self._metaWarcinfoRecordID is not None, 'write_warcinfo_record must be called first'

		for type_, contentType, fn in itertools.chain((('specfile', 'application/x-python', self._specFile),), map(lambda x: ('spec-dependency-file', 'application/octet-stream', x), self._specDependencies.files)):
			with open(fn, 'rb') as f:
				f.seek(0, io.SEEK_END)
				length = f.tell()
				f.seek(0)
				record = self._warcWriter.create_warc_record(
					f'file://{fn}',
					'resource',
					payload = f,
					length = length,
					warc_headers_dict = {'X-QWARC-Type': type_, 'WARC-Warcinfo-ID': self._metaWarcinfoRecordID, 'Content-Type': contentType},
				)
				self._write_record(record)

	def _write_initial_meta_records(self):
		self._metaWarcinfoRecordID = self._write_warcinfo_record()
		self._write_resource_records()

	def _write_log_record(self):
		assert self._metaWarcinfoRecordID is not None, 'write_warcinfo_record must be called first'

		rootLogger = logging.getLogger()
		for handler in rootLogger.handlers: #FIXME: Uses undocumented attribute handlers
			handler.flush()
		with open(self._logFilename, 'rb') as fp:
			fp.seek(0, io.SEEK_END)
			length = fp.tell()
			fp.seek(0)
			record = self._warcWriter.create_warc_record(
				f'file://{self._logFilename}',
				'resource',
				payload = fp,
				length = length,
				warc_headers_dict = {'X-QWARC-Type': 'log', 'Content-Type': 'text/plain; charset=utf-8', 'WARC-Warcinfo-ID': self._metaWarcinfoRecordID},
			)
			self._write_record(record)

	def _close_file(self):
		'''Close the currently opened WARC'''

		if not self._closed:
			self._file.close()
			journalFilename = self._journalFile.name
			self._journalFile.close()
			if self._journalClean:
				os.remove(journalFilename)
			self._warcWriter = None
			self._file = None
			self._journalFile = None
			self._journalClean = None
			self._closed = True

	def _write_meta_warc(self, callback):
		filename = f'{self._prefix}-meta.warc.gz'
		self._file = open(filename, 'ab')
		try:
			fcntl.flock(self._file.fileno(), fcntl.LOCK_EX)
			logging.info(f'Opened {filename}')
			self._open_journal(filename)
			self._warcWriter = warcio.warcwriter.WARCWriter(self._file, gzip = True, warc_version = '1.1')
			self._closed = False

			callback()
		finally:
			self._close_file()

	def close(self):
		'''Clean up everything.'''

		self._close_file()
		self._write_meta_warc(self._write_log_record)
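
A minimal usage sketch of the class above, assuming the rest of qwarc's machinery is available. The specDependencies value and the response object (a qwarc ClientResponse produced by an actual fetch) are placeholders for values that qwarc itself normally supplies, and the file names are made up.

import sys

# Placeholder: a qwarc.utils.SpecDependencies instance, normally built by qwarc's spec loader
specDependencies = ...

warc = WARC(
	prefix = 'example-crawl',
	maxFileSize = 10 * 1024 ** 3,  # start a new WARC once the current one exceeds ~10 GiB
	dedupe = True,
	command = sys.argv,
	specFile = 'example-spec.py',
	specDependencies = specDependencies,
	logFilename = 'example-crawl.log',
)
# For each fetched URL, qwarc would pass the ClientResponse to the writer:
#   warc.write_client_response(response)
warc.close()  # writes the log record into the meta WARC and releases all files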