The little things give you away... A collection of various small helper stuff
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 

43 lines
1.2 KiB

  1. #!/bin/bash
  2. # Read a list of URLs from stdin, replace suitable social media URLs with correctly capitalised version
  3. errorUrls=()
  4. while read -r url
  5. do
  6. if [[ "${url}" =~ ^https?://(www|m|[a-z][a-z]-[a-z][a-z]).facebook.com/[^/]+/?$ ]]
  7. then
  8. user="$(curl -s -A 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36' -H 'Accept-Language: en-US,en;q=0.5' "https://www.${url#*.}" | grep -Po '<div\s[^>]*(?<=\s)data-key\s*=\s*"tab_home".*?</div>' | grep -Po '<a\s[^>]*(?<=\s)href="/\K[^/]+')"
  9. if [[ "${user}" ]]
  10. then
  11. echo "https://www.facebook.com/${user}/"
  12. else
  13. errorUrls+=("${url}")
  14. echo "${url}"
  15. fi
  16. elif [[ "${url}" =~ ^https?://twitter\.com/[^/]+$ ]]
  17. then
  18. user="$(snscrape --max-results 1 twitter-user "${url##*/}" | grep -Po '^https?://twitter\.com/\K[^/]+')"
  19. if [[ "${user}" ]]
  20. then
  21. echo "https://twitter.com/${user}"
  22. else
  23. errorUrls+=("${url}")
  24. echo "${url}"
  25. fi
  26. elif [[ "${url}" =~ ^https?://www\.instagram\.com/[^/]+/$ ]]
  27. then
  28. echo "${url,,}"
  29. else
  30. echo "${url}"
  31. fi
  32. done
  33. if [[ ${#errorUrls[@]} -gt 0 ]]
  34. then
  35. echo "" >&2
  36. echo "Failed to process URLs:" >&2
  37. for errorUrl in "${errorUrls[@]}"
  38. do
  39. echo "${errorUrl}" >&2
  40. done
  41. fi