The little things give you away... A collection of various small helper stuff
#!/bin/bash
# Read a list of URLs from stdin and echo each one back, replacing suitable social
# media profile URLs with the correctly capitalised canonical version.
# URLs that could not be resolved are echoed unchanged and listed on stderr at the end.
errorUrls=()
while read -r url
do
    if [[ "${url}" =~ ^https?://(www|m|[a-z][a-z]-[a-z][a-z])\.facebook\.com/[^/]+/?$ ]]
    then
        # Fetch the page with a browser user agent and pull the username out of the 'tab_home' link.
        user="$(curl -s -A 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36' -H 'Accept-Language: en-US,en;q=0.5' "https://www.${url#*.}" | grep -Po '<div\s[^>]*(?<=\s)data-key\s*=\s*"tab_home".*?</div>' | grep -Po '<a\s[^>]*(?<=\s)href="/\K[^/]+')"
        if [[ "${user}" ]]
        then
            echo "https://www.facebook.com/${user}/"
        else
            errorUrls+=("${url}")
            echo "${url}"
        fi
    elif [[ "${url}" =~ ^https?://(www\.)?twitter\.com/[^/]+$ ]]
    then
        # Let snscrape resolve the canonical capitalisation of the Twitter handle.
        user="$(snscrape --max-results 1 twitter-user "${url##*/}" | grep -Po '^https?://twitter\.com/\K[^/]+')"
        if [[ "${user}" ]]
        then
            echo "https://twitter.com/${user}"
        else
            errorUrls+=("${url}")
            echo "${url}"
        fi
    elif [[ "${url}" =~ ^https?://(www\.)?instagram\.com/[^/]+/?$ ]]
    then
        # Instagram usernames are case-insensitive; the canonical form is lowercase.
        user="${url%/}"
        user="${user##*/}"
        echo "https://www.instagram.com/${user,,}/"
    else
        echo "${url}"
    fi
done
if [[ ${#errorUrls[@]} -gt 0 ]]
then
    echo "" >&2
    echo "Failed to process URLs:" >&2
    for errorUrl in "${errorUrls[@]}"
    do
        echo "${errorUrl}" >&2
    done
fi
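
A minimal usage sketch, assuming the script above is saved as fix-social-urls.sh and made executable (the filename and the sample URLs are illustrative, not part of the repository): pipe a newline-separated list of URLs into it, capture the corrected list from stdout, and collect the failure report from stderr.

    # Hypothetical invocation; the script name and input URLs are assumptions.
    printf '%s\n' \
        'https://m.facebook.com/somepage' \
        'https://twitter.com/SomeUser' \
        'https://www.instagram.com/Some.User/' \
        | ./fix-social-urls.sh > corrected-urls.txt 2> failures.log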