2016-02-24 06:57:29 -05:00
|
|
|
class FollowRemoteAccountService < BaseService
  # Find or create a local account for a remote user.
  #
  # When creating, look up the user's webfinger and fetch all
  # important information from their feed.
  #
  # @param [String] uri User URI in the form of username@domain
  # @raise [Goldfinger::Error] if the remote feed lacks a PubSubHubbub hub
  #   or an author URI
  # @return [Account]
  def call(uri)
    username, domain = uri.split('@')

    # A bare username (no domain) or our own domain means a local account
    return Account.find_local(username) if domain == Rails.configuration.x.local_domain || domain.nil?

    account = Account.find_remote(username, domain)
    return account unless account.nil?

    Rails.logger.debug "Creating new remote account for #{uri}"

    account = Account.new(username: username, domain: domain)

    # Webfinger lookup yields the feed, salmon, profile and key endpoints
    data = Goldfinger.finger("acct:#{uri}")

    account.remote_url  = data.link('http://schemas.google.com/g/2010#updates-from').href
    account.salmon_url  = data.link('salmon').href
    account.url         = data.link('http://webfinger.net/rel/profile-page').href
    account.public_key  = magic_key_to_pem(data.link('magic-public-key').href)
    account.private_key = nil

    feed = get_feed(account.remote_url)
    hubs = feed.xpath('//xmlns:link[@rel="hub"]')

    raise Goldfinger::Error, 'No PubSubHubbub hubs found' if hubs.empty? || hubs.first.attribute('href').nil?

    # Look the author URI up once and reuse it for both the check and the value
    author_uri = feed.at_xpath('/xmlns:feed/xmlns:author/xmlns:uri')
    raise Goldfinger::Error, 'No author URI found' if author_uri.nil?

    account.uri     = author_uri.content
    account.hub_url = hubs.first.attribute('href').value

    get_profile(feed, account)
    account.save!

    account
  end

  private

  # Fetch the remote account's Atom feed and parse it into a Nokogiri document.
  def get_feed(url)
    response = http_client.get(Addressable::URI.parse(url))
    Nokogiri::XML(response)
  end

  # Populate the account's profile fields from the feed's author element.
  def get_profile(xml, account)
    author = xml.at_xpath('/xmlns:feed/xmlns:author')
    update_remote_profile_service.call(author, account)
  end

  # Convert a Salmon "magic-public-key" (RSA.modulus.exponent, with modulus
  # and exponent base64url-encoded big-endian integers) into PEM format.
  def magic_key_to_pem(magic_key)
    _, modulus, exponent = magic_key.split('.')
    modulus, exponent = [modulus, exponent].map { |n| Base64.urlsafe_decode64(n).bytes.inject(0) { |num, byte| (num << 8) | byte } }

    key   = OpenSSL::PKey::RSA.new
    key.n = modulus
    key.e = exponent

    key.to_pem
  end

  # Memoized collaborator used by #get_profile.
  def update_remote_profile_service
    @update_remote_profile_service ||= UpdateRemoteProfileService.new
  end

  # HTTP client with generous timeouts for slow remote instances.
  def http_client
    HTTP.timeout(:per_operation, write: 20, connect: 20, read: 50)
  end
end
|
2016-09-17 11:03:36 -04:00
|
|
|
|