A small tool to view real-world ActivityPub objects as JSON! Enter a URL
or username from Mastodon or a similar service below, and we'll send a
request with the right `Accept` header to the server to view the
underlying object.
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"ostatus": "http://ostatus.org#",
"atomUri": "ostatus:atomUri",
"inReplyToAtomUri": "ostatus:inReplyToAtomUri",
"conversation": "ostatus:conversation",
"sensitive": "as:sensitive",
"toot": "http://joinmastodon.org/ns#",
"votersCount": "toot:votersCount",
"blurhash": "toot:blurhash",
"focalPoint": {
"@container": "@list",
"@id": "toot:focalPoint"
},
"Hashtag": "as:Hashtag"
}
],
"id": "https://mstdn.social/users/AlexCrimi/statuses/114477449352416239",
"type": "Note",
"summary": null,
"inReplyTo": null,
"published": "2025-05-09T10:40:52Z",
"url": "https://mstdn.social/@AlexCrimi/114477449352416239",
"attributedTo": "https://mstdn.social/users/AlexCrimi",
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"cc": [
"https://mstdn.social/users/AlexCrimi/followers"
],
"sensitive": false,
"atomUri": "https://mstdn.social/users/AlexCrimi/statuses/114477449352416239",
"inReplyToAtomUri": null,
"conversation": "tag:mstdn.social,2025-05-09:objectId=551234324:objectType=Conversation",
"content": "<p>Another cool <a href=\"https://mstdn.social/tags/LLMs\" class=\"mention hashtag\" rel=\"tag\">#<span>LLMs</span></a> fine-tuning : </p><p>LORSA, A Sparse <a href=\"https://mstdn.social/tags/Attention\" class=\"mention hashtag\" rel=\"tag\">#<span>Attention</span></a> Mechanism That Recovers Atomic Attention Units Hidden in Transformer Superposition<br /> Code: <a href=\"https://github.com/OpenMOSS/Lorsa\" target=\"_blank\" rel=\"nofollow noopener noreferrer\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"\">github.com/OpenMOSS/Lorsa</span><span class=\"invisible\"></span></a><br /> Model: <a href=\"https://huggingface.co/collections/fnlp/low-rank-sparse-attention-680f28a37f982a9e7d6bbab0\" target=\"_blank\" rel=\"nofollow noopener noreferrer\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"ellipsis\">huggingface.co/collections/fnl</span><span class=\"invisible\">p/low-rank-sparse-attention-680f28a37f982a9e7d6bbab0</span></a><br /> Paper: <a href=\"https://arxiv.org/abs/2504.20938\" target=\"_blank\" rel=\"nofollow noopener noreferrer\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"\">arxiv.org/abs/2504.20938</span><span class=\"invisible\"></span></a></p>",
"contentMap": {
"en": "<p>Another cool <a href=\"https://mstdn.social/tags/LLMs\" class=\"mention hashtag\" rel=\"tag\">#<span>LLMs</span></a> fine-tuning : </p><p>LORSA, A Sparse <a href=\"https://mstdn.social/tags/Attention\" class=\"mention hashtag\" rel=\"tag\">#<span>Attention</span></a> Mechanism That Recovers Atomic Attention Units Hidden in Transformer Superposition<br /> Code: <a href=\"https://github.com/OpenMOSS/Lorsa\" target=\"_blank\" rel=\"nofollow noopener noreferrer\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"\">github.com/OpenMOSS/Lorsa</span><span class=\"invisible\"></span></a><br /> Model: <a href=\"https://huggingface.co/collections/fnlp/low-rank-sparse-attention-680f28a37f982a9e7d6bbab0\" target=\"_blank\" rel=\"nofollow noopener noreferrer\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"ellipsis\">huggingface.co/collections/fnl</span><span class=\"invisible\">p/low-rank-sparse-attention-680f28a37f982a9e7d6bbab0</span></a><br /> Paper: <a href=\"https://arxiv.org/abs/2504.20938\" target=\"_blank\" rel=\"nofollow noopener noreferrer\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"\">arxiv.org/abs/2504.20938</span><span class=\"invisible\"></span></a></p>"
},
"attachment": [
{
"type": "Document",
"mediaType": "image/jpeg",
"url": "https://media.mstdn.social/media_attachments/files/114/477/449/195/530/010/original/9d2e7d4a467dc555.jpeg",
"name": null,
"blurhash": "U9RfnHM}-oXS_Ns;i^bbD-WBt6ax-;ofWYo2",
"width": 1146,
"height": 716
}
],
"tag": [
{
"type": "Hashtag",
"href": "https://mstdn.social/tags/llms",
"name": "#llms"
},
{
"type": "Hashtag",
"href": "https://mstdn.social/tags/attention",
"name": "#attention"
}
],
"replies": {
"id": "https://mstdn.social/users/AlexCrimi/statuses/114477449352416239/replies",
"type": "Collection",
"first": {
"type": "CollectionPage",
"next": "https://mstdn.social/users/AlexCrimi/statuses/114477449352416239/replies?only_other_accounts=true&page=true",
"partOf": "https://mstdn.social/users/AlexCrimi/statuses/114477449352416239/replies",
"items": []
}
},
"likes": {
"id": "https://mstdn.social/users/AlexCrimi/statuses/114477449352416239/likes",
"type": "Collection",
"totalItems": 0
},
"shares": {
"id": "https://mstdn.social/users/AlexCrimi/statuses/114477449352416239/shares",
"type": "Collection",
"totalItems": 0
}
}