{ localUrl: '../page/22l.html', arbitalUrl: 'https://arbital.com/p/22l', rawJsonUrl: '../raw/22l.json', likeableId: '1016', likeableType: 'page', myLikeValue: '0', likeCount: '1', dislikeCount: '0', likeScore: '1', individualLikes: [ 'TravisRivera' ], pageId: '22l', edit: '1', editSummary: '', prevEdit: '0', currentEdit: '1', wasPublished: 'true', type: 'comment', title: '"Do we have(or need) any emp..."', clickbait: '', textLength: '1512', alias: '22l', externalUrl: '', sortChildrenBy: 'recentFirst', hasVote: 'false', voteType: '', votesAnonymous: 'false', editCreatorId: 'MYass', editCreatedAt: '2016-02-22 05:23:44', pageCreatorId: 'MYass', pageCreatedAt: '2016-02-22 05:23:44', seeDomainId: '0', editDomainId: 'AlexeiAndreev', submitToDomainId: '0', isAutosave: 'false', isSnapshot: 'false', isLiveEdit: 'true', isMinorEdit: 'false', indirectTeacher: 'false', todoCount: '0', isEditorComment: 'false', isApprovedComment: 'true', isResolved: 'false', snapshotText: '', anchorContext: '', anchorText: '', anchorOffset: '0', mergedInto: '', isDeleted: 'false', viewCount: '1699', text: 'Do we have (or need) any empirical evidence that algorithmic simplicity (space) is the ideal and ultimate absolute prior? Rereading the article carefully, I see it doesn't *quite* advocate this, but I think it's very easy for a learner to pick up that misconception from the way AIXI is generally discussed, namely the assumption that we somehow know space complexity is the final answer, and I wonder whether that should be ruled out or qualified here.\n\n(I believe it's easy to pick up that misconception because it happened to me. I later came to realize I probably wouldn't bet at good odds that Space AIXI won't ever be dominated by a variant AIXI built on some other razor: the *speed* of generating environment Turing machines, for instance, instead of space. Or how about inverse cyclomatic complexity, or some more general measure of algorithm quality that humans thus far have lacked the breadth of mind to find, test, or work with mathematically? Or maybe even just the space complexity of some other machine model? Space complexity of TMs seems like extremely low-hanging fruit.)\n\nI'm hoping to hear that someone has done the work of compiling some ridiculously extensive dataset of algorithms and their competitors and demonstrating that space complexity was more predictive of domination than any of the other general metrics we could think of. If that result has been found, nobody ever seems to cite it.
Though I suppose we might not hear about it when so many people find it completely intuitive.', metaText: '', isTextLoaded: 'true', isSubscribedToDiscussion: 'false', isSubscribedToUser: 'false', isSubscribedAsMaintainer: 'false', discussionSubscriberCount: '2', maintainerCount: '1', userSubscriberCount: '0', lastVisit: '', hasDraft: 'false', votes: [], voteSummary: 'null', muVoteSummary: '0', voteScaling: '0', currentUserVote: '-2', voteCount: '0', lockedVoteType: '', maxEditEver: '0', redLinkCount: '0', lockedBy: '', lockedUntil: '', nextPageId: '', prevPageId: '', usedAsMastery: 'false', proposalEditNum: '0', permissions: { edit: { has: 'false', reason: 'You don't have domain permission to edit this page' }, proposeEdit: { has: 'true', reason: '' }, delete: { has: 'false', reason: 'You don't have domain permission to delete this page' }, comment: { has: 'false', reason: 'You can't comment in this domain because you are not a member' }, proposeComment: { has: 'true', reason: '' } }, summaries: {}, creatorIds: [ 'MYass' ], childIds: [], parentIds: [ 'solomonoff_induction' ], commentIds: [], questionIds: [], tagIds: [], relatedIds: [], markIds: [], explanations: [], learnMore: [], requirements: [], subjects: [], lenses: [], lensParentId: '', pathPages: [], learnMoreTaughtMap: {}, learnMoreCoveredMap: {}, learnMoreRequiredMap: {}, editHistory: {}, domainSubmissions: {}, answers: [], answerCount: '0', commentCount: '0', newCommentCount: '0', linkedMarkCount: '0', changeLogs: [ { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '7557', pageId: '22l', userId: 'MYass', edit: '1', type: 'newEdit', createdAt: '2016-02-22 05:23:44', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '7556', pageId: '22l', userId: 'MYass', edit: '0', type: 'newParent', createdAt: '2016-02-22 04:32:49', auxPageId: 'solomonoff_induction', oldSettingsValue: '', newSettingsValue: '' } ], feedSubmissions: [], searchStrings: {}, hasChildren: 'false', hasParents: 'true', redAliases: {}, improvementTagIds: [], nonMetaTagIds: [], todos: [], slowDownMap: 'null', speedUpMap: 'null', arcPageIds: 'null', contentRequests: {} }