{ localUrl: '../page/advanced_agent_theory.html', arbitalUrl: 'https://arbital.com/p/advanced_agent_theory', rawJsonUrl: '../raw/7vg.json', likeableId: '0', likeableType: 'page', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], pageId: 'advanced_agent_theory', edit: '1', editSummary: '', prevEdit: '0', currentEdit: '1', wasPublished: 'true', type: 'wiki', title: 'Theory of (advanced) agents', clickbait: 'One of the research subproblems of building powerful nice AIs, is the theory of (sufficiently advanced) minds in general.', textLength: '565', alias: 'advanced_agent_theory', externalUrl: '', sortChildrenBy: 'likes', hasVote: 'false', voteType: '', votesAnonymous: 'false', editCreatorId: 'EliezerYudkowsky', editCreatedAt: '2017-02-17 21:22:50', pageCreatorId: 'EliezerYudkowsky', pageCreatedAt: '2017-02-17 21:22:50', seeDomainId: '0', editDomainId: 'EliezerYudkowsky', submitToDomainId: '0', isAutosave: 'false', isSnapshot: 'false', isLiveEdit: 'true', isMinorEdit: 'false', indirectTeacher: 'false', todoCount: '0', isEditorComment: 'false', isApprovedComment: 'false', isResolved: 'false', snapshotText: '', anchorContext: '', anchorText: '', anchorOffset: '0', mergedInto: '', isDeleted: 'false', viewCount: '45', text: 'Many issues in AI alignment have dependencies on what we think we can factually say about the general design space of cognitively powerful agents, or on which background assumptions yield which implications about advanced agents. E.g., the [1y Orthogonality Thesis] is a claim about the general design space of powerful AIs. 
The design space of advanced agents is very wide, and only very weak statements seem likely to be true about the *whole* design space; but we can still try to say \'If X then Y\' and refute claims about \'No need for if-X, Y happens anyway!\'', metaText: '', isTextLoaded: 'true', isSubscribedToDiscussion: 'false', isSubscribedToUser: 'false', isSubscribedAsMaintainer: 'false', discussionSubscriberCount: '1', maintainerCount: '1', userSubscriberCount: '0', lastVisit: '', hasDraft: 'false', votes: [], voteSummary: 'null', muVoteSummary: '0', voteScaling: '0', currentUserVote: '-2', voteCount: '0', lockedVoteType: '', maxEditEver: '0', redLinkCount: '0', lockedBy: '', lockedUntil: '', nextPageId: '', prevPageId: '', usedAsMastery: 'false', proposalEditNum: '0', permissions: { edit: { has: 'false', reason: 'You don\'t have domain permission to edit this page' }, proposeEdit: { has: 'true', reason: '' }, delete: { has: 'false', reason: 'You don\'t have domain permission to delete this page' }, comment: { has: 'false', reason: 'You can\'t comment in this domain because you are not a member' }, proposeComment: { has: 'true', reason: '' } }, summaries: {}, creatorIds: [ 'EliezerYudkowsky' ], childIds: [ 'instrumental_convergence', 'orthogonality', 'advanced_agent' ], parentIds: [ 'ai_alignment' ], commentIds: [], questionIds: [], tagIds: [ 'stub_meta_tag' ], relatedIds: [], markIds: [], explanations: [], learnMore: [], requirements: [], subjects: [], lenses: [], lensParentId: '', pathPages: [], learnMoreTaughtMap: {}, learnMoreCoveredMap: {}, learnMoreRequiredMap: {}, editHistory: {}, domainSubmissions: {}, answers: [], answerCount: '0', commentCount: '0', newCommentCount: '0', linkedMarkCount: '0', changeLogs: [ { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '22084', pageId: 'advanced_agent_theory', userId: 'EliezerYudkowsky', edit: '0', type: 'newChild', createdAt: '2017-02-17 21:26:03', auxPageId: 'advanced_agent', oldSettingsValue: '', newSettingsValue: '' },
{ likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '22080', pageId: 'advanced_agent_theory', userId: 'EliezerYudkowsky', edit: '0', type: 'newChild', createdAt: '2017-02-17 21:24:15', auxPageId: 'instrumental_convergence', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '22076', pageId: 'advanced_agent_theory', userId: 'EliezerYudkowsky', edit: '0', type: 'newChild', createdAt: '2017-02-17 21:23:17', auxPageId: 'orthogonality', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '22072', pageId: 'advanced_agent_theory', userId: 'EliezerYudkowsky', edit: '0', type: 'newParent', createdAt: '2017-02-17 21:22:51', auxPageId: 'ai_alignment', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '22073', pageId: 'advanced_agent_theory', userId: 'EliezerYudkowsky', edit: '0', type: 'newTag', createdAt: '2017-02-17 21:22:51', auxPageId: 'stub_meta_tag', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '22070', pageId: 'advanced_agent_theory', userId: 'EliezerYudkowsky', edit: '1', type: 'newEdit', createdAt: '2017-02-17 21:22:50', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' } ], feedSubmissions: [], searchStrings: {}, hasChildren: 'true', hasParents: 'true', redAliases: {}, improvementTagIds: [], nonMetaTagIds: [], todos: [], slowDownMap: 'null',
speedUpMap: 'null', arcPageIds: 'null', contentRequests: {} }