{ localUrl: '../page/mind_design_space_wide.html', arbitalUrl: 'https://arbital.com/p/mind_design_space_wide', rawJsonUrl: '../raw/4ly.json', likeableId: '0', likeableType: 'page', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], pageId: 'mind_design_space_wide', edit: '1', editSummary: '', prevEdit: '0', currentEdit: '1', wasPublished: 'true', type: 'wiki', title: 'Mind design space is wide', clickbait: 'Imagine all human beings as one tiny dot inside a much vaster sphere of possibilities for "The space of minds in general." It is wiser to make claims about *some* minds than *all* minds.', textLength: '1434', alias: 'mind_design_space_wide', externalUrl: '', sortChildrenBy: 'likes', hasVote: 'false', voteType: '', votesAnonymous: 'false', editCreatorId: 'EliezerYudkowsky', editCreatedAt: '2016-06-19 20:28:17', pageCreatorId: 'EliezerYudkowsky', pageCreatedAt: '2016-06-19 20:28:17', seeDomainId: '0', editDomainId: 'EliezerYudkowsky', submitToDomainId: '0', isAutosave: 'false', isSnapshot: 'false', isLiveEdit: 'true', isMinorEdit: 'false', indirectTeacher: 'false', todoCount: '0', isEditorComment: 'false', isApprovedComment: 'true', isResolved: 'false', snapshotText: '', anchorContext: '', anchorText: '', anchorOffset: '0', mergedInto: '', isDeleted: 'false', viewCount: '51', text: 'Imagine an enormous space of possible mind designs, within which all humans who've ever lived are a single tiny dot. We all have the same cerebral cortex, cerebellum, thalamus, etcetera. There's an instinct to imagine "Artificial Intelligences" as a kind of weird tribe that lives across the river, and to ask what peculiar customs this foreign tribe might have. Really, the term "Artificial Intelligence" just refers to the *entire* space of possibilities outside the tiny human dot. So to most questions about AI, the answer may be, "It depends on the exact mind design of the AI." By similar reasoning, a universal claim over all possible AIs is much more dubious than a claim about at least one AI. If you imagine that in the vast space of all mind designs there are at least a billion binary design choices that can be made, then there are at least $2^{1,000,000,000}$ distinct mind designs. We might say that any claim of the form "Every possible mind design has property $P$" has $2^{1,000,000,000}$ chances to be false, while any claim of the form "There exists at least one mind design with property $P$" has $2^{1,000,000,000}$ chances to be true. This doesn't preclude us from thinking about properties that *most* mind designs might have. 
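As a crude toy model of that universal-versus-existential asymmetry: suppose, purely for illustration, that each of the $N = 2^{1,000,000,000}$ possible designs had property $P$ independently with some fixed probability $p$ strictly between $0$ and $1$ (real design choices are of course not independent; this only illustrates the direction of the effect). Then $\Pr[\text{every design has } P] = p^N \approx 0$, while $\Pr[\text{at least one design has } P] = 1 - (1-p)^N \approx 1$; under this toy model the universal claim is almost certainly false and the existential claim almost certainly true. 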
But it does suggest that if we don't like some property $P$ that seems likely to *usually* hold, we can maybe find some special case of a mind design for which, unusually, $P$ is false.', metaText: '', isTextLoaded: 'true', isSubscribedToDiscussion: 'false', isSubscribedToUser: 'false', isSubscribedAsMaintainer: 'false', discussionSubscriberCount: '1', maintainerCount: '1', userSubscriberCount: '0', lastVisit: '', hasDraft: 'false', votes: [], voteSummary: 'null', muVoteSummary: '0', voteScaling: '0', currentUserVote: '-2', voteCount: '0', lockedVoteType: '', maxEditEver: '0', redLinkCount: '0', lockedBy: '', lockedUntil: '', nextPageId: '', prevPageId: '', usedAsMastery: 'false', proposalEditNum: '0', permissions: { edit: { has: 'false', reason: 'You don't have domain permission to edit this page' }, proposeEdit: { has: 'true', reason: '' }, delete: { has: 'false', reason: 'You don't have domain permission to delete this page' }, comment: { has: 'false', reason: 'You can't comment in this domain because you are not a member' }, proposeComment: { has: 'true', reason: '' } }, summaries: {}, creatorIds: [ 'EliezerYudkowsky' ], childIds: [], parentIds: [ 'orthogonality' ], commentIds: [], questionIds: [], tagIds: [ 'stub_meta_tag' ], relatedIds: [], markIds: [], explanations: [], learnMore: [], requirements: [], subjects: [], lenses: [], lensParentId: '', pathPages: [], learnMoreTaughtMap: {}, learnMoreCoveredMap: {}, learnMoreRequiredMap: {}, editHistory: {}, domainSubmissions: {}, answers: [], answerCount: '0', commentCount: '0', newCommentCount: '0', linkedMarkCount: '0', changeLogs: [ { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '14012', pageId: 'mind_design_space_wide', userId: 'EliezerYudkowsky', edit: '0', type: 'newTag', createdAt: '2016-06-19 20:28:19', auxPageId: 'stub_meta_tag', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '14011', pageId: 'mind_design_space_wide', userId: 'EliezerYudkowsky', edit: '0', type: 'newParent', createdAt: '2016-06-19 20:28:18', auxPageId: 'orthogonality', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '14009', pageId: 'mind_design_space_wide', userId: 'EliezerYudkowsky', edit: '1', type: 'newEdit', createdAt: '2016-06-19 20:28:17', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' } ], feedSubmissions: [], searchStrings: {}, hasChildren: 'false', hasParents: 'true', redAliases: {}, improvementTagIds: [], nonMetaTagIds: [], todos: [], slowDownMap: 'null', speedUpMap: 'null', arcPageIds: 'null', contentRequests: {} }